Full Code of danwood/SwiftUICoreImage for AI

main 9b1b38ad99ff cached
19 files
388.3 KB
104.1k tokens
1 requests
Download .txt
Showing preview only (402K chars total). Download the full file or copy to clipboard to get everything.
Repository: danwood/SwiftUICoreImage
Branch: main
Commit: 9b1b38ad99ff
Files: 19
Total size: 388.3 KB

Directory structure:
gitextract_syle2tou/

├── .gitignore
├── .swiftpm/
│   └── xcode/
│       └── package.xcworkspace/
│           └── xcshareddata/
│               └── IDEWorkspaceChecks.plist
├── GenerateSwiftUICoreImage/
│   ├── GenerateSwiftUICoreImage/
│   │   ├── ContentView.swift
│   │   ├── GenerateSwiftUICoreImage.entitlements
│   │   └── GenerateSwiftUICoreImageApp.swift
│   └── GenerateSwiftUICoreImage.xcodeproj/
│       ├── project.pbxproj
│       └── project.xcworkspace/
│           ├── contents.xcworkspacedata
│           └── xcshareddata/
│               └── IDEWorkspaceChecks.plist
├── Generator/
│   ├── CIImage-Generation.swift
│   ├── FunctionMinima.json
│   ├── MissingParameterDocumentation.json
│   ├── abstracts.json
│   └── docLookup.json
├── LICENSE.txt
├── Package.swift
├── README.md
└── Sources/
    ├── CIImage-Extensions.swift
    ├── CIImage-Filters.swift
    └── Image-Extensions.swift

================================================
FILE CONTENTS
================================================

================================================
FILE: .gitignore
================================================
.DS_Store
/.build
/Packages
/*.xcodeproj
xcuserdata/
DerivedData/
.swiftpm/config/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc


================================================
FILE: .swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/ContentView.swift
================================================
//
//  ContentView.swift
//  GenerateSwiftUICoreImage
//
//  Created by Dan Wood on 6/25/24.
//

import SwiftUI

/// Placeholder UI for the generator app; the real output of this tool is
/// printed to the console at launch, so the window only points the user there.
struct ContentView: View {

    // Extracted so `body` reads as a simple composition of named pieces.
    private var globeIcon: some View {
        Image(systemName: "globe")
            .imageScale(.large)
            .foregroundStyle(.tint)
    }

    var body: some View {
        VStack {
            globeIcon
            Text("See console output")
        }
        .padding()
    }
}

#Preview {
    ContentView()
}


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>com.apple.security.app-sandbox</key>
    <true/>
    <key>com.apple.security.files.user-selected.read-only</key>
    <true/>
</dict>
</plist>


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImageApp.swift
================================================
//
//  GenerateSwiftUICoreImageApp.swift
//  GenerateSwiftUICoreImage
//
//  Created by Dan Wood on 6/25/24.
//

import SwiftUI

/// App entry point for the code generator. Building the scene triggers the
/// two console dumps (generated filter source, then properties still missing
/// documentation); the window itself is just a stub pointing at the console.
@main
struct GenerateSwiftUICoreImageApp: App {
    var body: some Scene {
        // Run both side-effecting dumps, in order, during scene construction.
        let _ = {
            dumpFilters()
            dumpUnknownProperties()
        }()
        WindowGroup {
            ContentView()
        }
    }
}


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
	archiveVersion = 1;
	classes = {
	};
	objectVersion = 56;
	objects = {

/* Begin PBXBuildFile section */
		CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */ = {isa = PBXBuildFile; fileRef = CE67B0D02C2C82A4003E692B /* docLookup.json */; };
		CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */; };
		CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AC12C2B75090085C241 /* ContentView.swift */; };
		CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD12C2B75260085C241 /* abstracts.json */; };
		CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */; };
		CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD42C2B75260085C241 /* FunctionMinima.json */; };
		CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */; };
		CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */; };
		CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */; };
		CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
		CE67B0D02C2C82A4003E692B /* docLookup.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = docLookup.json; sourceTree = "<group>"; };
		CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GenerateSwiftUICoreImage.app; sourceTree = BUILT_PRODUCTS_DIR; };
		CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateSwiftUICoreImageApp.swift; sourceTree = "<group>"; };
		CE9A3AC12C2B75090085C241 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
		CE9A3AD12C2B75260085C241 /* abstracts.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = abstracts.json; sourceTree = "<group>"; };
		CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Generation.swift"; sourceTree = "<group>"; };
		CE9A3AD42C2B75260085C241 /* FunctionMinima.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = FunctionMinima.json; sourceTree = "<group>"; };
		CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = MissingParameterDocumentation.json; sourceTree = "<group>"; };
		CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Extensions.swift"; sourceTree = "<group>"; };
		CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Filters.swift"; sourceTree = "<group>"; };
		CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Image-Extensions.swift"; sourceTree = "<group>"; };
		CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = GenerateSwiftUICoreImage.entitlements; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
		CE9A3AB92C2B75090085C241 /* Frameworks */ = {
			isa = PBXFrameworksBuildPhase;
			buildActionMask = 2147483647;
			files = (
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
		CE9A3AB32C2B75090085C241 = {
			isa = PBXGroup;
			children = (
				CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */,
				CE9A3AD62C2B75260085C241 /* Generator */,
				CE9A3ADA2C2B75260085C241 /* Sources */,
				CE9A3ABD2C2B75090085C241 /* Products */,
			);
			sourceTree = "<group>";
		};
		CE9A3ABD2C2B75090085C241 /* Products */ = {
			isa = PBXGroup;
			children = (
				CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */,
			);
			name = Products;
			sourceTree = "<group>";
		};
		CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {
			isa = PBXGroup;
			children = (
				CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */,
				CE9A3AC12C2B75090085C241 /* ContentView.swift */,
				CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */,
			);
			path = GenerateSwiftUICoreImage;
			sourceTree = "<group>";
		};
		CE9A3AD62C2B75260085C241 /* Generator */ = {
			isa = PBXGroup;
			children = (
				CE67B0D02C2C82A4003E692B /* docLookup.json */,
				CE9A3AD12C2B75260085C241 /* abstracts.json */,
				CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */,
				CE9A3AD42C2B75260085C241 /* FunctionMinima.json */,
				CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */,
			);
			name = Generator;
			path = ../Generator;
			sourceTree = "<group>";
		};
		CE9A3ADA2C2B75260085C241 /* Sources */ = {
			isa = PBXGroup;
			children = (
				CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */,
				CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */,
				CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */,
			);
			name = Sources;
			path = ../Sources;
			sourceTree = "<group>";
		};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
		CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {
			isa = PBXNativeTarget;
			buildConfigurationList = CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */;
			buildPhases = (
				CE9A3AB82C2B75090085C241 /* Sources */,
				CE9A3AB92C2B75090085C241 /* Frameworks */,
				CE9A3ABA2C2B75090085C241 /* Resources */,
			);
			buildRules = (
			);
			dependencies = (
			);
			name = GenerateSwiftUICoreImage;
			productName = GenerateSwiftUICoreImage;
			productReference = CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */;
			productType = "com.apple.product-type.application";
		};
/* End PBXNativeTarget section */

/* Begin PBXProject section */
		CE9A3AB42C2B75090085C241 /* Project object */ = {
			isa = PBXProject;
			attributes = {
				BuildIndependentTargetsInParallel = 1;
				LastSwiftUpdateCheck = 1540;
				LastUpgradeCheck = 1540;
				TargetAttributes = {
					CE9A3ABB2C2B75090085C241 = {
						CreatedOnToolsVersion = 15.4;
					};
				};
			};
			buildConfigurationList = CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */;
			compatibilityVersion = "Xcode 14.0";
			developmentRegion = en;
			hasScannedForEncodings = 0;
			knownRegions = (
				en,
				Base,
			);
			mainGroup = CE9A3AB32C2B75090085C241;
			productRefGroup = CE9A3ABD2C2B75090085C241 /* Products */;
			projectDirPath = "";
			projectRoot = "";
			targets = (
				CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */,
			);
		};
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
		CE9A3ABA2C2B75090085C241 /* Resources */ = {
			isa = PBXResourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */,
				CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */,
				CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */,
				CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXResourcesBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
		CE9A3AB82C2B75090085C241 /* Sources */ = {
			isa = PBXSourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */,
				CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */,
				CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */,
				CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */,
				CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */,
				CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */,
			);
			runOnlyForDeploymentPostprocessing = 0;
		};
/* End PBXSourcesBuildPhase section */

/* Begin XCBuildConfiguration section */
		CE9A3AC92C2B750B0085C241 /* Debug */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ALWAYS_SEARCH_USER_PATHS = NO;
				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
				CLANG_ANALYZER_NONNULL = YES;
				CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
				CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
				CLANG_ENABLE_MODULES = YES;
				CLANG_ENABLE_OBJC_ARC = YES;
				CLANG_ENABLE_OBJC_WEAK = YES;
				CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
				CLANG_WARN_BOOL_CONVERSION = YES;
				CLANG_WARN_COMMA = YES;
				CLANG_WARN_CONSTANT_CONVERSION = YES;
				CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
				CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
				CLANG_WARN_EMPTY_BODY = YES;
				CLANG_WARN_ENUM_CONVERSION = YES;
				CLANG_WARN_INFINITE_RECURSION = YES;
				CLANG_WARN_INT_CONVERSION = YES;
				CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
				CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
				CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
				CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
				CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
				CLANG_WARN_STRICT_PROTOTYPES = YES;
				CLANG_WARN_SUSPICIOUS_MOVE = YES;
				CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
				CLANG_WARN_UNREACHABLE_CODE = YES;
				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
				COPY_PHASE_STRIP = NO;
				DEBUG_INFORMATION_FORMAT = dwarf;
				ENABLE_STRICT_OBJC_MSGSEND = YES;
				ENABLE_TESTABILITY = YES;
				ENABLE_USER_SCRIPT_SANDBOXING = YES;
				GCC_C_LANGUAGE_STANDARD = gnu17;
				GCC_DYNAMIC_NO_PIC = NO;
				GCC_NO_COMMON_BLOCKS = YES;
				GCC_OPTIMIZATION_LEVEL = 0;
				GCC_PREPROCESSOR_DEFINITIONS = (
					"DEBUG=1",
					"$(inherited)",
				);
				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
				GCC_WARN_UNDECLARED_SELECTOR = YES;
				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
				GCC_WARN_UNUSED_FUNCTION = YES;
				GCC_WARN_UNUSED_VARIABLE = YES;
				LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
				MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
				MTL_FAST_MATH = YES;
				ONLY_ACTIVE_ARCH = YES;
				SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
				SWIFT_OPTIMIZATION_LEVEL = "-Onone";
			};
			name = Debug;
		};
		CE9A3ACA2C2B750B0085C241 /* Release */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ALWAYS_SEARCH_USER_PATHS = NO;
				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
				CLANG_ANALYZER_NONNULL = YES;
				CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
				CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
				CLANG_ENABLE_MODULES = YES;
				CLANG_ENABLE_OBJC_ARC = YES;
				CLANG_ENABLE_OBJC_WEAK = YES;
				CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
				CLANG_WARN_BOOL_CONVERSION = YES;
				CLANG_WARN_COMMA = YES;
				CLANG_WARN_CONSTANT_CONVERSION = YES;
				CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
				CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
				CLANG_WARN_EMPTY_BODY = YES;
				CLANG_WARN_ENUM_CONVERSION = YES;
				CLANG_WARN_INFINITE_RECURSION = YES;
				CLANG_WARN_INT_CONVERSION = YES;
				CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
				CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
				CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
				CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
				CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
				CLANG_WARN_STRICT_PROTOTYPES = YES;
				CLANG_WARN_SUSPICIOUS_MOVE = YES;
				CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
				CLANG_WARN_UNREACHABLE_CODE = YES;
				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
				COPY_PHASE_STRIP = NO;
				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
				ENABLE_NS_ASSERTIONS = NO;
				ENABLE_STRICT_OBJC_MSGSEND = YES;
				ENABLE_USER_SCRIPT_SANDBOXING = YES;
				GCC_C_LANGUAGE_STANDARD = gnu17;
				GCC_NO_COMMON_BLOCKS = YES;
				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
				GCC_WARN_UNDECLARED_SELECTOR = YES;
				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
				GCC_WARN_UNUSED_FUNCTION = YES;
				GCC_WARN_UNUSED_VARIABLE = YES;
				LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
				MTL_ENABLE_DEBUG_INFO = NO;
				MTL_FAST_MATH = YES;
				SWIFT_COMPILATION_MODE = wholemodule;
			};
			name = Release;
		};
		CE9A3ACC2C2B750B0085C241 /* Debug */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
				CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;
				"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
				CODE_SIGN_STYLE = Automatic;
				CURRENT_PROJECT_VERSION = 1;
				DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\"";
				DEVELOPMENT_TEAM = 3SP7MRA6P9;
				ENABLE_HARDENED_RUNTIME = YES;
				ENABLE_PREVIEWS = YES;
				GENERATE_INFOPLIST_FILE = YES;
				"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
				"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
				IPHONEOS_DEPLOYMENT_TARGET = 17.5;
				LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
				"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
				MACOSX_DEPLOYMENT_TARGET = 14.5;
				MARKETING_VERSION = 1.0;
				PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SDKROOT = auto;
				SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
				SWIFT_EMIT_LOC_STRINGS = YES;
				SWIFT_VERSION = 5.0;
				TARGETED_DEVICE_FAMILY = "1,2";
			};
			name = Debug;
		};
		CE9A3ACD2C2B750B0085C241 /* Release */ = {
			isa = XCBuildConfiguration;
			buildSettings = {
				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
				CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;
				"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
				CODE_SIGN_STYLE = Automatic;
				CURRENT_PROJECT_VERSION = 1;
				DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\"";
				DEVELOPMENT_TEAM = 3SP7MRA6P9;
				ENABLE_HARDENED_RUNTIME = YES;
				ENABLE_PREVIEWS = YES;
				GENERATE_INFOPLIST_FILE = YES;
				"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
				"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
				"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
				"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
				IPHONEOS_DEPLOYMENT_TARGET = 17.5;
				LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
				"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
				MACOSX_DEPLOYMENT_TARGET = 14.5;
				MARKETING_VERSION = 1.0;
				PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;
				PRODUCT_NAME = "$(TARGET_NAME)";
				SDKROOT = auto;
				SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
				SWIFT_EMIT_LOC_STRINGS = YES;
				SWIFT_VERSION = 5.0;
				TARGETED_DEVICE_FAMILY = "1,2";
			};
			name = Release;
		};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
		CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */ = {
			isa = XCConfigurationList;
			buildConfigurations = (
				CE9A3AC92C2B750B0085C241 /* Debug */,
				CE9A3ACA2C2B750B0085C241 /* Release */,
			);
			defaultConfigurationIsVisible = 0;
			defaultConfigurationName = Release;
		};
		CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */ = {
			isa = XCConfigurationList;
			buildConfigurations = (
				CE9A3ACC2C2B750B0085C241 /* Debug */,
				CE9A3ACD2C2B750B0085C241 /* Release */,
			);
			defaultConfigurationIsVisible = 0;
			defaultConfigurationName = Release;
		};
/* End XCConfigurationList section */
	};
	rootObject = CE9A3AB42C2B75090085C241 /* Project object */;
}


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>


================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>


================================================
FILE: Generator/CIImage-Generation.swift
================================================
//
//  CIImage-Generation.swift
//  SwiftUI Core Image
//
//  Created by Dan Wood on 4/27/23.
//
// When executed, this outputs Swift code that can be pasted into the file "CIImage+Generated.swift".
//
// This will run under iOS or macOS and the resulting code is almost the same. Notably in affineClamp and affineTile the default values are not
// the same. Also as noted in the documentation that we generate, the `cubeDimension` parameter has a different range between iOS and macOS.

import Foundation
import CoreImage
import CoreImage.CIFilterBuiltins

#if canImport(UIKit)
// Platform-neutral alias for the native affine-transform type.
// NOTE(review): on iOS this is the value type CGAffineTransform, while on macOS it
// is the reference type NSAffineTransform with a different API — presumably used
// only for attribute class-name checks during generation (see the header comment
// about affineClamp/affineTile defaults differing per platform); confirm usage in
// the portion of this file not shown here.
private typealias AffineTransform = CGAffineTransform
#elseif canImport(AppKit)
private typealias AffineTransform = NSAffineTransform
#endif

// Accumulator for filter input parameters that lack documentation. Seeded from
// MissingParameterDocumentation.json in dumpFilters(), and printed afterwards by
// dumpUnknownProperties() so the JSON file can be improved over time.
private var unknownProperties: [String: [String: String]] = [:]

/// Prints, to the console, generated Swift source for `CIImage` filter extensions,
/// meant to be pasted into "CIImage+Generated.swift".
///
/// Reads four bundled JSON resources (documentation URL fragments, filter
/// abstracts, minimum OS versions, and hand-written parameter documentation);
/// if any is missing or malformed, prints an error comment and returns without
/// generating anything.
func dumpFilters() {

	/*

	 New documentation base found at
	 https://developer.apple.com/documentation/coreimage
	 or
	 https://developer.apple.com/documentation/coreimage/cifilter

	 15 categories. Open each in tab. Select all, copy, paste into rich text TextEdit doc. Save as HTML.

	 Copy this source, then in terminal, grep out the lines I want:

	 pbpaste | grep 'class func' | grep 'any CIFilter ' | sort | uniq > ~/Desktop/AllFunctions.html

	 (There are a few duplicated functions; gonna not worry about right now)

	 In BBEdit, remove the stuff before the

	 From that, in BBEdit, grep replace all lines:

	 ^.+<a href="https://developer.apple.com/documentation/coreimage/cifilter/([^"]+)">class func <span class="[^"]+">([^>]+)</span><span class="[^"]+">\(\) -&gt; any CIFilter &amp; ([^>]+)</span></a></span></p>
	 to:

	 "\2": "\1",

	 and then…

	 ^.+<a href="https://developer.apple.com/documentation/coreimage/cifilter/([^"]+)">class func ([^(]+).+? any CIFilter &amp; ([^<]+)<span class="[^"]+"></span></a></span></p>

	 to:

	 "\2": "\1",


	 Save as RawLookup.json to Desktop

	 cat ~/Desktop/RawLookup.json | sort | uniq > ~/Desktop/docLookup.json

	 Now edit to include { and } and remove last comma

	 This file lets us know the documentation URL fragment to append to https://developer.apple.com/documentation/coreimage/cifilter/
	 */

	guard let docLookup: [String: String] = loadJSONDictionary("docLookup")
	else { print("// 🛑 can't load docLookup.json"); return }

	/*
	 Load abstracts for all functions that are documented on the OLD reference page. Still, some of these descriptions are a bit more descriptive than the built-in descriptions.

	 Possible improvement, scrape the same pages that are used above to generate docLookup.json to get the most up-to-date abstracts from the web.

	 Start with
	 https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/

	 auto-expand all symbols

	 get HTML source
	 in BBEdit change all instances (with Grep) of:
	 +href="#//apple_ref/doc/filter/ci/([^"]+)"\n +title="([^"]+)">
	 to:
	 •"\1": "\2",

	 Sort, extract lines starting with •
	 Paste and preserve formatting into abstracts.json; fix the last line.
	 Look for any little tweaks that may be needed.

	 */
	guard let abstractLookup: [String: String] = loadJSONDictionary("abstracts")
	else { print("// 🛑 can't load abstracts.json"); return }

	/*
	 A dictionary mapping filters (pretty function names) to override iOS versions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs.

	 Not sure where we got this originally! We may need to update some of these.

	 */
	guard let functionMinima: [String: String] = loadJSONDictionary("FunctionMinima")
	else { print("// 🛑 can't load FunctionMinima.json"); return }

	/* Generate this list by running the code; it finds inputs missing documentation replacing with "_____TODO_____". Update the MissingParameterDocumentation.json file as this is improved. Documentation can come from whatever sources can be scraped together; use "_NOTE" key just to notate how we found the information.
	 */
	guard let forUnknownProperties: [String: [String: String]] = loadJSONDictionary("MissingParameterDocumentation")
	else { print("// 🛑 can't load MissingParameterDocumentation.json"); return }
	unknownProperties = forUnknownProperties

	// Partition every registered Core Image filter: filters with no input image
	// are generators; anything else that produces an output image is an
	// image-to-image filter. Filters fitting neither shape are reported and skipped.
	let ciFilterList = CIFilter.filterNames(inCategories: nil)

	var generators: [String: CIFilter] = [:]
	var imageToImage: [String: CIFilter] = [:]

	for filterName in ciFilterList {

		guard let filter = CIFilter(name: filterName) else { print("// 🛑 can't instantiate \(filterName)"); continue }

		if !filter.inputKeys.contains(kCIInputImageKey) {
			generators[filterName] = filter
		} else if filter.outputKeys.contains(kCIOutputImageKey) {
			imageToImage[filterName] = filter
		} else {
			print("// 🛑 Don't know what to do with \(filterName) - outputKeys = \(filter.outputKeys)")
		}
	}

	// Header of the generated file.
	print("//")
	print("// Automatically generated by CIImage-Generation.swift - do not edit")
	print("//")
	print("")
	print("import Foundation")
	print("import CoreImage")
	print("import CoreImage.CIFilterBuiltins")
	print("import CoreML")
	print("import AVFoundation")
	print("")
	print("public extension CIImage {")
	print("")
	print("//")
	print("// MARK: IMAGE-TO-IMAGE FILTERS")
	print("//")
	// Sorted for deterministic, diff-friendly output.
	for filterName in imageToImage.keys.sorted() {
		guard let filter: CIFilter = imageToImage[filterName] else { continue }
		outputImageToImage(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)
	}
	print("")
	print("//")
	print("// MARK: GENERATORS")
	print("//")
	for filterName in generators.keys.sorted() {
		guard let filter: CIFilter = generators[filterName] else { continue }
		outputGeneratorFilter(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)
	}

	// End of class extension
	print("}")
	print("\n\n\n\n\n\n\n")
}

/// Loads a bundled ".json" resource and casts the decoded top-level object to the
/// requested type.
///
/// Centralizes the load-decode-cast boilerplate the four resource loads above
/// previously repeated (which also re-declared the same `url`/`data`/`json`
/// guard-let bindings in one scope).
/// - Parameter resourceName: Base name of the resource, without the ".json" extension.
/// - Returns: The decoded object as `T`, or nil when the resource is missing,
///   unreadable, not valid JSON, or of the wrong shape. Callers report the failure.
private func loadJSONDictionary<T>(_ resourceName: String) -> T? {
	guard let url = Bundle.main.url(forResource: resourceName, withExtension: "json"),
		  let data = try? Data(contentsOf: url),
		  let json = try? JSONSerialization.jsonObject(with: data, options: [])
	else { return nil }
	return json as? T
}

// Use this to start collecting properties needing some documentation, to then put into MissingParameterDocumentation.json
func dumpUnknownProperties() {
	do {
		// Pretty, key-sorted output so it can be pasted straight into the JSON file.
		let encoded = try JSONSerialization.data(
			withJSONObject: unknownProperties,
			options: [.sortedKeys, .prettyPrinted]
		)
		guard let jsonText = String(data: encoded, encoding: .utf8) else {
			print("Unable to convert data to JSON")
			return
		}
		print("\n\n\n_________________________\n\nDumped properties missing documentation = \n\n\n\(jsonText)")
	} catch {
		print(error)
	}
}

/// Emits the documentation comment, availability annotation, and generated
/// function for one generator-style filter (a filter with no input image).
/// Filters that already have a hand-written CIImage initializer are skipped
/// with an informational comment instead of generating a duplicate.
private func outputGeneratorFilter(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {
	// Generators whose functionality CIImage already exposes as an initializer.
	let existingInitializers: [String: String] = ["CIConstantColorGenerator": "init(color: CIColor)"]

	if let existingFunction: String = existingInitializers[filter.name] {
		print("// ℹ️ \(filter.name) already has a CIImage initializer: \(existingFunction)")
		return
	}

	outputDocumentation(filter, isGenerator: true, abstractLookup: abstractLookup, docLookup: docLookup)
	outputOSVersion(filter, functionMinima: functionMinima)
	outputImageFunction(filter, isGenerator: true)
}

/// Prints the `///` documentation block for one generated filter function:
/// display name, description (or a longer scraped abstract), documentation links,
/// categories, the parameter list (with numeric ranges appended), and a `- Returns:` line.
/// - Parameters:
///   - filter: the CIFilter being wrapped
///   - isGenerator: true when emitting a static generator function (no input image)
///   - abstractLookup: filter name → longer abstract scraped from Apple's website
///   - docLookup: lowercased function name → URL fragment on developer.apple.com
private func outputDocumentation(_ filter: CIFilter, isGenerator: Bool, abstractLookup: [String: String], docLookup: [String: String]) {

	let filterName = filter.name
	let description: String? = CIFilter.localizedDescription(forFilterName: filterName)
	let categories: Array<String> = filter.attributes[kCIAttributeFilterCategories] as? Array<String> ?? []
	let filterDisplayName: String = filter.attributes[kCIAttributeFilterDisplayName] as? String ?? ""
	let documentationURL: URL? = filter.attributes[kCIAttributeReferenceDocumentation] as? URL

	// https://developer.apple.com/documentation/xcode/writing-symbol-documentation-in-your-source-files
	print("\n/// \(filterDisplayName)")
	print("///")
	if let description {
		if let abstract = abstractLookup[filterName], !abstract.hasPrefix("Returns "), abstract.count > description.count {
			// Replace description with longer abstract scraped from the website, unless it starts with 'Returns ' since we use that for the output.
			print("/// \(abstract)")
		} else {
			print("/// \(description)")
		}
		print("///")
	}

	// Convert, for example, CIAccordionFoldTransition to accordionFoldTransition
	let functionFilterNameCapitalized = filterName.dropFirst(2)
	var functionFilterName = (functionFilterNameCapitalized.first?.lowercased() ?? "") + functionFilterNameCapitalized.dropFirst()

	// Names whose simple first-letter-lowercasing doesn't match the documentation URL key.
	let manualNameLookup = ["CICMYKHalftone": "cmykHalftone", "CIPDF417BarcodeGenerator": "pdf417BarcodeGenerator", "CIQRCodeGenerator": "qrCodeGenerator"]
	if let foundManualLookup = manualNameLookup[filterName] {
		functionFilterName = foundManualLookup
	}

	// These are still in beta, so I'm not seeing them on the main category lists. https://developer.apple.com/documentation/coreimage/cifilter
	let manualURLLookup = ["CIAreaBoundsRed": "4401847-areaboundsred",
						   "CIMaximumScaleTransform": "4401870-maximumscaletransform",
						   "CIToneMapHeadroom": "4401878-tonemapheadroom",
						   "CIAreaAlphaWeightedHistogram": "4401846-areaalphaweightedhistogram"
	]

	let newDocURLFragment: String?
	if let manualURLFragment = manualURLLookup[filterName] {
		newDocURLFragment = manualURLFragment
	} else {
		newDocURLFragment = docLookup[functionFilterName]
	}

	if let newDocURLFragment {
		print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))")
	} else {
		// Retry the lookup without a trailing "Filter" suffix before giving up.
		let withoutSuffix = functionFilterName.replacingOccurrences(of: "Filter", with: "", options: [.backwards, .anchored])
		if let newDocURLFragment = docLookup[withoutSuffix] {
			print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))")
		} else {
			print("/// ⚠️ No documentation available for \(filterName)")
		}
	}

	if let documentationURL {
		if nil != abstractLookup[filterName] {
			// Rewrite the platform-specific legacy URL reported by the filter into the archived documentation URL.
			let urlFragment: String
#if canImport(UIKit)
			urlFragment = "http://developer.apple.com/library/ios"
#elseif canImport(AppKit)
			urlFragment = "http://developer.apple.com/library/mac"
#endif

			var urlString: String = documentationURL.absoluteString.replacingOccurrences(of: urlFragment,
																						 with: "https://developer.apple.com/library/archive",
																						 options: .anchored)
			// Shorten the extremely long classic reference URL via a link shortener.
			urlString = urlString.replacingOccurrences(of: "https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html", with: "https://t.ly/Gyd6")



			print("/// [Classic Documentation](\(urlString))")
		}

		// Special cases for documentation
		if filterName == "CIDepthBlurEffect" {
			// Some helpful hints since this is otherwise undocumented
			print("/// [WWDC Video](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_hd_image_editing_with_depth.mp4)")
			print("/// [WWDC Slides](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_image_editing_with_depth.pdf)")
		} else if filterName == "CICoreMLModelFilter" {
			print("/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)")
		}
		print("///")
	}
	if categories.count == 1, let category = categories.first {
		print("/// Category: \(CIFilter.localizedName(forCategory: category))")
		print("///")
	} else if categories.count > 1 {
		let prettyList: String = categories.map { CIFilter.localizedName(forCategory: $0) }.joined(separator: ", ")
		print("/// Categories: \(prettyList)")
		print("///")
	}
	print("///")
	print("/// - Parameters:")

	// The input image key is implicit (the function is a CIImage method), so drop it.
	// When the filter has no identity parameters and no background image, the generated
	// function gains a synthesized trailing `active` Bool — document it too.
	var adjustedInputKeys = filter.inputKeys.filter { $0 != kCIInputImageKey }
	if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage") {
		adjustedInputKeys.append("active")
	}
	for inputKey in adjustedInputKeys {
		guard inputKey != "active" else {
			print("///   - active: should this filter be applied")
			continue
		}
		guard let attributes = filter.attributes[inputKey] as? [String: AnyObject],
			  let attributeClass = attributes[kCIAttributeClass] as? String
		else {
			print("///   - \(inputKey): 🛑 couldn't get input attributes")
			continue
		}

		let displayName: String = attributes[kCIAttributeDisplayName] as? String ?? ""	// space-separated
		let longerInput: String = parameterName(displayName: displayName, filterName: filterName)
		var description:  String = attributes[kCIAttributeDescription] as? String ?? "[unknown]"

		if nil == attributes[kCIAttributeDescription] {
			
			// TEMPORARY CODE TO COLLECT UNKNOWN PROPERTIES
			var foundUnknownPropertiesForFilter: [String: String] = unknownProperties[filterName] ?? [:]
			if nil == foundUnknownPropertiesForFilter[longerInput] {
				foundUnknownPropertiesForFilter[longerInput] = "_____TODO_____"
			}
			unknownProperties[filterName] = foundUnknownPropertiesForFilter
			
			// Substitute hand-written documentation (from MissingParameterDocumentation.json) when available.
			if let missingParameters: [String: String] = unknownProperties[filterName],
			   let replacementDocumentation: String = missingParameters[longerInput] {
				description = replacementDocumentation
			}
		}
		// Remove rounding information since we are passing in integers directly.
		description = description.replacing(" The value will be rounded to the nearest odd integer.", with: "")
		description = description.replacing(" Set to nil for automatic.", with: "")
		// Fix this weird ObjC style documentation
		description = description.replacing("Force a compact style Aztec code to @YES or @NO.",
											with: "A Boolean that specifies whether to force a compact style Aztec code.")
		description = description.replacing("Force compaction style to @YES or @NO.",
											with: "A Boolean value specifying whether to force compaction style.")

		print("///   - \(longerInput): \(description)", terminator: "")

		// For numbers, show the range on the same line
		switch attributeClass {
		case "NSNumber":
			// Booleans (and `extrapolate`, which lacks type info) get no range.
			guard attributes[kCIAttributeType] as? String != kCIAttributeTypeBoolean, longerInput != "extrapolate" else { break }
			guard longerInput != "cubeDimension" else {
				// Special case. MacOS and iOS report different values so show that here
				print("(2...64 iOS; 2...128 macOS)", terminator: "")
				break
			}
			let minimumValue: Float? = (attributes[kCIAttributeMin] as? NSNumber)?.floatValue
			let maximumValue: Float? = (attributes[kCIAttributeMax] as? NSNumber)?.floatValue
			// Ignore very large maximum value since it's not practical
			// NOTE(review): this hex literal has 17 digits (≈1.5e20) — presumably 0x0800_0000_0000_0000
			// was intended; it only serves as a "very large" cutoff either way. TODO confirm.
			if let minimumValue, let maximumValue, maximumValue < 0x0800_0000_00000_0000 {
				print(" (\(minimumValue.format5)...\(maximumValue.format5))", terminator: "")
			} else if let minimumValue {
				print(" (\(minimumValue.format5)...)", terminator: "")
			} else if let maximumValue, maximumValue < 0x0800_0000_00000_0000 {
				print(" (...\(maximumValue.format5))", terminator: "")
			}

		default:
			break
		}
		print("")	// finish up the line

	}


	// Emit the "- Returns:" line, deriving the wording from the abstract/description when possible.
	if filter.outputKeys.contains(kCIOutputImageKey) {
		if isGenerator {
			if let abstract: String = abstractLookup[filterName],
			   let match = abstract.firstMatch(of: /^Generates*\h/) {
				let abstractWithoutReturnsPrefix = abstract[match.range.upperBound...]
				let sentences = Array(abstractWithoutReturnsPrefix.split(separator: /\./))
				let firstSentence = sentences.first ?? abstractWithoutReturnsPrefix
				print("/// - Returns: \(firstSentence)")
			} else if let description,
			   let match = description.firstMatch(of: /^Generates*\h/) {
				let descriptionWithoutReturnsPrefix = description[match.range.upperBound...]
				let sentences = Array(descriptionWithoutReturnsPrefix.split(separator: /\./))
				let firstSentence = sentences.first ?? descriptionWithoutReturnsPrefix
				print("/// - Returns: \(firstSentence)")
			} else {
				print("/// - Returns: new `CIImage`")
			}
		} else {
			var returnInfo: String
			if var abstract = abstractLookup[filterName], abstract.hasPrefix("Returns ") {
				abstract = String(abstract.dropFirst(8))
				abstract = abstract.replacingOccurrences(of: ".", with: "", options: [.anchored, .backwards])	// remove any ending period
				returnInfo = abstract
			} else {
				returnInfo = "processed new `CIImage`"
			}
			if filter.identityInputKeys.isEmpty && filter.inputKeys.contains("inputBackgroundImage") {
				// Append info about when active is false
				returnInfo += ", or identity if `backgroundImage` is nil"
			} else if filter.identityInputKeys.isEmpty {
					// Append info about when active is false
					returnInfo += ", or identity if `active` is false"
			} else {
				// Append info about identity parameters
				returnInfo += " or identity if parameters result in no operation applied"

				// TODO: colorCrossPolynomial broken
			}
			print("/// - Returns: \(returnInfo)")

		}
	}
}

/// Emits an `@available(iOS …, macOS …, *)` line for the generated function,
/// clamping versions up to the SwiftUI-era minimums (iOS 13 / macOS 10.15) and
/// applying per-function overrides loaded from FunctionMinima.json.
private func outputOSVersion(_ filter: CIFilter, functionMinima: [String: String]) {

	var macVersion: String? = filter.attributes[kCIAttributeFilterAvailable_Mac] as? String
	// Repair the unparseable "10.?" value reported for CIHistogramDisplayFilter (documentation says 10.9).
	if Float(macVersion ?? "") == nil, filter.name == "CIHistogramDisplayFilter" {
		macVersion = "10.9"
	}

	// Anything in the 10.x range is raised to 10.15, the floor for SwiftUI and most filter functions.
	if macVersion?.firstMatch(of: /10\.[0-9]+/) != nil, macVersion != "10.15" {
		macVersion = "10.15"
	}

	var iOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_iOS] as? String
	if (Float(iOSVersion ?? "") ?? 0) < 13 {
		iOSVersion = "13"	// floor for SwiftUI and most filter functions
	}

	// Override when we have noted that the Core Image function (or a parameter of it) requires a newer OS.
	if let overrideVersion = functionMinima[filter.name.prettyFunction] {
		macVersion = overrideVersion
		if let matchingiOSVersion = ["11.0": "14", "12.0": "15", "13.0": "16"][overrideVersion] {
			iOSVersion = matchingiOSVersion
		}
	}

	if let macVersion, let iOSVersion {
		print("@available(iOS \(iOSVersion), macOS \(macVersion), *)")
	}
}

/// Prints the signature line(s) of a generated function, through the opening `{`.
/// Long or comment-carrying parameter lists are wrapped onto multiple lines.
private func outputImageFunctionHeader(_ filter: CIFilter, isGenerator: Bool) {
	let filterName: String = filter.name
	let filterFunction: String = filterName.prettyFunction

	print("\(isGenerator ? "static " : "")func \(filterFunction)(", terminator: "")

	// One "name: Type = default" string per input; the main input image is implicit.
	var inputParams: [String] = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.compactMap { inputKey in
			let inputAttributes = filter.attributes[inputKey] as? [String: AnyObject] ?? [:]
			return parameterStatement(inputKey: inputKey, inputAttributes: inputAttributes, filterName: filterName)
		}

	// Filters without identity parameters (and no background image) get a trailing `active` Bool
	// so callers can switch the filter off; synthesize attributes describing that parameter.
	if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage") {
		let attributesForActiveParam: [String: AnyObject] = [kCIAttributeDisplayName: "Active" as NSString,
															 kCIAttributeClass: "NSNumber" as NSString,
															 kCIAttributeType: kCIAttributeTypeBoolean as NSString,
															 kCIAttributeDefault: true as AnyObject,
															 kCIAttributeIdentity: true as AnyObject]
		if let activeParameterStatement = parameterStatement(inputKey: "active", inputAttributes: attributesForActiveParam, filterName: filterName) {
			inputParams.append(activeParameterStatement)
		}
	}

	let inputParamsOnOneLine = inputParams.joined(separator: ", ")
	let forceMultiLines: Bool = inputParamsOnOneLine.contains("//")	// a parameter carrying a comment must end its own line
	if inputParamsOnOneLine.count + filterFunction.count >= 100 || forceMultiLines {
		print(inputParams.joined(separator: ",\n        "), terminator: forceMultiLines ? "\n" : "")
	} else {
		print(inputParamsOnOneLine, terminator: "")
	}
	print(") -> CIImage {")
}

/// Emits a complete function body that drives the filter through the old
/// `CIFilter(name:parameters:)` dictionary API — used for filters that have
/// no typed wrapper in CoreImage.CIFilterBuiltins.
/// - Parameters:
///   - filter: the CIFilter being wrapped
///   - isGenerator: must be false; dictionary-based generators are not supported
private func outputImageDictionaryFunction(_ filter: CIFilter, isGenerator: Bool) {

	assert(!isGenerator)		// not supported for generators; none known to be needed
	let filterName: String = filter.name

	outputImageFunctionHeader(filter, isGenerator: isGenerator)

	outputIdentityGuards(filter)

	print("    // Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.")
	print("    guard let filter = CIFilter(name: \"\(filter.name)\", parameters: [", terminator: "")
	
	// One "key: value," line per input, pairing the raw input key with the Swift parameter name.
	let otherInputSettingStatements: [String] = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.map { inputKey in
			(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }	// tuple of the inputKey and its attributes
		.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
			guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String
			else { return nil }
			let inputName: String = parameterName(displayName: displayName, filterName: filterName)
			return "    \"\(inputKey)\": \(inputName),"
		}

	if !otherInputSettingStatements.isEmpty {
		print("\n")
		print(otherInputSettingStatements.joined(separator: "\n"))
		print("    ", terminator: "")
	} else {
		// No parameters: emit ":" so the literal reads `[:]` (an empty dictionary).
		print(":", terminator: "")
	}

	print("]) else { return self }")
	print("    return filter.outputImage ?? CIImage.empty()")

	print("}")

}

/// Emits a `guard … else { return self }` statement at the top of a generated function
/// so that "identity" parameter values (values that would apply no visible change)
/// short-circuit and return the receiver unfiltered.
/// Three cases: filters with a background image guard on it being non-nil; filters with
/// no identity parameters guard on the synthesized `active` flag; otherwise each
/// identity-capable parameter is compared against its identity value.
private func outputIdentityGuards(_ filter: CIFilter) {
	let filterName: String = filter.name
	// doesn't make sense to have an identity function for generators
	// Guards for identity/inert values
	let identityComparisons: String

	if filter.identityInputKeys.isEmpty {
		if filter.inputKeys.contains("inputBackgroundImage") {
			identityComparisons = "let backgroundImage"
		} else {
			identityComparisons = "active"
		}
	} else {
		// Build "param != identityValue" clauses, OR-ed together: proceed if ANY parameter differs.
		identityComparisons = filter.inputKeys
			.filter { $0 != kCIInputImageKey }
			.map { inputKey in
				(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }	// tuple of the inputKey and its attributes
			.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
				guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,
					  let identityValue: Any = inputAttributes[kCIAttributeIdentity]
				else { return nil }

				let attributeType: String? = inputAttributes[kCIAttributeType] as? String
				let inputName: String = parameterName(displayName: displayName, filterName: filterName)
				// Skip values we can't sensibly express in generated source (arbitrary vectors, data, etc.).
				guard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName)
				else { return nil }

				let identityValueFormatted: String = formatSmart(identityValue, attributeType: attributeType, inputName: inputName, filterName: filterName)
				return "\(inputName) != \(identityValueFormatted)"
			}
			.joined(separator: " || ")
	}
	if !identityComparisons.isEmpty {
		print("    guard \(identityComparisons) else { return self }")
		print("")
	}
}

/// Emits a complete function body using the typed CoreImage.CIFilterBuiltins API:
/// header, identity guards (non-generators only), filter construction, one
/// property-assignment line per input, and the output-image return.
/// - Parameters:
///   - filter: the CIFilter being wrapped
///   - isGenerator: true when emitting a static generator function (no input image)
private func outputImageFunction(_ filter: CIFilter, isGenerator: Bool) {
	let filterName: String = filter.name
	let filterFunction: String = filterName.prettyFunction

	outputImageFunctionHeader(filter, isGenerator: isGenerator)

	if !isGenerator {
		outputIdentityGuards(filter)
	}
	print("    let filter = CIFilter.\(filterFunction)() // \(filterName)")
	if !isGenerator {
		print("    filter.inputImage = self")
	}

	// One "filter.property = value" line per input, converting Int/Bool parameters to
	// Float where the builtin property (unexpectedly) requires it.
	let otherInputSettingStatements: String = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.map { inputKey in
			(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }	// tuple of the inputKey and its attributes
		.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
			guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String
			else { return nil }
			let inputName: String = parameterName(displayName: displayName, filterName: filterName)
			let attributeType: String? = inputAttributes[kCIAttributeType] as? String

			// Special case - barcode generators, for some reason, want all their parameters as Float. Let's upgrade it here to keep the API simple.
			if nil != filterFunction.firstMatch(of: /(?i)codeGenerator$/),
			   let className = inputAttributes[kCIAttributeClass] as? String,
			   let attributeType = inputAttributes[kCIAttributeType] as? String,
			   className == "NSNumber" {
				if attributeType == kCIAttributeTypeBoolean {
					return "    filter.\(inputName) = Float(\(inputName) ? 1 : 0)"
				} else {
					return "    filter.\(inputName) = Float(\(inputName))"
				}
			}

			// Annoying to have these negative cases, but the instances where
			// we need to wrap in a float are much more numerous!
			if !(filterFunction == "kMeans" && inputName == "count"),	// this function's parameter wants an integer so leave alone
			   !(filterFunction == "cannyEdgeDetector" && inputName == "hysteresisPasses"),
			   !(filterFunction == "personSegmentation" && inputName == "qualityLevel"),

				attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount {
				return "    filter.\(inputName) = Float(\(inputName))"	// We pass in Int, but function wants a Float
			}
			// fall through
			return "    filter.\(inputName) = \(inputName)"
		}
		.joined(separator: "\n")

	print(otherInputSettingStatements)
	print("    return filter.outputImage ?? CIImage.empty()")
	print("}")
}

/// Emits the documentation, availability annotation, and function body for one
/// image-to-image filter, choosing the typed-builtins or dictionary-based form,
/// or just a note when CIImage already exposes an equivalent method.
private func outputImageToImage(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {

	let filterName = filter.name

	// No typed wrapper exists in CoreImage.CIFilterBuiltins for these; drive them via the dictionary API.
	let filtersWithoutSwiftAPI: Set<String> = ["CICameraCalibrationLensCorrection", "CIGuidedFilter"]

	// CIImage already exposes equivalent methods for these; emit a note instead of a duplicate function.
	let filtersThatAlreadyHaveImageExtension: [String: String] = [
		"CIAffineTransform": "transformed(by: CGAffineTransform)",
		"CICrop": "cropped(to: CGRect)",
		"CIClamp": "clamped(to: CGRect)",
		"CISampleNearest": "samplingNearest()",
		// https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest
		"CIDepthBlurEffect": "depthBlurEffectFilter(for...)"
		// https://developer.apple.com/documentation/coreimage/cicontext#4375374
	]

	let filtersThatAlreadyHaveImageExtensionDoc: [String: String] = [
		"CISampleNearest": "https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest",
		"CIDepthBlurEffect": "https://developer.apple.com/documentation/coreimage/cicontext#4375374"
	]

	if let existingFunction = filtersThatAlreadyHaveImageExtension[filterName] {
		print("")
		print("// ℹ️ \(filterName) already has a CIImage method: func \(existingFunction) -> CIImage")
		if let existingFunctionURL = filtersThatAlreadyHaveImageExtensionDoc[filterName] {
			print("// \(existingFunctionURL)")
		}
		print("")
		return
	}

	outputDocumentation(filter, isGenerator: false, abstractLookup: abstractLookup, docLookup: docLookup)
	outputOSVersion(filter, functionMinima: functionMinima)

	if filtersWithoutSwiftAPI.contains(filterName) {
		outputImageDictionaryFunction(filter, isGenerator: false)
	} else {
		outputImageFunction(filter, isGenerator: false)
	}
}


// convert long name like "Gray Component Replacement" to input name used in CoreImage.CIFilterBuiltins. And fix a bunch of inconsistencies.
private func parameterName(displayName: String, filterName: String) -> String {
	// "Gray Component Replacement" → "GrayComponentReplacement" → "grayComponentReplacement"
	let camel: String = displayName
		.components(separatedBy: " ")
		.map(\.capitalized)
		.joined()
	var result: String = camel.prefix(1).lowercased() + camel.dropFirst()

	// Repair the known mismatches between attribute display names and CIFilterBuiltins property names.
	switch result {
	case "texture":					result = "textureImage"
	case "b":						result = "parameterB"
	case "c":						result = "parameterC"
	case "means":					result = "inputMeans"
	case "redVector":				result = "rVector"
	case "greenVector":				result = "gVector"
	case "blueVector":				result = "bVector"
	case "alphaVector":				result = "aVector"
	case "maximumStriationRadius":	result = "maxStriationRadius"
	case "color1":					result = "color0"
	case "color2":					result = "color1"
	case "radius1":					result = "radius0"
	case "radius2":					result = "radius1"
	case "image2" where filterName == "CIColorAbsoluteDifference":	// only substitute for this function
		result = "inputImage2"
	default:
		if result.hasSuffix(".") {	// to deal with data anomaly where "." is at end of parameter
			result = String(result.dropLast(1))
		}
	}
	return result
}

/// Builds one "name: Type = default" parameter declaration for a generated function
/// signature, mapping the filter's ObjC attribute class to the Swift type used by
/// CoreImage.CIFilterBuiltins and appending a default value when one is sensible.
/// - Parameters:
///   - inputKey: raw filter input key (e.g. "inputRadius")
///   - inputAttributes: that input's attribute dictionary from the filter
///   - filterName: owning filter name, used for special cases
/// - Returns: the parameter declaration, or nil when display name/class are missing
private func parameterStatement(inputKey: String, inputAttributes: [String: AnyObject], filterName: String) -> String? {

	guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,
		  let attributeClass: String = inputAttributes[kCIAttributeClass] as? String
	else { return nil }

	let inputName: String = parameterName(displayName: displayName, filterName: filterName)
	let attributeType: String? = inputAttributes[kCIAttributeType] as? String
	var convertedClass: String
	switch attributeClass {
	case "NSNumber":

		// NSNumber splits into Bool / Int / Float based on the attribute type,
		// with a few hacks where the filter reports missing or misleading type info.
		if attributeType == kCIAttributeTypeBoolean
			|| inputName == "extrapolate" { // Hack - missing info
			convertedClass = "Bool"
		} else if attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount
					|| inputName == "qualityLevel" || inputName == "count" { 	// Hack - missing or misleading info
			convertedClass = "Int"
		} else if [kCIAttributeTypeScalar, kCIAttributeTypeAngle, kCIAttributeTypeDistance, kCIAttributeTypeTime].contains(attributeType)
			|| inputName == "preferredAspectRatio"	// missing info
		{
			convertedClass = "Float"
		} else {
			print("\n// 🛑 unknown number type \(inputName): \(attributeType ?? "")")
			convertedClass = "Float"		// seems to be when no type is specified
		}
	case "CIVector":
		guard filterName != "CITemperatureAndTint" && filterName != "CIDepthBlurEffect" else {	// special case, should remain a CIVector
			convertedClass = "CIVector"
			break
		}
		// Rectangle vectors become CGRect; position/offset vectors become CGPoint.
		convertedClass = attributeType == kCIAttributeTypeRectangle
		? "CGRect"
		: attributeType == kCIAttributeTypePosition || attributeType == kCIAttributeTypeOffset
		? "CGPoint"
		: "CIVector"		// CIVector tends to have no attribute type
	case "NSAffineTransform":
		convertedClass = "CGAffineTransform"
	case "NSData":
		convertedClass = "Data"
	case "NSString":
		convertedClass = "String"
	case "NSArray":
		convertedClass = "[Any]"
	case "CGImageMetadataRef":
		convertedClass = "CGImageMetadata"
	case "NSObject":
		if inputName == "colorSpace" {
			convertedClass = "CGColorSpace"
		} else {
			convertedClass = attributeClass		// Unexpected case
			print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")")
		}
	case "NSValue":
		if attributeType == kCIAttributeTypeTransform {
			convertedClass = "CGAffineTransform"
		} else {
			convertedClass = attributeClass	// Unexpected case
			print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")")
		}
	default:
		// Other cases where the class is the same: CIImage, CIColor, etc.
		convertedClass = attributeClass
	}
	if inputName == "backgroundImage" && convertedClass == "CIImage" {
		convertedClass = "CIImage?"		// make optional, for our special identity handling
	}
	var defaultStatement: String = ""
	if let defaultValue: AnyObject = inputAttributes[kCIAttributeDefault] {

		// Only emit a default when the value is both sensible and representable in source.
		if hasReasonableDefaultValue(defaultValue, attributeType: attributeType, inputName: inputName) {
			let defaultValueString = formatSmart(defaultValue, attributeType: attributeType, inputName: inputName, filterName: filterName)
			if !defaultValueString.isEmpty {
				defaultStatement = " = \(defaultValueString)"
			}
		}
	}
	return "\(inputName): \(convertedClass)\(defaultStatement)"
}

// Look at value and/or context.
/// Decides whether a filter's default/identity value is worth emitting as a Swift
/// default parameter value — rejecting values that are arbitrary, impractical, or
/// impossible to express cleanly in generated source.
/// - Parameters:
///   - value: the default or identity value from the attribute dictionary
///   - attributeType: the `kCIAttributeType`, used to judge numeric/vector values
///   - inputName: parameter name, used for the colorSpace special case
/// - Returns: true when the value should appear as a default in the generated signature
private func hasReasonableDefaultValue(_ value: Any, attributeType: String?, inputName: String) -> Bool {
	if nil != value as? Data {
		return false	// Not feasible to have data anyhow
	} else if let number = value as? NSNumber {
		if attributeType == kCIAttributeTypeDistance {
			return number == 0
		} else if attributeType == kCIAttributeTypeInteger {
			return false
		} else if attributeType == kCIAttributeTypeCount {
			return false
		} else if attributeType == kCIAttributeTypeBoolean {
			return true
		} else if attributeType == kCIAttributeTypeAngle {
			return number.doubleValue <= Double.pi	// avoid those weird angles that don't make any sense
		} else if attributeType == kCIAttributeTypeScalar {
			return true	// not sure
		}
		// NSNumber with any other attribute type falls through to `return false` below.
	} else if let defaultVector = value as? CIVector {

		if defaultVector.count > 4 {
			return false
		}
		if attributeType == kCIAttributeTypeRectangle {
			return defaultVector == CIVector(x: 0, y: 0, z: 0, w: 0)	// only keep zero rectangle
		} else if attributeType == kCIAttributeTypePosition3 {
			return false
		} else if attributeType == kCIAttributeTypePosition {
			return defaultVector.x < 50 && defaultVector.y < 50		// seems like 50+ values are arbitrary coordinates
		} else if attributeType == kCIAttributeTypeOffset {
			// NOTE(review): this accepts offsets only when BOTH components are non-zero,
			// which contradicts the comment — `x == 0 && y == 0` may have been intended. TODO confirm.
			return defaultVector.x != 0 && defaultVector.y != 0		// any non-zero points seem pretty arbitrary
		}
	} else if let color = value as? CIColor {
		// Only emit the few colors that map to a named CIColor constant.
		return color == CIColor.black
		|| color == CIColor.white
		|| color == CIColor.clear
	} else if nil != value as? AffineTransform {
		return true
	} else if nil != value as? String {
		return true
	} else if inputName == "colorSpace" {	// it's a CFType so not so easy to compare
		return true
	} else {
		print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
		return true	// not sure yet
	}
	return false
}


/// Renders a filter default/identity value as Swift source text for the generated code.
/// - Parameters:
///   - value: the attribute value from the filter's attribute dictionary
///   - attributeType: the `kCIAttributeType` of the input, used to choose a formatting
///   - inputName: parameter name, used for special cases ("extrapolate", "colorSpace")
///   - filterName: owning filter name, used for affine-transform special cases
/// - Returns: Swift source code for the value (empty string if nothing could be rendered)
private func formatSmart(_ value: Any, attributeType: String?, inputName: String, filterName: String?) -> String {
	var result: String = ""
	if let number = value as? NSNumber {
		if attributeType == kCIAttributeTypeBoolean || inputName == "extrapolate" { // Hack - missing info
			result = number.boolValue.description
		} else {
			result = number.formatSmart
		}
	} else if let defaultVector = value as? CIVector {

		if attributeType == kCIAttributeTypeRectangle {
			result = defaultVector.formatRectSmart
		} else if attributeType == kCIAttributeTypePosition {
			result = defaultVector.formatPointSmart
		} else {
			result = defaultVector.formatVectorSmart
		}
	} else if let color = value as? CIColor {
		result = color.formatSmart
	} else if let string = value as? String {
		// Escape embedded quotes so the generated literal compiles.
		result = "\"" + string.replacingOccurrences(of: "\"", with: "\\\"") + "\""
	} else if inputName == "colorSpace" {
		// CGColorSpace is a CF type; check the type ID before force-casting.
		if CFGetTypeID(value as AnyObject) == CGColorSpace.typeID {
			let colorspace: CGColorSpace = value as! CGColorSpace
			if let name: String = colorspace.name as? String {
				// e.g. kCGColorSpaceDisplayP3 → CGColorSpace.displayP3
				var newName = name.replacing(/^kCGColorSpace/, with: "")
				newName = newName.prefix(1).lowercased() + newName.dropFirst()
				result = "CGColorSpace(name: CGColorSpace." + newName + ")!"
			}
		}
	} else if let transform = value as? AffineTransform {
		let transformIdentity: AffineTransform
#if canImport(UIKit)
		transformIdentity = CGAffineTransform.identity
#elseif canImport(AppKit)
		transformIdentity = NSAffineTransform()
#endif

		// Special case these filters to default to identity. Their default values are weird!
		if transform == transformIdentity || filterName == "CIAffineClamp" || filterName == "CIAffineTile" {
			result = "CGAffineTransform.identity"
		} else {
#if canImport(UIKit)
			let t: CGAffineTransform = transform
			// Bug fix: `ty` was previously emitted from `t.tx`, producing a wrong translation.
			result = "CGAffineTransform(a: \(t.a.format5), b: \(t.b.format5), c: \(t.c.format5), d: \(t.d.format5), tx: \(t.tx.format5), ty: \(t.ty.format5))"
#elseif canImport(AppKit)
			let t: NSAffineTransformStruct = transform.transformStruct
			result = "CGAffineTransform(a: \(t.m11.format5), b: \(t.m12.format5), c: \(t.m21.format5), d: \(t.m22.format5), tx: \(t.tX.format5), ty: \(t.tY.format5))"
#endif
		}
	} else {
		print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
		result = String(describing: value)
	}
	return result
}

// https://unicode-org.github.io/icu/userguide/strings/regexp.html

private extension String {
	/// "CIGaussianBlur" → "gaussianBlur"; also strips a trailing "Filter".
	var prettyFunction: String {
		self.replacing(/^CI/, with: "").replacing(/Filter$/, with: "").fixingCamelCase
	}

	// AbcDef -> abcDef but ABcdef -> aBcdef, ABCDEF -> abcDef - keep the last
	var fixingCamelCase: String {
		// Single leading capital (or the special-cased "SRGB" prefix): lowercase just the first character.
		if self.firstMatch(of: /^[A-Z][^A-Z]/) != nil || self.hasPrefix("SRGB") {
			return self.prefix(1).lowercased() + self.dropFirst()
		}
		// Run of two or more capitals: lowercase the run, but keep its final capital when it
		// begins the following word (i.e. a lowercase letter comes right after the run).
		// FIXME: Might need some tweaking to deal with complex characters. But since we are just modifying ASCII, this simple case is fine.
		if let capitalRun = self.firstMatch(of: /^[A-Z]{2,}/) {
			let lowered = self[capitalRun.range].lowercased()
			let runLength = lowered.count
			if self.dropFirst(runLength).firstMatch(of: /^[a-z]/) != nil {
				return String(lowered.dropLast()) + self.dropFirst(runLength - 1)
			}
			// Unusual: a digit (or nothing) follows the run, e.g. "PDF417…" — lowercase the whole run.
			return lowered + self.dropFirst(runLength)
		}
		return self
	}
}

// Format numbers with UP TO five decimal places

private extension Float {
	/// Formats the value with up to five decimal places and no thousands grouping.
	var format5: String {
		let numberFormatter = NumberFormatter()
		numberFormatter.numberStyle = .decimal
#if canImport(UIKit)
		numberFormatter.numberStyle = .none
#elseif canImport(AppKit)
		numberFormatter.hasThousandSeparators = false
#endif
		numberFormatter.maximumFractionDigits = 5
		return numberFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}
private extension Double {
	/// Formats the value with up to five decimal places and no thousands grouping.
	var format5: String {
		let numberFormatter = NumberFormatter()
		numberFormatter.numberStyle = .decimal
#if canImport(UIKit)
		numberFormatter.numberStyle = .none
#elseif canImport(AppKit)
		numberFormatter.hasThousandSeparators = false
#endif
		numberFormatter.maximumFractionDigits = 5
		return numberFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}
private extension CGFloat {
	/// Formats the value with up to five decimal places and no thousands grouping.
	var format5: String {
		let numberFormatter = NumberFormatter()
		numberFormatter.numberStyle = .decimal
#if canImport(UIKit)
		numberFormatter.numberStyle = .none
#elseif canImport(AppKit)
		numberFormatter.hasThousandSeparators = false
#endif
		numberFormatter.maximumFractionDigits = 5
		return numberFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}

private extension NSNumber {

	/// Formats the value with up to five decimal places and no thousands grouping.
	var format5: String {
		let numberFormatter = NumberFormatter()
		numberFormatter.numberStyle = .decimal
#if canImport(UIKit)
		numberFormatter.numberStyle = .none
#elseif canImport(AppKit)
		numberFormatter.hasThousandSeparators = false
#endif
		numberFormatter.maximumFractionDigits = 5
		return numberFormatter.string(from: self) ?? ""
	}

	/// Like `format5`, but renders well-known angle values as symbolic π expressions.
	var formatSmart: String {
		switch self.doubleValue {
		case Double.pi:
			return ".pi"
		case Double.pi/2:
			return ".pi/2"
		case Double.pi * 18:
			return ".pi*18"	// for vortexDistortion

			// What about triangleKaleidoscope 5.924285296593801
		default:
			return self.format5
		}
	}
}
private extension CIVector {
	/// Formats an X/Y pair as a CGPoint-style initializer, collapsing the
	/// origin to ".zero" where possible.
	var formatPointSmart: String {
		if x == 0 && y == 0 {
			return ".zero"
		} else {
			return ".init(x: \(x.format5), y: \(y.format5))"
		}
	}

	// Per CIVector's actual behavior (see `cgRectValue`), a CGRect is stored
	// as X = origin.x, Y = origin.y, Z = width, W = height. (Apple's
	// documentation for init(cgRect:) misleadingly says "X, Y, height and
	// width", which is what the previous code followed.)
	/// Formats a rectangle vector as a CGRect-style initializer, collapsing
	/// an empty rect at the origin to ".zero".
	var formatRectSmart: String {
		if x == 0 && y == 0 && z == 0 && w == 0 {
			return ".zero"
		} else {
			// FIX: width comes from Z and height from W; they were swapped.
			return ".init(x: \(x.format5), y: \(y.format5), width: \(z.format5), height: \(w.format5))"
		}
	}

	/// Formats the vector as a CIVector-style initializer with only as many
	/// components as `count` indicates (CIVector supports at most 4 here).
	var formatVectorSmart: String {
		switch count {
		case 0:
			return ".init()"
		case 1:
			return ".init(x: \(x.format5))"
		case 2:
			return ".init(x: \(x.format5), y: \(y.format5))"
		case 3:
			return ".init(x: \(x.format5), y: \(y.format5), z: \(z.format5))"
		case 4:
			return ".init(x: \(x.format5), y: \(y.format5), z: \(z.format5), w: \(w.format5))"
		default:
			return "🛑 no vector initializer for count > 4"
		}
	}
}
private extension CIColor {
	/// A Swift source representation of this color: a named constant when it
	/// matches one of the CIColor presets, otherwise a CIColor initializer
	/// that includes alpha and/or color space only when they are non-default.
	var formatSmart: String {

		switch self {
		case CIColor.black:    return "CIColor.black"	// Include "CIColor." so it's compatible with older OS
		case CIColor.white:    return "CIColor.white"
		case CIColor.gray:     return "CIColor.gray"
		case CIColor.red:      return "CIColor.red"
		case CIColor.green:    return "CIColor.green"
		case CIColor.blue:     return "CIColor.blue"
		case CIColor.cyan:     return "CIColor.cyan"
		case CIColor.magenta:  return "CIColor.magenta"
		case CIColor.yellow:   return "CIColor.yellow"
		case CIColor.clear:    return "CIColor.clear"
		default:
			let spaceName: String = colorSpace.name as? String ?? ""	// e.g. kCGColorSpaceDeviceRGB
			let spaceSuffix: String = spaceName.replacing(/^kCGColorSpace/, with: "")
			let spaceFormatted = "CGColorSpace." +  spaceSuffix.prefix(1).lowercased() + spaceSuffix.dropFirst()
			let sRGBName: String = CGColorSpace.sRGB as String

			// Some issues with kCGColorSpaceDeviceRGB since we would have to create that. Let's just ignore.
			// Treat sRGB and device RGB as "default" spaces that need no explicit colorSpace argument.
			let hasDefaultSpace = spaceName == sRGBName || spaceName == "kCGColorSpaceDeviceRGB"
			let isOpaque = alpha == 1.0

			if hasDefaultSpace {
				return isOpaque
					? "CIColor(red: \(red), green: \(green), blue: \(blue))"
					: "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha))"
			} else {
				return isOpaque
					? "CIColor(red: \(red), green: \(green), blue: \(blue), colorSpace: \(spaceFormatted))"
					: "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha), colorSpace: \(spaceFormatted))"
			}
		}
	}

}

private extension CIFilter {
	/// The filter's input keys (excluding the input image) whose declared
	/// identity value passes `hasReasonableDefaultValue`, i.e. keys for
	/// which a sensible default can be generated.
	var identityInputKeys: [String] {
		var matchingKeys: [String] = []
		for inputKey in inputKeys where inputKey != kCIInputImageKey {
			// Per-key attribute dictionary; empty when the key has none.
			let keyAttributes = attributes[inputKey] as? [String: AnyObject] ?? [:]
			guard let displayName = keyAttributes[kCIAttributeDisplayName] as? String,
				  let identityValue: Any = keyAttributes[kCIAttributeIdentity]
			else { continue }

			let attributeType = keyAttributes[kCIAttributeType] as? String
			let inputName = parameterName(displayName: displayName, filterName: self.name)
			if hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName) {
				matchingKeys.append(inputKey)
			}
		}
		return matchingKeys
	}
}



================================================
FILE: Generator/FunctionMinima.json
================================================
{
	"areaAverage": "11.0",
	"areaHistogram": "11.0",
	"areaMaximum": "11.0",
	"areaMaximumAlpha": "11.0",
	"areaMinimum": "11.0",
	"areaMinimumAlpha": "11.0",
	"areaMinMax": "11.0",
	"areaMinMaxRed": "11.0",
	"bumpDistortion": "11.0",
	"bumpDistortionLinear": "11.0",
	"circleSplashDistortion": "11.0",
	"circularWrap": "11.0",
	"colorAbsoluteDifference": "11.0",
	"colorThreshold": "11.0",
	"colorThresholdOtsu": "11.0",
	"columnAverage": "11.0",
	"displacementDistortion": "11.0",
	"droste": "11.0",
	"glassDistortion": "11.0",
	"glassLozenge": "11.0",
	"histogramDisplay": "11.0",
	"holeDistortion": "11.0",
	"kMeans": "11.0",
	"lightTunnel": "11.0",
	"ninePartStretched": "11.0",
	"ninePartTiled": "11.0",
	"pinchDistortion": "11.0",
	"rowAverage": "11.0",
	"stretchCrop": "11.0",
	"torusLensDistortion": "11.0",
	"twirlDistortion": "11.0",
	"vortexDistortion": "11.0",
	"convolutionRGB3X3": "12.0",
	"convolutionRGB5X5": "12.0",
	"convolutionRGB7X7": "12.0",
	"convolutionRGB9Horizontal": "12.0",
	"convolutionRGB9Vertical": "12.0",
	"linearLightBlendMode": "12.0",
	"personSegmentation": "12.0",
	"vividLightBlendMode": "12.0",
	"areaLogarithmicHistogram": "13.0",
	"convertLabToRGB": "13.0",
	"convertRGBtoLab": "13.0",

	"colorCubesMixedWithMask": "13.0",
	"colorCubeWithColorSpace": "13.0",
	"colorCube": "13.0",

	"attributedTextImageGenerator": "13.0",
	"textImageGenerator": "13.0"
}


================================================
FILE: Generator/MissingParameterDocumentation.json
================================================
{
	"CIBokehBlur" : {
		"softness" : "The softness of the bokeh effect"
	},
	"CIDepthBlurEffect" : {
		"_NOTE" : "THIS WAS GLEANED FROM WWDC2017#508 VIDEO; NO OFFICIAL APPLE DOCUMENTATION FOUND. NOT ACTUALLY USED HERE BECAUSE OF SEVERAL BUILT-IN INITIALIZERS.",
		"aperture" : "Simulated lens aperture to adjust blur for unfocused elements",
		"auxDataMetadata" : "UNKNOWN",
		"calibrationData" : "UNKNOWN",
		"chinPositions" : "Vector of up to 4 x,y positions indicating where peoples' chins are",
		"disparityImage" : "Grayscale image indicating depth; lighter is nearer.",
		"focusRectangle" : "Part of the rectangle to make sure to put into focus",
		"gainMap" : "UNKNOWN",
		"leftEyePositions" : "Vector of up to 4 x,y positions indicating where peoples' left eyes are",
		"lumaNoiseScale" : "UNKNOWN",
		"nosePositions" : "Vector of up to 4 x,y positions indicating where peoples' noses are",
		"rightEyePositions" : "Vector of up to 4 x,y positions indicating where peoples' right eyes are",
		"scaleFactor" : "Integrated downsampling (rather than doing it later) since this is computationally expensive",
		"shape" : "UNKNOWN"
	},
	"CIDepthOfField" : {
		"point0" : "A set of coordinates marking the first point to be focused on",
		"point1" : "A set of coordinates marking the second point to be focused on",
		"unsharpMaskIntensity" : "The intensity of the unsharp mask effect",
		"unsharpMaskRadius" : "The radius of the unsharpened mask effect applied to the in-focus area of effect"
	},
	"CIDroste" : {
		"insetPoint0" : "The x and y position that defines the first inset point",
		"insetPoint1" : "The x and y position that defines the second inset point",
		"periodicity" : "The amount of intervals",
		"rotation" : "The angle of the rotation, in radians",
		"strands" : "The amount of strands",
		"zoom" : "The zoom of the effect"
	},
	"CIEdgePreserveUpsampleFilter" : {
		"lumaSigma" : "Influence of the input image’s luma information on the upsampling operation",
		"smallImage" : "An image representing the reference for scaling the input image with the type CIImage",
		"spatialSigma" : "The influence of the input image’s spatial information on the upsampling operation"
	},
	"CIGuidedFilter" : {
		"_NOTE" : "THIS WAS GLEANED FROM DESCRIPTIONS OF THIS FILTER IN GENERAL; NO OFFICIAL APPLE DOCUMENTATION FOUND",
		"epsilon" : "Smoothness. A higher value means more smoothing."
	},
	"CIHueSaturationValueGradient" : {
		"dither" : "A boolean value specifying whether to distort the generated output",
		"softness" : "The softness of the generated color wheel"
	},
	"CILenticularHaloGenerator" : {
		"haloOverlap" : "The overlap of red, green, and blue halos. A value of 1 results in a full overlap."
	},
	"CIPerspectiveCorrection" : {
		"crop" : "A rectangle that specifies the extent of the corrected image"
	},
	"CIToneCurve" : {
		"point0" : "A vector containing the position of the first point of the tone curve",
		"point1" : "A vector containing the position of the second point of the tone curve",
		"point2" : "A vector containing the position of the third point of the tone curve",
		"point3" : "A vector containing the position of the fourth point of the tone curve",
		"point4" : "A vector containing the position of the fifth point of the tone curve"
	}
}


================================================
FILE: Generator/abstracts.json
================================================
{
"CIAccordionFoldTransition": "Transitions from one image to another of differing dimensions by unfolding and crossfading.",
"CIAdditionCompositing": "Adds color components to achieve a brightening effect.",
"CIAffineClamp": "Performs an affine transform on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards.",
"CIAffineTile": "Applies an affine transform to an image and then tiles the transformed image.",
"CIAffineTransform": "Applies an affine transform to an image.",
"CIAreaAverage": "Returns a single-pixel image that contains the average color for the region of interest.",
"CIAreaHistogram": "Returns a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area.",
"CIAreaMaximum": "Returns a single-pixel image that contains the maximum color components for the region of interest.",
"CIAreaMaximumAlpha": "Returns a single-pixel image that contains the color vector with the maximum alpha value for the region of interest.",
"CIAreaMinimum": "Returns a single-pixel image that contains the minimum color components for the region of interest.",
"CIAreaMinimumAlpha": "Returns a single-pixel image that contains the color vector with the minimum alpha value for the region of interest.",
"CIAztecCodeGenerator": "Generates an Aztec code (two-dimensional barcode) from input data.",
"CIBarsSwipeTransition": "Transitions from one image to another by passing a bar over the source image.",
"CIBlendWithAlphaMask": "Uses alpha values from a mask to interpolate between an image and the background.",
"CIBlendWithMask": "Uses values from a grayscale mask to interpolate between an image and the background.",
"CIBloom": "Softens edges and applies a pleasant glow to an image.",
"CIBoxBlur": "Blurs an image using a box-shaped convolution kernel.",
"CIBumpDistortion": "Creates a bump that originates at a specified point in the image.",
"CIBumpDistortionLinear": "Creates a concave or convex distortion that originates from a line in the image.",
"CICheckerboardGenerator": "Generates a checkerboard pattern.",
"CICircleSplashDistortion": "Distorts the pixels starting at the circumference of a circle and emanating outward.",
"CICircularScreen": "Simulates a circular-shaped halftone screen.",
"CICircularWrap": "Wraps an image around a transparent circle.",
"CICMYKHalftone": "Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.",
"CICode128BarcodeGenerator": "Generates a Code 128 one-dimensional barcode from input data.",
"CIColorBlendMode": "Uses the luminance values of the background with the hue and saturation values of the source image.",
"CIColorBurnBlendMode": "Darkens the background image samples to reflect the source image samples.",
"CIColorClamp": "Modifies color values to keep them within a specified range.",
"CIColorControls": "Adjusts saturation, brightness, and contrast values.",
"CIColorCrossPolynomial": "Modifies the pixel values in an image by applying a set of polynomial cross-products.",
"CIColorCube": "Uses a three-dimensional color table to transform the source image pixels.",
"CIColorCubeWithColorSpace": "Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.",
"CIColorDodgeBlendMode": "Brightens the background image samples to reflect the source image samples.",
"CIColorInvert": "Inverts the colors in an image.",
"CIColorMap": "Performs a nonlinear transformation of source color values using mapping values provided in a table.",
"CIColorMatrix": "Multiplies source color values and adds a bias factor to each color component.",
"CIColorMonochrome": "Remaps colors so they fall within shades of a single color.",
"CIColorPolynomial": "Modifies the pixel values in an image by applying a set of cubic polynomials.",
"CIColorPosterize": "Remaps red, green, and blue color components to the number of brightness values you specify for each color component.",
"CIColumnAverage": "Returns a 1-pixel high image that contains the average color for each scan column.",
"CIComicEffect": "Simulates a comic book drawing by outlining edges and applying a color halftone effect.",
"CIConstantColorGenerator": "Generates a solid color.",
"CIConvolution3X3": "Modifies pixel values by performing a 3x3 matrix convolution.",
"CIConvolution5X5": "Modifies pixel values by performing a 5x5 matrix convolution.",
"CIConvolution7X7": "Modifies pixel values by performing a 7x7 matrix convolution.",
"CIConvolution9Horizontal": "Modifies pixel values by performing a 9-element horizontal convolution.",
"CIConvolution9Vertical": "Modifies pixel values by performing a 9-element vertical convolution.",
"CICopyMachineTransition": "Transitions from one image to another by simulating the effect of a copy machine.",
"CICrop": "Applies a crop to an image.",
"CICrystallize": "Creates polygon-shaped color blocks by aggregating source pixel-color values.",
"CIDarkenBlendMode": "Creates composite image samples by choosing the darker samples (from either the source image or the background).",
"CIDepthOfField": "Simulates a depth of field effect.",
"CIDifferenceBlendMode": "Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value.",
"CIDiscBlur": "Blurs an image using a disc-shaped convolution kernel.",
"CIDisintegrateWithMaskTransition": "Transitions from one image to another using the shape defined by a mask.",
"CIDisplacementDistortion": "Applies the grayscale values of the second image to the first image.",
"CIDissolveTransition": "Uses a dissolve to transition from one image to another.",
"CIDivideBlendMode": "Divides the background image sample color from the source image sample color.",
"CIDotScreen": "Simulates the dot patterns of a halftone screen.",
"CIDroste": "Recursively draws a portion of an image in imitation of an M. C. Escher drawing.",
"CIEdges": "Finds all edges in an image and displays them in color.",
"CIEdgeWork": "Produces a stylized black-and-white rendition of an image that looks similar to a woodblock cutout.",
"CIEightfoldReflectedTile": "Produces a tiled image from a source image by applying an 8-way reflected symmetry.",
"CIExclusionBlendMode": "Produces an effect similar to that produced by the CIDifferenceBlendMode filter but with lower contrast.",
"CIExposureAdjust": "Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop.",
"CIFalseColor": "Maps luminance to a color ramp of two colors.",
"CIFlashTransition": "Transitions from one image to another by creating a flash.",
"CIFourfoldReflectedTile": "Produces a tiled image from a source image by applying a 4-way reflected symmetry.",
"CIFourfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.",
"CIFourfoldTranslatedTile": "Produces a tiled image from a source image by applying 4 translation operations.",
"CIGammaAdjust": "Adjusts midtone brightness.",
"CIGaussianBlur": "Spreads source pixels by an amount specified by a Gaussian distribution.",
"CIGaussianGradient": "Generates a gradient that varies from one color to another using a Gaussian distribution.",
"CIGlassDistortion": "Distorts an image by applying a glass-like texture.",
"CIGlassLozenge": "Creates a lozenge-shaped lens and distorts the portion of the image over which the lens is placed.",
"CIGlideReflectedTile": "Produces a tiled image from a source image by translating and smearing the image.",
"CIGloom": "Dulls the highlights of an image.",
"CIHardLightBlendMode": "Either multiplies or screens colors, depending on the source image sample color.",
"CIHatchedScreen": "Simulates the hatched pattern of a halftone screen.",
"CIHeightFieldFromMask": "Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask.",
"CIHexagonalPixellate": "Maps an image to colored hexagons whose color is defined by the replaced pixels.",
"CIHighlightShadowAdjust": "Adjust the tonal mapping of an image while preserving spatial detail.",
"CIHistogramDisplayFilter": "Generates a histogram image from the output of the CIAreaHistogram filter.",
"CIHoleDistortion": "Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.",
"CIHueAdjust": "Changes the overall hue, or tint, of the source pixels.",
"CIHueBlendMode": "Uses the luminance and saturation values of the background image with the hue of the input image.",
"CIKaleidoscope": "Produces a kaleidoscopic image from a source image by applying 12-way symmetry.",
"CILanczosScaleTransform": "Produces a high-quality, scaled version of a source image.",
"CILenticularHaloGenerator": "Simulates a lens flare.",
"CILightenBlendMode": "Creates composite image samples by choosing the lighter samples (either from the source image or the background).",
"CILightTunnel": "Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.",
"CILinearBurnBlendMode": "Darkens the background image samples to reflect the source image samples while also increasing contrast.",
"CILinearDodgeBlendMode": "Brightens the background image samples to reflect the source image samples while also increasing contrast.",
"CILinearGradient": "Generates a gradient that varies along a linear axis between two defined endpoints.",
"CILinearToSRGBToneCurve": "Maps color intensity from a linear gamma curve to the sRGB color space.",
"CILineOverlay": "Creates a sketch that outlines the edges of an image in black.",
"CILineScreen": "Simulates the line pattern of a halftone screen.",
"CILuminosityBlendMode": "Uses the hue and saturation of the background image with the luminance of the input image.",
"CIMaskedVariableBlur": "Blurs the source image according to the brightness levels in a mask image.",
"CIMaskToAlpha": "Converts a grayscale image to a white image that is masked by alpha.",
"CIMaximumComponent": "Returns a grayscale image from max(r,g,b).",
"CIMaximumCompositing": "Computes the maximum value, by color component, of two input images and creates an output image using the maximum values.",
"CIMedianFilter": "Computes the median value for a group of neighboring pixels and replaces each pixel value with the median.",
"CIMinimumComponent": "Returns a grayscale image from min(r,g,b).",
"CIMinimumCompositing": "Computes the minimum value, by color component, of two input images and creates an output image using the minimum values.",
"CIModTransition": "Transitions from one image to another by revealing the target image through irregularly shaped holes.",
"CIMotionBlur": "Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.",
"CIMultiplyBlendMode": "Multiplies the input image samples with the background image samples.",
"CIMultiplyCompositing": "Multiplies the color component of two input images and creates an output image using the multiplied values.",
"CINoiseReduction": "Reduces noise using a threshold value to define what is considered noise.",
"CIOpTile": "Segments an image, applying any specified scaling and rotation, and then assembles the image again to give an op art appearance.",
"CIOverlayBlendMode": "Either multiplies or screens the input image samples with the background image samples, depending on the background color.",
"CIPageCurlTransition": "Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.",
"CIPageCurlWithShadowTransition": "Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.",
"CIParallelogramTile": "Warps an image by reflecting it in a parallelogram, and then tiles the result.",
"CIPDF417BarcodeGenerator": "Generates a PDF417 code (two-dimensional barcode) from input data.",
"CIPerspectiveCorrection": "Applies a perspective correction, transforming an arbitrary quadrilateral region in the source image to a rectangular output image.",
"CIPerspectiveTile": "Applies a perspective transform to an image and then tiles the result.",
"CIPerspectiveTransform": "Alters the geometry of an image to simulate the observer changing viewing position.",
"CIPerspectiveTransformWithExtent": "Alters the geometry of a portion of an image to simulate the observer changing viewing position.",
"CIPhotoEffectChrome": "Applies a preconfigured set of effects that imitate vintage photography film with exaggerated color.",
"CIPhotoEffectFade": "Applies a preconfigured set of effects that imitate vintage photography film with diminished color.",
"CIPhotoEffectInstant": "Applies a preconfigured set of effects that imitate vintage photography film with distorted colors.",
"CIPhotoEffectMono": "Applies a preconfigured set of effects that imitate black-and-white photography film with low contrast.",
"CIPhotoEffectNoir": "Applies a preconfigured set of effects that imitate black-and-white photography film with exaggerated contrast.",
"CIPhotoEffectProcess": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized cool colors.",
"CIPhotoEffectTonal": "Applies a preconfigured set of effects that imitate black-and-white photography film without significantly altering contrast.",
"CIPhotoEffectTransfer": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized warm colors.",
"CIPinchDistortion": "Creates a rectangular area that pinches source pixels inward, distorting those pixels closest to the rectangle the most.",
"CIPinLightBlendMode": "Conditionally replaces background image samples with source image samples depending on the brightness of the source image samples.",
"CIPixellate": "Makes an image blocky by mapping the image to colored squares whose color is defined by the replaced pixels.",
"CIPointillize": "Renders the source image in a pointillistic style.",
"CIQRCodeGenerator": "Generates a Quick Response code (two-dimensional barcode) from input data.",
"CIRadialGradient": "Generates a gradient that varies radially between two circles having the same center.",
"CIRandomGenerator": "Generates an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range.",
"CIRippleTransition": "Transitions from one image to another by creating a circular wave that expands from the center point, revealing the new image in the wake of the wave.",
"CIRowAverage": "Returns a 1-pixel high image that contains the average color for each scan row.",
"CISaturationBlendMode": "Uses the luminance and hue values of the background image with the saturation of the input image.",
"CIScreenBlendMode": "Multiplies the inverse of the input image samples with the inverse of the background image samples.",
"CISepiaTone": "Maps the colors of an image to various shades of brown.",
"CIShadedMaterial": "Produces a shaded image from a height field.",
"CISharpenLuminance": "Increases image detail by sharpening.",
"CISixfoldReflectedTile": "Produces a tiled image from a source image by applying a 6-way reflected symmetry.",
"CISixfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 60 degrees.",
"CISmoothLinearGradient": "Generates a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints.",
"CISoftLightBlendMode": "Either darkens or lightens colors, depending on the input image sample color.",
"CISourceAtopCompositing": "Places the input image over the background image, then uses the luminance of the background image to determine what to show.",
"CISourceInCompositing": "Uses the background image to define what to leave in the input image, effectively cropping the input image.",
"CISourceOutCompositing": "Uses the background image to define what to take out of the input image.",
"CISourceOverCompositing": "Places the input image over the input background image.",
"CISpotColor": "Replaces one or more color ranges with spot colors.",
"CISpotLight": "Applies a directional spotlight effect to an image.",
"CISRGBToneCurveToLinear": "Maps color intensity from the sRGB color space to a linear gamma curve.",
"CIStarShineGenerator": "Generates a starburst pattern that is similar to a supernova; can be used to simulate a lens flare.",
"CIStraightenFilter": "Rotates the source image by the specified angle in radians.",
"CIStretchCrop": "Distorts an image by stretching and or cropping it to fit a target size.",
"CIStripesGenerator": "Generates a stripe pattern.",
"CISubtractBlendMode": "Subtracts the background image sample color from the source image sample color.",
"CISunbeamsGenerator": "Generates a sun effect.",
"CISwipeTransition": "Transitions from one image to another by simulating a swiping action.",
"CITemperatureAndTint": "Adapts the reference white point for an image.",
"CIToneCurve": "Adjusts tone response of the R, G, and B channels of an image.",
"CITorusLensDistortion": "Creates a torus-shaped lens and distorts the portion of the image over which the lens is placed.",
"CITriangleKaleidoscope": "Maps a triangular portion of an input image to create a kaleidoscope effect.",
"CITriangleTile": "Maps a triangular portion of image to a triangular area and then tiles the result.",
"CITwelvefoldReflectedTile": "Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.",
"CITwirlDistortion": "Rotates pixels around a point to give a twirling effect.",
"CIUnsharpMask": "Increases the contrast of the edges between pixels of different colors in an image.",
"CIVibrance": "Adjusts the saturation of an image while keeping pleasing skin tones.",
"CIVignette": "Reduces the brightness of an image at the periphery.",
"CIVignetteEffect": "Modifies the brightness of an image around the periphery of a specified region.",
"CIVortexDistortion": "Rotates pixels around a point to simulate a vortex.",
"CIWhitePointAdjust": "Adjusts the reference white point for an image and maps all colors in the source using the new reference.",
"CIZoomBlur": "Simulates the effect of zooming the camera while capturing the image."
}


================================================
FILE: Generator/docLookup.json
================================================
{
"accordionFoldTransition": "3228263-accordionfoldtransition",
"additionCompositing": "3228264-additioncompositing",
"affineClamp": "3228265-affineclamp",
"affineTile": "3228266-affinetile",
"areaAverage": "3547111-areaaverage",
"areaHistogram": "3547112-areahistogram",
"areaLogarithmicHistogram": "4401848-arealogarithmichistogram",
"areaMaximum": "3547114-areamaximum",
"areaMaximumAlpha": "3547113-areamaximumalpha",
"areaMinMax": "3547115-areaminmax",
"areaMinMaxRed": "3547116-areaminmaxred",
"areaMinimum": "3547118-areaminimum",
"areaMinimumAlpha": "3547117-areaminimumalpha",
"attributedTextImageGenerator": "3228267-attributedtextimagegenerator",
"aztecCodeGenerator": "3228268-azteccodegenerator",
"barcodeGenerator": "3228269-barcodegenerator",
"barsSwipeTransition": "3228270-barsswipetransition",
"bicubicScaleTransform": "3228271-bicubicscaletransform",
"blendWithAlphaMask": "3228272-blendwithalphamask",
"blendWithBlueMask": "3228273-blendwithbluemask",
"blendWithMask": "3228274-blendwithmask",
"blendWithRedMask": "3228275-blendwithredmask",
"bloom": "3228276-bloom",
"blurredRectangleGenerator": "4401849-blurredrectanglegenerator",
"bokehBlur": "3228277-bokehblur",
"boxBlur": "3228278-boxblur",
"bumpDistortion": "4401850-bumpdistortion",
"bumpDistortionLinear": "4401851-bumpdistortionlinear",
"cannyEdgeDetector": "4401852-cannyedgedetector",
"checkerboardGenerator": "3228279-checkerboardgenerator",
"circleSplashDistortion": "4401853-circlesplashdistortion",
"circularScreen": "3228280-circularscreen",
"circularWrap": "4401854-circularwrap",
"cmykHalftone": "3228259-cmykhalftone",
"code128BarcodeGenerator": "3228281-code128barcodegenerator",
"colorAbsoluteDifference": "3547119-colorabsolutedifference",
"colorBlendMode": "3228282-colorblendmode",
"colorBurnBlendMode": "3228283-colorburnblendmode",
"colorClamp": "3228284-colorclamp",
"colorControls": "3228285-colorcontrols",
"colorCrossPolynomial": "3228286-colorcrosspolynomial",
"colorCube": "3228287-colorcube",
"colorCubeWithColorSpace": "3228288-colorcubewithcolorspace",
"colorCubesMixedWithMask": "3228289-colorcubesmixedwithmask",
"colorCurves": "3228290-colorcurves",
"colorDodgeBlendMode": "3228291-colordodgeblendmode",
"colorInvert": "3228292-colorinvert",
"colorMap": "3228293-colormap",
"colorMatrix": "3228294-colormatrix",
"colorMonochrome": "3228295-colormonochrome",
"colorPolynomial": "3228296-colorpolynomial",
"colorPosterize": "3228297-colorposterize",
"colorThreshold": "3547120-colorthreshold",
"colorThresholdOtsu": "4401855-colorthresholdotsu",
"columnAverage": "3547121-columnaverage",
"comicEffect": "3228298-comiceffect",
"convertLabToRGB": "4401856-convertlabtorgb",
"convertRGBtoLab": "4401857-convertrgbtolab",
"convolution3X3": "3228299-convolution3x3",
"convolution5X5": "3228300-convolution5x5",
"convolution7X7": "3228301-convolution7x7",
"convolution9Horizontal": "3228302-convolution9horizontal",
"convolution9Vertical": "3228303-convolution9vertical",
"convolutionRGB3X3": "4401858-convolutionrgb3x3",
"convolutionRGB5X5": "4401859-convolutionrgb5x5",
"convolutionRGB7X7": "4401860-convolutionrgb7x7",
"convolutionRGB9Horizontal": "4401861-convolutionrgb9horizontal",
"convolutionRGB9Vertical": "4401862-convolutionrgb9vertical",
"copyMachineTransition": "3228304-copymachinetransition",
"coreMLModel": "3228305-coremlmodel",
"crystallize": "3228306-crystallize",
"darkenBlendMode": "3228307-darkenblendmode",
"depthOfField": "3228308-depthoffield",
"depthToDisparity": "3228309-depthtodisparity",
"differenceBlendMode": "3228310-differenceblendmode",
"discBlur": "3228311-discblur",
"disintegrateWithMaskTransition": "3228312-disintegratewithmasktransition",
"disparityToDepth": "3228313-disparitytodepth",
"displacementDistortion": "4401863-displacementdistortion",
"dissolveTransition": "3228314-dissolvetransition",
"dither": "3228315-dither",
"divideBlendMode": "3228316-divideblendmode",
"documentEnhancer": "3228317-documentenhancer",
"dotScreen": "3228318-dotscreen",
"droste": "4401864-droste",
"edgePreserveUpsample": "3228319-edgepreserveupsample",
"edgeWork": "3228320-edgework",
"edges": "3228321-edges",
"eightfoldReflectedTile": "3228322-eightfoldreflectedtile",
"exclusionBlendMode": "3228323-exclusionblendmode",
"exposureAdjust": "3228324-exposureadjust",
"falseColor": "3228325-falsecolor",
"flashTransition": "3228326-flashtransition",
"fourfoldReflectedTile": "3228327-fourfoldreflectedtile",
"fourfoldRotatedTile": "3228328-fourfoldrotatedtile",
"fourfoldTranslatedTile": "3228329-fourfoldtranslatedtile",
"gaborGradients": "3325508-gaborgradients",
"gammaAdjust": "3228330-gammaadjust",
"gaussianBlur": "3228331-gaussianblur",
"gaussianGradient": "3228332-gaussiangradient",
"glassDistortion": "4401865-glassdistortion",
"glassLozenge": "4401866-glasslozenge",
"glideReflectedTile": "3228333-glidereflectedtile",
"gloom": "3228334-gloom",
"hardLightBlendMode": "3228335-hardlightblendmode",
"hatchedScreen": "3228336-hatchedscreen",
"heightFieldFromMask": "3228337-heightfieldfrommask",
"hexagonalPixellate": "3228338-hexagonalpixellate",
"highlightShadowAdjust": "3228339-highlightshadowadjust",
"histogramDisplay": "3547122-histogramdisplay",
"holeDistortion": "4401867-holedistortion",
"hueAdjust": "3228340-hueadjust",
"hueBlendMode": "3228341-hueblendmode",
"hueSaturationValueGradient": "3228342-huesaturationvaluegradient",
"kMeans": "3547110-kmeans",
"kaleidoscope": "3228343-kaleidoscope",
"keystoneCorrectionCombined": "3325509-keystonecorrectioncombined",
"keystoneCorrectionHorizontal": "3325510-keystonecorrectionhorizontal",
"keystoneCorrectionVertical": "3325511-keystonecorrectionvertical",
"labDeltaE": "3228260-labdeltae",
"lanczosScaleTransform": "3228344-lanczosscaletransform",
"lenticularHaloGenerator": "3228345-lenticularhalogenerator",
"lightTunnel": "4401868-lighttunnel",
"lightenBlendMode": "3228346-lightenblendmode",
"lineOverlay": "3228347-lineoverlay",
"lineScreen": "3228348-linescreen",
"linearBurnBlendMode": "3228349-linearburnblendmode",
"linearDodgeBlendMode": "3228350-lineardodgeblendmode",
"linearGradient": "3228351-lineargradient",
"linearLightBlendMode": "4401869-linearlightblendmode",
"linearToSRGBToneCurve": "3228352-lineartosrgbtonecurve",
"luminosityBlendMode": "3228353-luminosityblendmode",
"maskToAlpha": "3228354-masktoalpha",
"maskedVariableBlur": "3228355-maskedvariableblur",
"maximumComponent": "3228356-maximumcomponent",
"maximumCompositing": "3228357-maximumcompositing",
"median": "3228358-median",
"meshGenerator": "3228359-meshgenerator",
"minimumComponent": "3228360-minimumcomponent",
"minimumCompositing": "3228361-minimumcompositing",
"mix": "3228362-mix",
"modTransition": "3228363-modtransition",
"morphologyGradient": "3228364-morphologygradient",
"morphologyMaximum": "3228365-morphologymaximum",
"morphologyMinimum": "3228366-morphologyminimum",
"morphologyRectangleMaximum": "3228367-morphologyrectanglemaximum",
"morphologyRectangleMinimum": "3228368-morphologyrectangleminimum",
"motionBlur": "3228369-motionblur",
"multiplyBlendMode": "3228370-multiplyblendmode",
"multiplyCompositing": "3228371-multiplycompositing",
"ninePartStretched": "4401871-ninepartstretched",
"ninePartTiled": "4401872-nineparttiled",
"noiseReduction": "3228372-noisereduction",
"opTile": "3228373-optile",
"overlayBlendMode": "3228374-overlayblendmode",
"pageCurlTransition": "3228375-pagecurltransition",
"pageCurlWithShadowTransition": "3228376-pagecurlwithshadowtransition",
"paletteCentroid": "3228377-palettecentroid",
"palettize": "3228378-palettize",
"parallelogramTile": "3228379-parallelogramtile",
"pdf417BarcodeGenerator": "3228261-pdf417barcodegenerator",
"personSegmentation": "4401873-personsegmentation",
"perspectiveCorrection": "3228380-perspectivecorrection",
"perspectiveRotate": "3325512-perspectiverotate",
"perspectiveTile": "3228381-perspectivetile",
"perspectiveTransform": "3228382-perspectivetransform",
"perspectiveTransformWithExtent": "3228383-perspectivetransformwithextent",
"photoEffectChrome": "3228384-photoeffectchrome",
"photoEffectFade": "3228385-photoeffectfade",
"photoEffectInstant": "3228386-photoeffectinstant",
"photoEffectMono": "3228387-photoeffectmono",
"photoEffectNoir": "3228388-photoeffectnoir",
"photoEffectProcess": "3228389-photoeffectprocess",
"photoEffectTonal": "3228390-photoeffecttonal",
"photoEffectTransfer": "3228391-photoeffecttransfer",
"pinLightBlendMode": "3228392-pinlightblendmode",
"pinchDistortion": "4401874-pinchdistortion",
"pixellate": "3228393-pixellate",
"pointillize": "3228394-pointillize",
"qrCodeGenerator": "3228262-qrcodegenerator",
"radialGradient": "3228395-radialgradient",
"randomGenerator": "3228396-randomgenerator",
"rippleTransition": "3228397-rippletransition",
"roundedRectangleGenerator": "3335007-roundedrectanglegenerator",
"roundedRectangleStrokeGenerator": "4401875-roundedrectanglestrokegenerator",
"rowAverage": "3547123-rowaverage",
"sRGBToneCurveToLinear": "3228398-srgbtonecurvetolinear",
"saliencyMap": "3228399-saliencymap",
"saturationBlendMode": "3228400-saturationblendmode",
"screenBlendMode": "3228401-screenblendmode",
"sepiaTone": "3228402-sepiatone",
"shadedMaterial": "3228403-shadedmaterial",
"sharpenLuminance": "3228404-sharpenluminance",
"sixfoldReflectedTile": "3228405-sixfoldreflectedtile",
"sixfoldRotatedTile": "3228406-sixfoldrotatedtile",
"smoothLinearGradient": "3228407-smoothlineargradient",
"sobelGradients": "4401876-sobelgradients",
"softLightBlendMode": "3228408-softlightblendmode",
"sourceAtopCompositing": "3228409-sourceatopcompositing",
"sourceInCompositing": "3228410-sourceincompositing",
"sourceOutCompositing": "3228411-sourceoutcompositing",
"sourceOverCompositing": "3228412-sourceovercompositing",
"spotColor": "3228413-spotcolor",
"spotLight": "3228414-spotlight",
"starShineGenerator": "3228415-starshinegenerator",
"straighten": "3228416-straighten",
"stretchCrop": "4401877-stretchcrop",
"stripesGenerator": "3228417-stripesgenerator",
"subtractBlendMode": "3228418-subtractblendmode",
"sunbeamsGenerator": "3228419-sunbeamsgenerator",
"swipeTransition": "3228420-swipetransition",
"temperatureAndTint": "3228421-temperatureandtint",
"textImageGenerator": "3228422-textimagegenerator",
"thermal": "3228423-thermal",
"toneCurve": "3228424-tonecurve",
"torusLensDistortion": "4401879-toruslensdistortion",
"triangleKaleidoscope": "3228425-trianglekaleidoscope",
"triangleTile": "3228426-triangletile",
"twelvefoldReflectedTile": "3228427-twelvefoldreflectedtile",
"twirlDistortion": "4401880-twirldistortion",
"unsharpMask": "3228428-unsharpmask",
"vibrance": "3228429-vibrance",
"vignette": "3228431-vignette",
"vignetteEffect": "3228430-vignetteeffect",
"vividLightBlendMode": "4401881-vividlightblendmode",
"vortexDistortion": "4401882-vortexdistortion",
"whitePointAdjust": "3228432-whitepointadjust",
"xRay": "3228433-xray",
"zoomBlur": "3228434-zoomblur"
}


================================================
FILE: LICENSE.txt
================================================
MIT License

Copyright (c) 2023 Dan Wood

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

================================================
FILE: Package.swift
================================================
// swift-tools-version: 5.7
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

let package = Package(
    name: "SwiftUICoreImage",
    platforms: [
        .macOS(.v10_15),
        .iOS(.v13)
    ],
    products: [
        // The single library product this package exposes to clients.
        .library(
            name: "SwiftUICoreImage",
            targets: ["SwiftUICoreImage"]),
    ],
    dependencies: [
        // No external dependencies — everything builds on the system frameworks.
    ],
    targets: [
        // One library target; all shipped source lives in the top-level Sources directory.
        .target(
            name: "SwiftUICoreImage",
            dependencies: [],
            path: "Sources")
    ]
)


================================================
FILE: README.md
================================================
# SwiftUICoreImage

Help for using Core Image within the context of SwiftUI. Also useful even without SwiftUI.

## Introduction

Core Image is a wonderful image-processing toolkit in macOS and iOS, but it's a bit clunky to use. Even after Apple added Swift APIs to many of the filters ([CoreImage.CIFilterBuiltins](https://developer.apple.com/documentation/coreimage/methods_and_protocols_for_filter_creation)), it's still pretty tedious to chain filters to images.

The purpose of this package is to provide an easier way to chain multiple filters to CIImage instances and then render them into SwiftUI (or any other context — SwiftUI is not needed).

```Swift
    Image(ciImage: CIImage("Bernie.jpeg")
        .sepiaTone(intensity: sepia)
        .recropping { image in
            image
                .clampedToExtent(active: clamped)
                .gaussianBlur(radius: gaussianBlurRadius)
        }
    )
        .resizable()
        .aspectRatio(contentMode: .fit)
```

## Manifest

Included in this package is:

 * CIImage-Filters.swift
    * about 200 modifiers on `CIImage` that return a new modified `CIImage` (or the original if unmodified)
    * 20 static functions that return a newly generated `CIImage`
    * Includes filters up to iOS 18, macOS 15
* CIImage-Extensions.swift
    * Convenience initializers for `CIImage` from a resource name and from an `NSImage`/`UIImage`
    * Modifiers for `CIImage` to return cropped, scaled, etc. to be easier to work with SwiftUI
    * Overloads of several built-in `CIImage` modifier functions that take an `active` boolean parameter
* Image-Extensions.swift
    * Convenience initializer to create a SwiftUI `Image` from a `CIImage`

## How This Works

Similarly to how SwiftUI view modifiers each return a modified `View` instance, these modifiers on `CIImage` take care of the core image chaining by creating a corresponding `CIFilter`, hooking up the `inputImage` for you, and returning the resulting `outputImage`. 

When creating SwiftUI code, I think it's important that you can use [Inert Modifiers](https://developer.apple.com/videos/play/wwdc2021/10022/?time=2303) in which you pass in some parameter that causes the modifier to have no effect. (For instance, specifying opacity of 1.0 or padding of 0.0 to a view.)  

In this code, I've made sure that each of our image modifiers comes with inert modifiers: in some cases it's passing in a parameter that clearly has no effect (e.g. zero intensity, zero radius); or it's a nil background image when combining with another image; or a boolean `active` parameter. If the parameter(s) specified would cause no change in the image, then the identity (self) is returned forthwith.

The contents of CIImage-Filters.swift are generated source code, using code that I've included in this repository (`CIImage-Generation.swift`, not included in the package import). This loops through the core image metadata that Apple provides (`CIFilter.filterNames(inCategories: nil)`). Unfortunately this list is somewhat out of date and contains a number of inconsistencies that I've done my best to overcome.  There are some JSON files that provide additional metadata such as a list of the functions that actually do have online documentation — 56 functions aren't documented so some guesswork is needed — or repairs to missing or obsolete documentation. You probably won't need to run this code unless you have some special requirements or the list has been updated in a future OS release.

## Using With SwiftUI

Remember that Core Image operations are really just a "recipe" for the processing steps; the actual work is not performed until the image needs to be rendered to a bitmap. 

Instead of creating a SwiftUI `Image` using a [built-in initializer](https://developer.apple.com/documentation/swiftui/image) from a resource name or other image type (`CGImage`, `NSImage`, `UIImage`), this code provides a new initializer to create an `Image` from a `CIImage`. When SwiftUI needs to render the image, the Core Image is rendered to the screen.

Your typical approach, then, will be to create an `Image`, passing in a `CIImage` created using one of the [built-in initializers](https://developer.apple.com/documentation/coreimage/ciimage) or the convenience methods included here to create from a resource name or another image type.

Then, just chain modifiers to that `CIImage` to indicate what to modify.

Many modifiers are simple. For instance:

```Swift
    Image(ciImage: CIImage("Halloween.jpeg")
        .xRay()
    )
```

If you wish to toggle whether the filter is applied, use the `active` parameter (default value of `true`):

```Swift
    Image(ciImage: CIImage("Halloween.jpeg")
        .xRay(active: isMachineOn)
    )
```

Chain any number of modifiers found in `CIImage-Filters.swift` to construct the desired result.

### Image Scaling

Many Core Image filters use pixel values for parameters. Therefore, you may need to scale an image to an appropriate size _before_ applying operations. For example, applying a 10-pixel-radius blur to a 6000⨉4000 image that is then scaled down to 300⨉200 might not yield what you want; perhaps you want to first scale the image to 300⨉200 and then apply the 10-pixel-radius blur.

Core Image provides a scaling operation (`CILanczosScaleTransform` and `lanczosScaleTransform()`) but this package also includes more convenient alternatives: `scaledToFill()` and `scaledToFit()` where you pass in the dimensions you want.

A typical use of this works well in conjunction with `GeometryReader`. For example:

```Swift
    GeometryReader { geo in
        let geoSize: CGSize = geo.frame(in: .local).integral.size
        // Resize image to double the frame size, assuming we are on a retina display
        let newSize: CGSize = CGSize(width: geoSize.width * 2,
                                    height: geoSize.height * 2)

        Image(ciImage: CIImage("M83.jpeg")
            .scaledToFit(newSize)
            .sharpenLuminance(sharpness: 1.0, radius: 5)
        )
            .resizable()    // Make sure retina image is scaled to fit
            .aspectRatio(contentMode: .fit)
    }
``` 
![Compare original, sharpened without pre-scaling, sharpened after pre-scaled](./Resources/sharpening.jpeg)


## Using Without SwiftUI

SwiftUI is not needed at all. Just create a `CIImage` and perform operations. Then, render to a bitmap.

```Swift
    let tiledImage: CIImage = CIImage("HeyGoodMorning.png")
        .triangleTile(center: .zero, angle: 0.356, width: 2.0)

    imageView.image = UIImage(ciImage: tiledImage)
```

## Other Notes

If you've used Core Image, you'll know that sometimes you need to play with the extent of an image, e.g. clamping an image to have infinite edges before applying a gaussian blur, then re-cropping to the image's original extent. To accomplish this, you can use the **`recropping`** modifier which is followed by a closure. The operation saves the extent of the image, applies whatever is in the closure, and then re-crops to that extent. In the example below, the image in `ciImage` is converted into an image with the pixel colors along its edges extend infinitely in all directions, then it is blurred, and then upon exit from the closure, the returned image is re-cropped. 

```Swift
    ciImage
        .recropping { image in
            image
                .clampedToExtent()
                .gaussianBlur(radius: 10)
        }
```

![Compare unblurred, improper blurring, and proper blurring](./Resources/blurring.jpeg)

The `recropping` modifier is also useful if you find that a filter (e.g. `comicEffect()`) has grown your image's extent slightly and you want to clamp it to its original size.

Another useful operation is **`replacing`**. Much like `recropping` except that it does not mess with the extent of the image. You pass in a closure, which starts with the image you were working with; your closure returns a new image. This can be useful when working with the compositing operations in Core Image, which require a *background* image to be passed in.  What if your chain of operations is on the background image, and you want to overlay something on top? Just wrap your operation in `.replacing` and return the composited image.

```Swift
    ciImage
        .replacing { backgroundImage in
            ciImage2
                .sourceAtopCompositing(backgroundImage: backgroundImage)
        }
```

In this case, the image in `ciImage2` is the foreground image, placed atop the `backgroundImage`, then returned to the chain of operations.

## Using Package

In Xcode, Choose File > Add Packages… then enter the URL of this repository into the search bar, and continue from there.

In your code:

```Swift
import SwiftUICoreImage
```

That's it!

---

## Future Improvements

Rather than generating repetitive code, it would be nice to define some macros that expand to the repetitive code!
The advantage of this is that one could just import the macro package and just define only the filters they want,
rather than defining all 200+ mostly-unused filters.

[Apparently](https://forums.swift.org/t/macros-attached-macros-to-methods-and-functions/65531/6) 
this would require [Function Body Macros](https://github.com/swiftlang/swift-evolution/blob/main/proposals/0415-function-body-macros.md) 
which are not available in Swift 5.x but might make it into Swift 6.0.

Ideally we would specify something like this:
```
 	@CoreImageExtension
	func pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage
```

And this would fill in the body with some code that does the following:
 * Guard statement to return self if active flag is false (or other inert modifier; maybe we'd have to indicate which parameter is this?)
 * Create built-in CIFilter object based off of name of the function being expanded
 * Set all the parameters as specified in the function parameters. Maybe need some way to indicate parameters that need to be cast to another type
 * return the outputImage from the filter

---

Please file an issue or pull request if you can think of an improvement to the code or documentation of the generated filters, 
or find any other helpful utilities for manipulating Core Images in this toolkit!


================================================
FILE: Sources/CIImage-Extensions.swift
================================================
//
//  CIImage-Extensions.swift
//  SwiftUI Core Image
//
//  Created by Dan Wood on 5/9/23.
//

import SwiftUI
import CoreImage
import CoreImage.CIFilterBuiltins

public extension CIImage {

	/// Create a `CIImage` from a named image resource.
	///
	/// Pretty fast — the system caches image resources, so subsequent invocations
	/// with the same name are cheap. If no matching resource can be loaded,
	/// an empty `CIImage` is produced instead.
	convenience init(_ name: String, bundle: Bundle? = nil) {
#if canImport(UIKit)
		guard let loaded = UIImage(named: name, in: bundle, with: nil) else {
			self.init()
			return
		}
		self.init(uiImage: loaded)
#elseif canImport(AppKit)
		let loaded: NSImage?
		if let bundle {
			loaded = bundle.image(forResource: name)
		} else {
			loaded = NSImage(named: name)
		}
		guard let loaded else {
			self.init()
			return
		}
		self.init(nsImage: loaded)
#endif
	}

#if canImport(UIKit)
	/// Create a `CIImage` from a `UIImage` via its backing `CGImage`.
	/// Produces an empty `CIImage` when the `UIImage` has no `CGImage` representation.
	convenience init(uiImage: UIImage) {
		guard let cgImage = uiImage.cgImage else {
			self.init()
			return
		}
		self.init(cgImage: cgImage)
	}
#elseif canImport(AppKit)
	/// Create a `CIImage` from an `NSImage` via a generated `CGImage`.
	/// Produces an empty `CIImage` when no `CGImage` can be generated.
	convenience init(nsImage: NSImage) {
		// TODO: Maybe consider NSGraphicsContext
		guard let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
			self.init()
			return
		}
		self.init(cgImage: cgImage)
	}
#endif

	/// Useful for debugging when chaining multiple CIImage modifiers together.
	/// Logs the caller's file/line plus the current extent, then returns `self` unchanged.
	func logExtent(file: String = #file, line: Int = #line) -> CIImage {
		NSLog("\(file):\(line) \(self.extent)")
		return self
	}

}

// MARK: USEFUL EXTENSIONS FOR WORKING IN A SWIFTUI-LIKE FASHION

public extension CIImage {

	/// Save the extent and then re-crop to that extent after applying whatever is in the closure.
	///
	/// Useful when a filter chain (e.g. `clampedToExtent()` followed by a blur) grows the
	/// image's extent and you want the result back at the original size.
	/// - Parameter apply: transformation to run on `self` before re-cropping.
	/// - Returns: the transformed image, cropped to `self`'s original extent.
	func recropping(apply: (CIImage) -> CIImage) -> CIImage {
		let savedExtent: CGRect = extent
		let newCIImage = apply(self)
		let cropped = newCIImage.cropped(to: savedExtent)
		return cropped
	}

	/// Apply to whatever is in the closure. Useful if the current image is used as a parameter
	/// to a new image process (e.g. as the background of a compositing filter).
	/// - Parameter apply: transformation that receives `self` and returns a replacement image.
	/// - Returns: whatever the closure returns; the extent is not adjusted.
	func replacing(apply: (CIImage) -> CIImage) -> CIImage {
		let newCIImage = apply(self)
		return newCIImage
	}

	/// Resize an image so it fully fills the container, cropping in the center as needed.
	///
	/// - Parameter size: target size in pixels; pass `nil` for no change (inert modifier).
	/// - Returns: the scaled and center-cropped image; `self` when `size` is nil or when the
	///   current extent is empty or infinite (a scale factor could not be computed).
	@available(macOS 10.15, *)
	func scaledToFill(_ size: CGSize?) -> CIImage {
		guard let size else { return self }
		let currentSize = extent.size
		// An empty extent (e.g. a failed-to-load image) would divide by zero, and an
		// infinite extent would yield a zero scale — treat both as inert.
		guard !extent.isInfinite, currentSize.width > 0, currentSize.height > 0 else { return self }
		let largerRatio: CGFloat = max(size.width / currentSize.width, size.height / currentSize.height)
		let newSize: CGSize = CGSize(width: currentSize.width * largerRatio, height: currentSize.height * largerRatio)
		// Scale to the larger of two ratios so it fills
		let scaled = self.lanczosScaleTransform(scale: Float(largerRatio))
		// NOTE(review): clamped before cropping, presumably so edge rounding from the
		// scale transform can't leave transparent slivers in the crop — confirm.
		let clamped = scaled.clampedToExtent()
		let cropped = clamped.cropped(to: CGRect(x: (newSize.width - size.width) / 2,
												 y: (newSize.height - size.height) / 2,
												 width: size.width, height: size.height))
		return cropped
	}

	/// Resize an image so it fully fits in container, centered as needed. No cropping.
	///
	/// - Parameter size: target size in pixels; pass `nil` for no change (inert modifier).
	/// - Returns: the scaled image (cropped only to its own fitted bounds at the origin);
	///   `self` when `size` is nil or when the current extent is empty or infinite.
	@available(macOS 10.15, *)
	func scaledToFit(_ size: CGSize?) -> CIImage {
		guard let size else { return self }
		let currentSize = extent.size
		// Same degenerate-extent guard as scaledToFill(_:): avoid NaN/zero scale factors.
		guard !extent.isInfinite, currentSize.width > 0, currentSize.height > 0 else { return self }
		let smallerRatio: CGFloat = min(size.width / currentSize.width, size.height / currentSize.height)
		let newSize: CGSize = CGSize(width: currentSize.width * smallerRatio, height: currentSize.height * smallerRatio)
		// Scale to the smaller of two ratios so it fits
		let scaled = self.lanczosScaleTransform(scale: Float(smallerRatio))
		let clamped = scaled.clampedToExtent()
		let cropped = clamped.cropped(to: CGRect(origin: .zero, size: newSize))
		return cropped
	}

	/// convenience, to be similar to SwiftUI view offset
	/// - Parameter offset: translation in pixels; `.zero` is inert (returns `self`).
	func offset(by offset: CGSize) -> CIImage {
		guard offset != .zero else { return self }
		return self.transformed(by: CGAffineTransform(translationX: offset.width, y: offset.height))
	}

}

// MARK: OVERLOADS OF EXISTING CIIMAGE OPERATIONS SO WE CAN PASS IN 'ACTIVE' BOOLEAN TO BE ABLE TO HAVE INERT MODIFIER

public extension CIImage {

	// No overloads needed for these — each already accepts an argument that makes it inert:
	//   transformed(by:) / transformed(by:highQualityDownsample:) — pass CGAffineTransform.identity
	//   composited(over:)                                         — pass an empty image
	//   cropped(to:) / clamped(to:)                               — pass CGRect.infinite
	//   settingProperties(_:)                                     — pass an empty dictionary

	// Probably not worth wrapping:
	//   oriented(forExifOrientation:), oriented(_:),
	//   matchedToWorkingSpace(from:), matchedFromWorkingSpace(to:),
	//   insertingIntermediate(), insertingIntermediate(cache:),
	//   convertingWorkingSpaceToLab(), convertingLabToWorkingSpace()

	// applyingFilter(_:parameters:) and applyingFilter(_:) aren't wrapped, since the whole
	// point of this package is a dedicated modifier per filter.

	// applyingGaussianBlur(sigma:) isn't wrapped because gaussianBlur(radius:) is equivalent —
	// sigma is just the pixel radius.

	// The following built-ins each get an `active:` overload so callers can toggle them inline.

	/// Return a new infinite image by replicating the edge pixels of the receiver image.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.10, *)
	func clampedToExtent(active: Bool = true) -> CIImage {
		active ? clampedToExtent() : self
	}

	/// Return a new image by multiplying the receiver's RGB values by its alpha.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.12, *)
	func premultiplyingAlpha(active: Bool = true) -> CIImage {
		active ? premultiplyingAlpha() : self
	}

	/// Return a new image by dividing the receiver's RGB values by its alpha.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.12, *)
	func unpremultiplyingAlpha(active: Bool = true) -> CIImage {
		active ? unpremultiplyingAlpha() : self
	}

	/// Return a new image with alpha set to 1 within the rectangle and 0 outside.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.12, *)
	func settingAlphaOne(in extent: CGRect, active: Bool = true) -> CIImage {
		active ? settingAlphaOne(in: extent) : self
	}

	/// Returns a new image by changing the receiver's sample mode to bilinear interpolation.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.13, *)
	func samplingLinear(active: Bool = true) -> CIImage {
		active ? samplingLinear() : self
	}

	/// Returns a new image by changing the receiver's sample mode to nearest neighbor.
	/// Equivalent to the CISampleNearest filter.
	/// - Parameter active: when `false`, returns `self` unchanged (inert modifier).
	@available(macOS 10.13, *)
	func samplingNearest(active: Bool = true) -> CIImage {
		active ? samplingNearest() : self
	}
}



================================================
FILE: Sources/CIImage-Filters.swift
================================================
//
// Automatically generated by CIImage-Generation.swift - do not edit
//

import Foundation
import CoreImage
import CoreImage.CIFilterBuiltins
import CoreML
import AVFoundation

public extension CIImage {

	//
	// MARK: IMAGE-TO-IMAGE FILTERS
	//

	/// Accordion Fold Transition
	///
	/// Transitions from one image to another of differing dimensions by unfolding and crossfading.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228263-accordionfoldtransition)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAccordionFoldTransition)
	///
	/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - targetImage: The target image for a transition.
	///   - bottomHeight: The height in pixels from the bottom of the image to the bottom of the folded part of the transition. (0...)
	///   - numberOfFolds: The number of folds used in the transition. (1...50)
	///   - foldShadowAmount: A value that specifies the intensity of the shadow in the transition. (0...1)
	///   - time: The duration of the effect. (0...1)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func accordionFoldTransition(targetImage: CIImage,
								 bottomHeight: Float = 0,
								 numberOfFolds: Float = 3,
								 foldShadowAmount: Float = 0.1,
								 time: Float,
								 active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.accordionFoldTransition() // CIAccordionFoldTransition
		ciFilter.inputImage = self
		ciFilter.targetImage = targetImage
		ciFilter.bottomHeight = bottomHeight
		ciFilter.numberOfFolds = numberOfFolds
		ciFilter.foldShadowAmount = foldShadowAmount
		ciFilter.time = time
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Addition
	///
	/// Adds color components to achieve a brightening effect. This filter is typically used to add highlights and lens flare effects.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228264-additioncompositing)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAdditionCompositing)
	///
	/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func additionCompositing(backgroundImage: CIImage?) -> CIImage {
		guard let background = backgroundImage else { return self }

		let ciFilter = CIFilter.additionCompositing() // CIAdditionCompositing
		ciFilter.inputImage = self
		ciFilter.backgroundImage = background
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Affine Clamp
	///
	/// Performs an affine transformation on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards. This filter performs similarly to the “Affine Transform” filter except that it produces an image with infinite extent. You can use this filter when you need to blur an image but you want to avoid a soft, black fringe along the edges.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228265-affineclamp)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineClamp)
	///
	/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - transform: The transform to apply to the image.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func affineClamp(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {
		// The identity transform is a no-op; skip building the filter.
		if transform == .identity { return self }

		let ciFilter = CIFilter.affineClamp() // CIAffineClamp
		ciFilter.inputImage = self
		ciFilter.transform = transform
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Affine Tile
	///
	/// Applies an affine transformation to an image and then tiles the transformed image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228266-affinetile)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineTile)
	///
	/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - transform: The transform to apply to the image.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func affineTile(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {
		// The identity transform is a no-op; skip building the filter.
		if transform == .identity { return self }

		let ciFilter = CIFilter.affineTile() // CIAffineTile
		ciFilter.inputImage = self
		ciFilter.transform = transform
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	// ℹ️ CIAffineTransform already has a CIImage method: func transformed(by: CGAffineTransform) -> CIImage


	/// Area Alpha Weighted Histogram
	///
	/// Calculates alpha-weighted histograms of the unpremultiplied R, G, B channels for the specified area of an image. The output image is a one pixel tall image containing the histogram data for the RGB channels.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401846-areaalphaweightedhistogram)
	///
	/// Categories: Reduction, Video, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that defines the extent of the effect.
	///   - scale: The scale value to use for the histogram values. If the scale is 1.0 and the image is opaque, then the bins in the resulting image will add up to 1.0. (0...)
	///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 18, macOS 15.0, *)
	func areaAlphaWeightedHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaAlphaWeightedHistogram() // CIAreaAlphaWeightedHistogram
		ciFilter.inputImage = self
		ciFilter.extent = extent
		ciFilter.scale = scale
		ciFilter.count = count
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Average
	///
	/// Calculates the average color for the specified area in an image, returning the result in a pixel.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547111-areaaverage)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaAverage)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a single-pixel image  that contains the average color for the region of interest, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaAverage(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaAverage() // CIAreaAverage
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Bounds Red
	///
	/// Calculates the approximate bounding box of pixels within the specified area of an image where the red component values are non-zero. The result is 1x1 pixel image where the RGBA values contain the normalized X,Y,W,H dimensions of the bounding box.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401847-areaboundsred)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 18, macOS 15.0, *)
	func areaBoundsRed(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaBoundsRed() // CIAreaBoundsRed
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Histogram
	///
	/// Calculates histograms of the R, G, B, and A channels of the specified area of an image. The output image is a one pixel tall image containing the histogram data for all four channels.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547112-areahistogram)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaHistogram)
	///
	/// Categories: Reduction, Video, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that, after intersection with the image extent, specifies the subregion of the image that you want to process.
	///   - scale: The scale value to use for the histogram values. If the scale is 1.0, then the bins in the resulting image will add up to 1.0. (0...)
	///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
	///   - active: should this filter be applied
	/// - Returns: a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaHistogram() // CIAreaHistogram
		ciFilter.inputImage = self
		ciFilter.extent = extent
		ciFilter.scale = scale
		ciFilter.count = count
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Logarithmic Histogram
	///
	/// Calculates histogram of the R, G, B, and A channels of the specified area of an image. Before binning, the R, G, and B channel values are transformed by the log base two function. The output image is a one pixel tall image containing the histogram data for all four channels.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401848-arealogarithmichistogram)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that defines the extent of the effect.
	///   - scale: The amount of the effect. (0...)
	///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
	///   - minimumStop: The minimum of the range of color channel values to be in the logarithmic histogram image.
	///   - maximumStop: The maximum of the range of color channel values to be in the logarithmic histogram image.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 16, macOS 13.0, *)
	func areaLogarithmicHistogram(extent: CGRect,
								  scale: Float = 1,
								  count: Int = 64,
								  minimumStop: Float = -10,
								  maximumStop: Float = 4,
								  active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaLogarithmicHistogram() // CIAreaLogarithmicHistogram
		ciFilter.inputImage = self
		ciFilter.extent = extent
		ciFilter.scale = scale
		ciFilter.count = count
		ciFilter.minimumStop = minimumStop
		ciFilter.maximumStop = maximumStop
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Maximum
	///
	/// Calculates the maximum component values for the specified area in an image, returning the result in a pixel.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547114-areamaximum)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximum)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a single-pixel image that contains the maximum color components for the region of interest, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMaximum(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMaximum() // CIAreaMaximum
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Maximum Alpha
	///
	/// Finds and returns the pixel with the maximum alpha value.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547113-areamaximumalpha)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximumAlpha)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a single-pixel image that contains the color vector with the maximum alpha value for the region of interest, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMaximumAlpha(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMaximumAlpha() // CIAreaMaximumAlpha
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Min and Max
	///
	/// Calculates the per-component minimum and maximum value for the specified area in an image. The result is returned in a 2x1 image where the component minimum values are stored in the pixel on the left.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547115-areaminmax)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMinMax(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMinMax() // CIAreaMinMax
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Min and Max Red
	///
	/// Calculates the minimum and maximum red component value for the specified area in an image. The result is returned in the red and green channels of a one pixel image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547116-areaminmaxred)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMinMaxRed(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMinMaxRed() // CIAreaMinMaxRed
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Minimum
	///
	/// Calculates the minimum component values for the specified area in an image, returning the result in a pixel.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547118-areaminimum)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimum)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a single-pixel image that contains the minimum color components for the region of interest, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMinimum(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMinimum() // CIAreaMinimum
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Area Minimum Alpha
	///
	/// Finds and returns the pixel with the minimum alpha value.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547117-areaminimumalpha)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimumAlpha)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a single-pixel image that contains the color vector with the minimum alpha value for the region of interest, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func areaMinimumAlpha(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.areaMinimumAlpha() // CIAreaMinimumAlpha
		ciFilter.inputImage = self
		ciFilter.extent = extent
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Bars Swipe Transition
	///
	/// Transitions from one image to another by swiping rectangular portions of the foreground image to disclose the target image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228270-barsswipetransition)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBarsSwipeTransition)
	///
	/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - targetImage: The target image for a transition.
	///   - angle: The angle in radians of the bars.
	///   - width: The width of each bar. (2...)
	///   - barOffset: The offset of one bar with respect to another. (1...)
	///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func barsSwipeTransition(targetImage: CIImage,
							 angle: Float = .pi,
							 width: Float,
							 barOffset: Float = 10,
							 time: Float,
							 active: Bool = true) -> CIImage {
		// Fix: the previous `guard angle != 0` skipped the whole transition for an
		// angle of 0 — a valid bar orientation — ignoring `targetImage` and `time`.
		// A transition has no parameter value that makes it a no-op, so gate on
		// `active` like the other transition filters (e.g. `accordionFoldTransition`).
		guard active else { return self }

		let filter = CIFilter.barsSwipeTransition() // CIBarsSwipeTransition
		filter.inputImage = self
		filter.targetImage = targetImage
		filter.angle = angle
		filter.width = width
		filter.barOffset = barOffset
		filter.time = time
		return filter.outputImage ?? CIImage.empty()
	}

	/// Bicubic Scale Transform
	///
	/// Produces a high-quality, scaled version of a source image. The parameters of B and C for this filter determine the sharpness or softness of the resampling. The most commonly used B and C values are 0.0 and 0.75, respectively.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228271-bicubicscaletransform)
	///
	/// Categories: Geometry Adjustment, Video, Still Image, Non-Square Pixels, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)
	///   - aspectRatio: The additional horizontal scaling factor to use on the image. (0...)
	///   - parameterB: Specifies the value of B to use for the cubic resampling function. (0...1)
	///   - parameterC: Specifies the value of C to use for the cubic resampling function. (0...1)
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func bicubicScaleTransform(scale: Float = 1,
							   aspectRatio: Float = 1,
							   parameterB: Float = 0,
							   parameterC: Float = 0.75) -> CIImage {
		// Unit scale in both directions leaves the image unchanged; skip the filter.
		if scale == 1 && aspectRatio == 1 { return self }

		let ciFilter = CIFilter.bicubicScaleTransform() // CIBicubicScaleTransform
		ciFilter.inputImage = self
		ciFilter.scale = scale
		ciFilter.aspectRatio = aspectRatio
		ciFilter.parameterB = parameterB
		ciFilter.parameterC = parameterC
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Blend With Alpha Mask
	///
	/// Uses values from a mask image to interpolate between an image and the background. When a mask alpha value is 0.0, the result is the background. When the mask alpha value is 1.0, the result is the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228272-blendwithalphamask)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithAlphaMask)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	///   - maskImage: A masking image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func blendWithAlphaMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
		guard let background = backgroundImage else { return self }

		let ciFilter = CIFilter.blendWithAlphaMask() // CIBlendWithAlphaMask
		ciFilter.inputImage = self
		ciFilter.backgroundImage = background
		ciFilter.maskImage = maskImage
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Blend With Blue Mask
	///
	/// Uses values from a mask image to interpolate between an image and the background. When a mask blue value is 0.0, the result is the background. When the mask blue value is 1.0, the result is the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228273-blendwithbluemask)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	///   - maskImage: A masking image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func blendWithBlueMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
		guard let background = backgroundImage else { return self }

		let ciFilter = CIFilter.blendWithBlueMask() // CIBlendWithBlueMask
		ciFilter.inputImage = self
		ciFilter.backgroundImage = background
		ciFilter.maskImage = maskImage
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Blend With Mask
	///
	/// Uses values from a grayscale mask to interpolate between an image and the background. When a mask green value is 0.0, the result is the background. When the mask green value is 1.0, the result is the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228274-blendwithmask)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithMask)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	///   - maskImage: A grayscale mask. When a mask value is 0.0, the result is the background. When the mask value is 1.0, the result is the image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func blendWithMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
		guard let background = backgroundImage else { return self }

		let ciFilter = CIFilter.blendWithMask() // CIBlendWithMask
		ciFilter.inputImage = self
		ciFilter.backgroundImage = background
		ciFilter.maskImage = maskImage
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Blend With Red Mask
	///
	/// Uses values from a mask image to interpolate between an image and the background. When a mask red value is 0.0, the result is the background. When the mask red value is 1.0, the result is the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228275-blendwithredmask)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	///   - maskImage: A masking image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func blendWithRedMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
		guard let background = backgroundImage else { return self }

		let ciFilter = CIFilter.blendWithRedMask() // CIBlendWithRedMask
		ciFilter.inputImage = self
		ciFilter.backgroundImage = background
		ciFilter.maskImage = maskImage
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Bloom
	///
	/// Softens edges and applies a pleasant glow to an image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228276-bloom)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBloom)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the greater the effect. (0...)
	///   - intensity: The intensity of the effect. A value of 0.0 is no effect. A value of 1.0 is the maximum effect. (0...)
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func bloom(radius: Float, intensity: Float = 0.5) -> CIImage {
		// Both parameters at zero produce no effect; skip the filter in that case.
		if radius == 0 && intensity == 0 { return self }

		let ciFilter = CIFilter.bloom() // CIBloom
		ciFilter.inputImage = self
		ciFilter.radius = radius
		ciFilter.intensity = intensity
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Bokeh Blur
	///
	/// Smooths an image using a disc-shaped convolution kernel.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228277-bokehblur)
	///
	/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...500)
	///   - ringAmount: The amount of extra emphasis at the ring of the bokeh. (0...1)
	///   - ringSize: The size of extra emphasis at the ring of the bokeh. (0...0.2)
	///   - softness: The softness of the bokeh effect (0...10)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func bokehBlur(radius: Float,
				   ringAmount: Float = 0,
				   ringSize: Float = 0.1,
				   softness: Float = 1,
				   active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.bokehBlur() // CIBokehBlur
		ciFilter.inputImage = self
		ciFilter.radius = radius
		ciFilter.ringAmount = ringAmount
		ciFilter.ringSize = ringSize
		ciFilter.softness = softness
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Box Blur
	///
	/// Smooths or sharpens an image using a box-shaped convolution kernel.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228278-boxblur)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBoxBlur)
	///
	/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (1...)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func boxBlur(radius: Float, active: Bool = true) -> CIImage {
		if !active { return self }

		let ciFilter = CIFilter.boxBlur() // CIBoxBlur
		ciFilter.inputImage = self
		ciFilter.radius = radius
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Bump Distortion
	///
	/// Creates a concave or convex bump that originates at a specified point in the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401850-bumpdistortion)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortion)
	///
	/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
	///   - scale: The scale of the effect determines the curvature of the bump. A value of 0.0 has no effect. Positive values create an outward bump; negative values create an inward bump.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 14, macOS 11.0, *)
	func bumpDistortion(center: CGPoint, radius: Float, scale: Float = 0.5) -> CIImage {
		// A scale of zero is documented as producing no effect; skip the filter.
		if scale == 0 { return self }

		let ciFilter = CIFilter.bumpDistortion() // CIBumpDistortion
		ciFilter.inputImage = self
		ciFilter.center = center
		ciFilter.radius = radius
		ciFilter.scale = scale
		guard let output = ciFilter.outputImage else { return CIImage.empty() }
		return output
	}

	/// Bump Distortion Linear
	///
	/// Creates a concave or convex distortion that originates from a line in the image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401851-bumpdistortionlinear)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortionLinear)
	///
	/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
	///   - angle: The angle in radians of the line around which the distortion occurs.
	///   - scale: The scale of the effect. (-1...)
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 14, macOS 11.0, *)
	func bumpDistortionLinear(center: CGPoint, radius: Float, angle: Float = 0, scale: Float = 0.5) -> CIImage {
		// Fix: the generated guard tested `scale != 1`, but the no-effect value for
		// scale is 0 — consistent with the sibling `bumpDistortion`, whose guard is
		// `scale != 0`. Identity applies only when radius, angle and scale are all
		// at their no-effect values.
		guard radius != 0 || angle != 0 || scale != 0 else { return self }

		let filter = CIFilter.bumpDistortionLinear() // CIBumpDistortionLinear
		filter.inputImage = self
		filter.center = center
		filter.radius = radius
		filter.angle = angle
		filter.scale = scale
		return filter.outputImage ?? CIImage.empty()
	}

	/// CMYK Halftone
	///
	/// Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228259-cmykhalftone)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICMYKHalftone)
	///
	/// Categories: Halftone Effect, Video, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - width: The distance between dots in the pattern. (-2...)
	///   - angle: The angle in radians of the pattern.
	///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...)
	///   - grayComponentReplacement: The gray component replacement value. The value can vary from 0.0 (none) to 1.0. (0...)
	///   - underColorRemoval: The under color removal value. The value can vary from 0.0 to 1.0.  (0...)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func cmykHalftone(center: CGPoint,
					  width: Float,
					  angle: Float = 0,
					  sharpness: Float,
					  grayComponentReplacement: Float = 1,
					  underColorRemoval: Float = 0.5,
					  active: Bool = true) -> CIImage {
		// Fix: the previous guard returned `self` whenever angle, gray-component
		// replacement and under-color removal were all at their defaults, silently
		// skipping the halftone even though the filter always alters the image.
		// Halftoning has no identity parameter values, so gate on `active` instead.
		guard active else { return self }

		let filter = CIFilter.cmykHalftone() // CICMYKHalftone
		filter.inputImage = self
		filter.center = center
		filter.width = width
		filter.angle = angle
		filter.sharpness = sharpness
		filter.grayComponentReplacement = grayComponentReplacement
		filter.underColorRemoval = underColorRemoval
		return filter.outputImage ?? CIImage.empty()
	}

	/// Lens Correction for AVC
	///
	/// Geometrically distorts an image by altering the magnification based on the radial distance from the optical center to the farthest radius.
	///
	/// ⚠️ No documentation available for CICameraCalibrationLensCorrection
	///
	/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - avcameracalibrationdata: AVCameraCalibrationData for the correction. Will be set from the input image if available and can be overridden here.
	///   - useInverseLookupTable: Boolean value used to select the Look Up Table from the AVCameraCalibrationData.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false or the filter cannot be constructed
	@available(iOS 13, macOS 10.15, *)
	func cameraCalibrationLensCorrection(avcameracalibrationdata: AVCameraCalibrationData,
										 useInverseLookupTable: Bool = false,
										 active: Bool = true) -> CIImage {
		guard active else { return self }

		// Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.
		// Fix: the receiver must be supplied as the filter's input image — it was previously
		// omitted from the parameter dictionary, leaving the filter with nothing to process.
		guard let filter = CIFilter(name: "CICameraCalibrationLensCorrection", parameters: [
			kCIInputImageKey: self,
			"inputAVCameraCalibrationData": avcameracalibrationdata,
			"inputUseInverseLookUpTable": useInverseLookupTable,
		]) else { return self }
		return filter.outputImage ?? CIImage.empty()
	}

	/// Canny Edge Detector
	///
	/// Runs the Canny Edge Detection algorithm over an image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401852-cannyedgedetector)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - gaussianSigma: The gaussian sigma of blur to apply to the image to reduce high-frequency noise. (0...)
	///   - perceptual: Specifies whether the edge thresholds should be computed in a perceptual color space.
	///   - thresholdHigh: The threshold that determines if gradient magnitude is a strong edge. (0...)
	///   - thresholdLow: The threshold that determines if gradient magnitude is a weak edge. (0...)
	///   - hysteresisPasses: The number of hysteresis passes to apply to promote weak edge pixels. (0...20)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 17, macOS 14.0, *)
	func cannyEdgeDetector(gaussianSigma: Float = 1.6,
						   perceptual: Bool = false,
						   thresholdHigh: Float = 0.05,
						   thresholdLow: Float = 0.02,
						   hysteresisPasses: Int,
						   active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CICannyEdgeDetector with the receiver as input.
		let detector = CIFilter.cannyEdgeDetector() // CICannyEdgeDetector
		detector.inputImage = self
		detector.gaussianSigma = gaussianSigma
		detector.perceptual = perceptual
		detector.thresholdHigh = thresholdHigh
		detector.thresholdLow = thresholdLow
		detector.hysteresisPasses = hysteresisPasses
		guard let output = detector.outputImage else { return CIImage.empty() }
		return output
	}

	/// Circle Splash Distortion
	///
	/// Distorts pixels beginning at a circle's circumference and radiating outward.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401853-circlesplashdistortion)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircleSplashDistortion)
	///
	/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func circleSplashDistortion(center: CGPoint, radius: Float, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CICircleSplashDistortion with the receiver as input.
		let splash = CIFilter.circleSplashDistortion() // CICircleSplashDistortion
		splash.inputImage = self
		splash.center = center
		splash.radius = radius
		guard let output = splash.outputImage else { return CIImage.empty() }
		return output
	}

	/// Circular Screen
	///
	/// Simulates a halftone screen made of concentric circles.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228280-circularscreen)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularScreen)
	///
	/// Categories: Halftone Effect, Video, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - width: The distance between each circle in the pattern. (1...)
	///   - sharpness: The sharpness of the circles. The larger the value, the sharper the circles. (0...1)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func circularScreen(center: CGPoint, width: Float, sharpness: Float = 0.7, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CICircularScreen with the receiver as input.
		let screen = CIFilter.circularScreen() // CICircularScreen
		screen.inputImage = self
		screen.center = center
		screen.width = width
		screen.sharpness = sharpness
		guard let output = screen.outputImage else { return CIImage.empty() }
		return output
	}

	/// Circular Wrap Distortion
	///
	/// Wraps an image around a transparent circle; distortion grows with distance from the circle's center.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401854-circularwrap)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularWrap)
	///
	/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - center: The center of the effect as x and y pixel coordinates.
	///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
	///   - angle: The angle in radians of the effect.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 14, macOS 11.0, *)
	func circularWrap(center: CGPoint, radius: Float, angle: Float = 0) -> CIImage {
		// Identity short-circuit when `angle` is at its default.
		// NOTE(review): `center`/`radius` are ignored by this check — confirm that a zero
		// angle is truly a no-op for CICircularWrap.
		if angle == 0 { return self }

		// Configure CICircularWrap with the receiver as input.
		let wrap = CIFilter.circularWrap() // CICircularWrap
		wrap.inputImage = self
		wrap.center = center
		wrap.radius = radius
		wrap.angle = angle
		guard let output = wrap.outputImage else { return CIImage.empty() }
		return output
	}

	// ℹ️ CIClamp already has a CIImage method: func clamped(to: CGRect) -> CIImage


	/// Color Absolute Difference
	///
	/// Produces an image whose pixels are the absolute value of the color difference between two images. The result's alpha channel is the product of the two images' alpha channels.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547119-colorabsolutedifference)
	///
	/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - inputImage2: The second input image for differencing.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func colorAbsoluteDifference(inputImage2: CIImage, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorAbsoluteDifference with the receiver as the first input.
		let difference = CIFilter.colorAbsoluteDifference() // CIColorAbsoluteDifference
		difference.inputImage = self
		difference.inputImage2 = inputImage2
		guard let output = difference.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Blend Mode
	///
	/// Combines the background's luminance with the source image's hue and saturation, preserving the image's gray levels.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228282-colorblendmode)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBlendMode)
	///
	/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func colorBlendMode(backgroundImage: CIImage?) -> CIImage {
		// Without a background there is nothing to blend against.
		guard let background = backgroundImage else { return self }

		let blend = CIFilter.colorBlendMode() // CIColorBlendMode
		blend.inputImage = self
		blend.backgroundImage = background
		guard let output = blend.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Burn Blend Mode
	///
	/// Darkens background samples to reflect the source image samples; white source samples produce no change.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228283-colorburnblendmode)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBurnBlendMode)
	///
	/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func colorBurnBlendMode(backgroundImage: CIImage?) -> CIImage {
		// Without a background there is nothing to blend against.
		guard let background = backgroundImage else { return self }

		let blend = CIFilter.colorBurnBlendMode() // CIColorBurnBlendMode
		blend.inputImage = self
		blend.backgroundImage = background
		guard let output = blend.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Clamp
	///
	/// Adjusts color values so they remain within a specified range.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228284-colorclamp)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorClamp)
	///
	/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - minComponents: Lower clamping values.
	///   - maxComponents: Higher clamping values.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorClamp(minComponents: CIVector, maxComponents: CIVector, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorClamp with the receiver as input.
		let clamp = CIFilter.colorClamp() // CIColorClamp
		clamp.inputImage = self
		clamp.minComponents = minComponents
		clamp.maxComponents = maxComponents
		guard let output = clamp.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Controls
	///
	/// Adjusts an image's saturation, brightness, and contrast.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228285-colorcontrols)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorControls)
	///
	/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - saturation: The amount of saturation to apply. The larger the value, the more saturated the result. (0...)
	///   - brightness: The amount of brightness to apply. The larger the value, the brighter the result. (-1...)
	///   - contrast: The amount of contrast to apply. The larger the value, the more contrast in the resulting image. (0...)
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func colorControls(saturation: Float = 1, brightness: Float = 0, contrast: Float = 1) -> CIImage {
		// Identity short-circuit: all three adjustments at their no-op values.
		if saturation == 1 && brightness == 0 && contrast == 1 { return self }

		// Configure CIColorControls with the receiver as input.
		let controls = CIFilter.colorControls() // CIColorControls
		controls.inputImage = self
		controls.saturation = saturation
		controls.brightness = brightness
		controls.contrast = contrast
		guard let output = controls.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Cross Polynomial
	///
	/// Transforms the pixel values in an image using a set of polynomial cross-products.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228286-colorcrosspolynomial)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCrossPolynomial)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - redCoefficients: Polynomial coefficients for red channel.
	///   - greenCoefficients: Polynomial coefficients for green channel.
	///   - blueCoefficients: Polynomial coefficients for blue channel.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorCrossPolynomial(redCoefficients: CIVector,
							  greenCoefficients: CIVector,
							  blueCoefficients: CIVector,
							  active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorCrossPolynomial with the receiver as input.
		let polynomial = CIFilter.colorCrossPolynomial() // CIColorCrossPolynomial
		polynomial.inputImage = self
		polynomial.redCoefficients = redCoefficients
		polynomial.greenCoefficients = greenCoefficients
		polynomial.blueCoefficients = blueCoefficients
		guard let output = polynomial.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Cube
	///
	/// Transforms source image pixels using a three-dimensional color table.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228287-colorcube)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCube)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)
	///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
	///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 16, macOS 13.0, *)
	func colorCube(cubeDimension: Int, cubeData: Data, extrapolate: Bool = false, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorCube with the receiver as input.
		// The filter's cubeDimension property is a Float; convert from the Int API parameter.
		let cube = CIFilter.colorCube() // CIColorCube
		cube.inputImage = self
		cube.cubeDimension = Float(cubeDimension)
		cube.cubeData = cubeData
		cube.extrapolate = extrapolate
		guard let output = cube.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Cube with ColorSpace
	///
	/// Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228288-colorcubewithcolorspace)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCubeWithColorSpace)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)
	///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
	///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
	///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 16, macOS 13.0, *)
	func colorCubeWithColorSpace(cubeDimension: Int,
								 cubeData: Data,
								 extrapolate: Bool = false,	// default added for consistency with colorCube(...) and colorCubesMixedWithMask(...)
								 colorSpace: CGColorSpace,
								 active: Bool = true) -> CIImage {
		guard active else { return self }

		let filter = CIFilter.colorCubeWithColorSpace() // CIColorCubeWithColorSpace
		filter.inputImage = self
		// The filter's cubeDimension property is a Float; convert from the Int API parameter.
		filter.cubeDimension = Float(cubeDimension)
		filter.cubeData = cubeData
		filter.extrapolate = extrapolate
		filter.colorSpace = colorSpace
		return filter.outputImage ?? CIImage.empty()
	}

	/// Color Cubes Mixed With Mask
	///
	/// Transforms source image pixels using two three-dimensional color tables in a specified colorspace; the mask image interpolates between the outputs of the two cubes.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228289-colorcubesmixedwithmask)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - maskImage: A masking image.
	///   - cubeDimension: The dimension of the color cubes.(2...64 iOS; 2...128 macOS)
	///   - cube0Data: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
	///   - cube1Data: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
	///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.
	///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0 to 1.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 16, macOS 13.0, *)
	func colorCubesMixedWithMask(maskImage: CIImage,
								 cubeDimension: Int,
								 cube0Data: Data,
								 cube1Data: Data,
								 colorSpace: CGColorSpace,
								 extrapolate: Bool = false,
								 active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorCubesMixedWithMask with the receiver as input.
		// The filter's cubeDimension property is a Float; convert from the Int API parameter.
		let cubes = CIFilter.colorCubesMixedWithMask() // CIColorCubesMixedWithMask
		cubes.inputImage = self
		cubes.maskImage = maskImage
		cubes.cubeDimension = Float(cubeDimension)
		cubes.cube0Data = cube0Data
		cubes.cube1Data = cube1Data
		cubes.colorSpace = colorSpace
		cubes.extrapolate = extrapolate
		guard let output = cubes.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Curves
	///
	/// Transforms source image pixels using a three-channel, one-dimensional color table.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228290-colorcurves)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - curvesData: Data containing a color table of floating-point RGB values.
	///   - curvesDomain: A two-element vector that defines the minimum and maximum RGB component values that are used to look up result values from the color table.
	///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorCurves(curvesData: Data, curvesDomain: CIVector, colorSpace: CGColorSpace, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorCurves with the receiver as input.
		let curves = CIFilter.colorCurves() // CIColorCurves
		curves.inputImage = self
		curves.curvesData = curvesData
		curves.curvesDomain = curvesDomain
		curves.colorSpace = colorSpace
		guard let output = curves.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Dodge Blend Mode
	///
	/// Brightens background samples to reflect the source image samples; black source samples produce no change.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228291-colordodgeblendmode)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorDodgeBlendMode)
	///
	/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
	///
	///
	/// - Parameters:
	///   - backgroundImage: The image to use as a background image.
	/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
	@available(iOS 13, macOS 10.15, *)
	func colorDodgeBlendMode(backgroundImage: CIImage?) -> CIImage {
		// Without a background there is nothing to blend against.
		guard let background = backgroundImage else { return self }

		let blend = CIFilter.colorDodgeBlendMode() // CIColorDodgeBlendMode
		blend.inputImage = self
		blend.backgroundImage = background
		guard let output = blend.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Invert
	///
	/// Produces an image with inverted colors.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228292-colorinvert)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorInvert)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorInvert(active: Bool = true) -> CIImage {
		if !active { return self }

		// CIColorInvert takes no parameters beyond the input image.
		let invert = CIFilter.colorInvert() // CIColorInvert
		invert.inputImage = self
		guard let output = invert.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Map
	///
	/// Applies a nonlinear transformation to source color values, using mapping values supplied in a table.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228293-colormap)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMap)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - gradientImage: The image data from this image transforms the source image values.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorMap(gradientImage: CIImage, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorMap with the receiver as input.
		let map = CIFilter.colorMap() // CIColorMap
		map.inputImage = self
		map.gradientImage = gradientImage
		guard let output = map.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Matrix
	///
	/// Multiplies source color values by per-channel vectors and adds a bias factor to each color component.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228294-colormatrix)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMatrix)
	///
	/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - rVector: The amount of red to multiply the source color values by.
	///   - gVector: The amount of green to multiply the source color values by.
	///   - bVector: The amount of blue to multiply the source color values by.
	///   - aVector: The amount of alpha to multiply the source color values by.
	///   - biasVector: A vector that’s added to each color component.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorMatrix(rVector: CIVector,
					 gVector: CIVector,
					 bVector: CIVector,
					 aVector: CIVector,
					 biasVector: CIVector,
					 active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorMatrix with the receiver as input.
		let matrix = CIFilter.colorMatrix() // CIColorMatrix
		matrix.inputImage = self
		matrix.rVector = rVector
		matrix.gVector = gVector
		matrix.bVector = bVector
		matrix.aVector = aVector
		matrix.biasVector = biasVector
		guard let output = matrix.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Monochrome
	///
	/// Remaps colors into shades of a single color.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228295-colormonochrome)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMonochrome)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - color: The monochrome color to apply to the image.
	///   - intensity: The intensity of the monochrome effect. A value of 1.0 creates a monochrome image using the supplied color. A value of 0.0 has no effect on the image. (0...)
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func colorMonochrome(color: CIColor, intensity: Float = 1) -> CIImage {
		// Zero intensity is documented as having no effect — return the image untouched.
		if intensity == 0 { return self }

		// Configure CIColorMonochrome with the receiver as input.
		let monochrome = CIFilter.colorMonochrome() // CIColorMonochrome
		monochrome.inputImage = self
		monochrome.color = color
		monochrome.intensity = intensity
		guard let output = monochrome.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Polynomial
	///
	/// Transforms the pixel values in an image by applying a set of cubic polynomials.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228296-colorpolynomial)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPolynomial)
	///
	/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - redCoefficients: Polynomial coefficients for red channel.
	///   - greenCoefficients: Polynomial coefficients for green channel.
	///   - blueCoefficients: Polynomial coefficients for blue channel.
	///   - alphaCoefficients: Polynomial coefficients for alpha channel.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorPolynomial(redCoefficients: CIVector,
						 greenCoefficients: CIVector,
						 blueCoefficients: CIVector,
						 alphaCoefficients: CIVector,
						 active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorPolynomial with the receiver as input.
		let polynomial = CIFilter.colorPolynomial() // CIColorPolynomial
		polynomial.inputImage = self
		polynomial.redCoefficients = redCoefficients
		polynomial.greenCoefficients = greenCoefficients
		polynomial.blueCoefficients = blueCoefficients
		polynomial.alphaCoefficients = alphaCoefficients
		guard let output = polynomial.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Posterize
	///
	/// Remaps red, green, and blue color components to the number of brightness values you specify for each color component. This filter flattens colors to achieve a look similar to that of a silk-screened poster.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228297-colorposterize)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPosterize)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - levels: The number of brightness levels to use for each color component. Lower values result in a more extreme poster effect. (1...)
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func colorPosterize(levels: Float = 6, active: Bool = true) -> CIImage {
		// Fix: the previous guard compared `levels` against the magic number 300 (default is 6),
		// which made `colorPosterize(levels: 300)` silently skip the filter. No finite level
		// count makes posterization a no-op, so the library's standard `active` flag is used
		// instead (backward-compatible: it defaults to true).
		guard active else { return self }

		let filter = CIFilter.colorPosterize() // CIColorPosterize
		filter.inputImage = self
		filter.levels = levels
		return filter.outputImage ?? CIImage.empty()
	}

	/// Color Threshold
	///
	/// Produces a binarized image from an image and a threshold value. The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547120-colorthreshold)
	///
	/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - threshold: The threshold value that governs if the RGB channels of the resulting image will be zero or one.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func colorThreshold(threshold: Float = 0.5, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColorThreshold with the receiver as input.
		let binarize = CIFilter.colorThreshold() // CIColorThreshold
		binarize.inputImage = self
		binarize.threshold = threshold
		guard let output = binarize.outputImage else { return CIImage.empty() }
		return output
	}

	/// Color Threshold Otsu
	///
	/// Produces a binarized image from an image with finite extent, with the threshold computed from the image histogram via Otsu’s method. The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401855-colorthresholdotsu)
	///
	/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func colorThresholdOtsu(active: Bool = true) -> CIImage {
		if !active { return self }

		// CIColorThresholdOtsu takes no parameters beyond the input image.
		let binarize = CIFilter.colorThresholdOtsu() // CIColorThresholdOtsu
		binarize.inputImage = self
		guard let output = binarize.outputImage else { return CIImage.empty() }
		return output
	}

	/// Column Average
	///
	/// Computes the average color of each column within the specified area of an image, returning the result as a 1D image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547121-columnaverage)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColumnAverage)
	///
	/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - extent: A rectangle that specifies the subregion of the image that you want to process.
	///   - active: should this filter be applied
	/// - Returns: a 1-pixel high image that contains the average color for each scan column, or identity if `active` is false
	@available(iOS 14, macOS 11.0, *)
	func columnAverage(extent: CGRect, active: Bool = true) -> CIImage {
		if !active { return self }

		// Configure CIColumnAverage with the receiver as input.
		let reducer = CIFilter.columnAverage() // CIColumnAverage
		reducer.inputImage = self
		reducer.extent = extent
		guard let output = reducer.outputImage else { return CIImage.empty() }
		return output
	}

	/// Comic Effect
	///
	/// Simulates a comic book drawing by outlining edges and applying a color halftone effect.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228298-comiceffect)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIComicEffect)
	///
	/// Categories: Stylize, Video, Still Image, Built-In
	///
	///
	/// - Parameters:
	///   - active: whether this filter should be applied
	/// - Returns: the processed new `CIImage`, or `self` unchanged when `active` is false
	@available(iOS 13, macOS 10.15, *)
	func comicEffect(active: Bool = true) -> CIImage {
		if !active { return self }

		let comicFilter = CIFilter.comicEffect() // CIComicEffect
		comicFilter.inputImage = self
		return comicFilter.outputImage ?? CIImage.empty()
	}

	/// Convert Lab to RGB
	///
	/// Converts an image from La*b* color space to the Core Image RGB working space.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401856-convertlabtorgb)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. If normalize is true then the La*b* channels are in the range 0 to 1.
	///   - active: whether this filter should be applied
	/// - Returns: the processed new `CIImage`, or `self` unchanged when `active` is false
	@available(iOS 16, macOS 13.0, *)
	func convertLabToRGB(normalize: Bool = false, active: Bool = true) -> CIImage {
		if !active { return self }

		let labFilter = CIFilter.convertLabToRGB() // CIConvertLabToRGB
		labFilter.normalize = normalize
		labFilter.inputImage = self
		return labFilter.outputImage ?? CIImage.empty()
	}

	/// Convert RGB to Lab
	///
	/// Converts an image from the Core Image RGB working space to La*b* color space.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401857-convertrgbtolab)
	///
	/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. If normalize is true then the La*b* channels are in the range 0 to 1.
	///   - active: whether this filter should be applied
	/// - Returns: the processed new `CIImage`, or `self` unchanged when `active` is false
	@available(iOS 16, macOS 13.0, *)
	func convertRGBtoLab(normalize: Bool = false, active: Bool = true) -> CIImage {
		if !active { return self }

		let rgbFilter = CIFilter.convertRGBtoLab() // CIConvertRGBtoLab
		rgbFilter.normalize = normalize
		rgbFilter.inputImage = self
		return rgbFilter.outputImage ?? CIImage.empty()
	}

	/// 3 by 3 Convolution
	///
	/// Modifies pixel values by performing a 3x3 matrix convolution.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228299-convolution3x3)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution3X3)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGBA components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func convolution3X3(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 —
		// the common case for blur/sharpen/edge kernels — was incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolution3X3() // CIConvolution3X3
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// 5 by 5 Convolution
	///
	/// Modifies pixel values by performing a 5x5 matrix convolution.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228300-convolution5x5)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution5X5)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 25 weights of the convolution kernel.
	///   - bias: A value that is added to the RGBA components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func convolution5X5(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 25 && (0..<25).allSatisfy { weights.value(at: $0) == ($0 == 12 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolution5X5() // CIConvolution5X5
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// 7 by 7 Convolution
	///
	/// Modifies pixel values by performing a 7x7 matrix convolution.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228301-convolution7x7)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution7X7)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 49 weights of the convolution kernel.
	///   - bias: A value that is added to the RGBA components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func convolution7X7(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 49 && (0..<49).allSatisfy { weights.value(at: $0) == ($0 == 24 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolution7X7() // CIConvolution7X7
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// Horizontal 9 Convolution
	///
	/// Modifies pixel values by performing a 9-element horizontal convolution.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228302-convolution9horizontal)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Horizontal)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGBA components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func convolution9Horizontal(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the 1x9 kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolution9Horizontal() // CIConvolution9Horizontal
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// Vertical 9 Convolution
	///
	/// Modifies pixel values by performing a 9-element vertical convolution.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228303-convolution9vertical)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Vertical)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGBA components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 13, macOS 10.15, *)
	func convolution9Vertical(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the 9x1 kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolution9Vertical() // CIConvolution9Vertical
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// 3 by 3 RGB Convolution
	///
	/// Convolution of RGB channels with 3 by 3 matrix.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401858-convolutionrgb3x3)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGB components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 15, macOS 12.0, *)
	func convolutionRGB3X3(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolutionRGB3X3() // CIConvolutionRGB3X3
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// 5 by 5 RGB Convolution
	///
	/// Convolution of RGB channels with 5 by 5 matrix.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401859-convolutionrgb5x5)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 25 weights of the convolution kernel.
	///   - bias: A value that is added to the RGB components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 15, macOS 12.0, *)
	func convolutionRGB5X5(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 25 && (0..<25).allSatisfy { weights.value(at: $0) == ($0 == 12 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolutionRGB5X5() // CIConvolutionRGB5X5
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// 7 by 7 RGB Convolution
	///
	/// Convolution of RGB channels with 7 by 7 matrix.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401860-convolutionrgb7x7)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 49 weights of the convolution kernel.
	///   - bias: A value that is added to the RGB components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 15, macOS 12.0, *)
	func convolutionRGB7X7(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 49 && (0..<49).allSatisfy { weights.value(at: $0) == ($0 == 24 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolutionRGB7X7() // CIConvolutionRGB7X7
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// Horizontal 9 RGB Convolution
	///
	/// Horizontal Convolution of RGB channels with 9 values.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401861-convolutionrgb9horizontal)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGB components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 15, macOS 12.0, *)
	func convolutionRGB9Horizontal(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the 1x9 kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolutionRGB9Horizontal() // CIConvolutionRGB9Horizontal
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// Vertical 9 RGB Convolution
	///
	/// Vertical Convolution of RGB channels with 9 values.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401862-convolutionrgb9vertical)
	///
	/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
	///
	///
	/// - Parameters:
	///   - weights: A vector containing the 9 weights of the convolution kernel.
	///   - bias: A value that is added to the RGB components of the output pixel.
	/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
	@available(iOS 15, macOS 12.0, *)
	func convolutionRGB9Vertical(weights: CIVector, bias: Float = 0) -> CIImage {
		// The filter is a no-op only when bias is zero AND the 9x1 kernel is the
		// identity (center weight 1, all others 0). The previous guard tested
		// only `bias`, so any kernel passed with the default bias of 0 was
		// incorrectly skipped.
		let isIdentityKernel = weights.count == 9 && (0..<9).allSatisfy { weights.value(at: $0) == ($0 == 4 ? 1 : 0) }
		guard !(bias == 0 && isIdentityKernel) else { return self }

		let filter = CIFilter.convolutionRGB9Vertical() // CIConvolutionRGB9Vertical
		filter.inputImage = self
		filter.weights = weights
		filter.bias = bias
		return filter.outputImage ?? CIImage.empty()
	}

	/// Copy Machine
	///
	/// Transitions from one image to another by simulating the effect of a copy machine.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228304-copymachinetransition)
	/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICopyMachineTransition)
	///
	/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
	///
	///
	/// - Parameters:
	///   - targetImage: The target image for a transition.
	///   - extent: A rectangle that defines the extent of the effect.
	///   - color: The color of the copier light.
	///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
	///   - angle: The angle in radians of the copier light. (0...)
	///   - width: The width of the copier light.  (0.1...)
	///   - opacity: The opacity of the copier light. A value of 0.0 is transparent. A value of 1.0 is opaque. (0...)
	/// - Returns: processed new `CIImage` blending the receiver with `targetImage` at the given `time`
	@available(iOS 13, macOS 10.15, *)
	func copyMachineTransition(targetImage: CIImage,
							   extent: CGRect,
							   color: CIColor,
							   time: Float,
							   angle: Float = 0,
							   width: Float,
							   opacity: Float = 1.3) -> CIImage {
		// A transition is never a no-op: the output is driven by `time` and
		// `targetImage`, not by the defaulted parameters. The previous guard
		// (`angle != 0 || opacity != 1.3`) wrongly returned the unprocessed
		// source image whenever both `angle` and `opacity` kept their default
		// values, silently dropping the transition.
		let filter = CIFilter.copyMachineTransition() // CICopyMachineTransition
		filter.inputImage = self
		filter.targetImage = targetImage
		filter.extent = extent
		filter.color = color
		filter.time = time
		filter.angle = angle
		filter.width = width
		filter.opacity = opacity
		return filter.outputImage ?? CIImage.empty()
	}

	/// CoreML Model Filter
	///
	/// Generates output image by applying input CoreML model to the input image.
	///
	/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228305-coremlmodel)
	/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)
	///
	/// Categories: Still Image, Built-In, Stylize
	///
	///
	/// - Parameters:
	///   - model: The CoreML model to be used for applying effect on the image.
	///   - headIndex: A number to specify which output of a multi-head CoreML model should be used for applying effect on the image. (0...10)
	///   - softmaxNormalization: A boolean value to specify that Softmax normalization should be applied to the output of the model.
	///   - active: should this filter be applied
	/// - Returns: processed new `CIImage`, or identity if `active` is false
	@available(iOS 13, macOS 10.15, *)
	func coreMLModel(model: MLModel, headIndex: Int, softmaxNormalization: Bool = false, active: Bool = true) -> CIImage {
		guard active else { return self }

		let filter = CIFilter.coreMLModel() // CICoreMLMode
Download .txt
gitextract_syle2tou/

├── .gitignore
├── .swiftpm/
│   └── xcode/
│       └── package.xcworkspace/
│           └── xcshareddata/
│               └── IDEWorkspaceChecks.plist
├── GenerateSwiftUICoreImage/
│   ├── GenerateSwiftUICoreImage/
│   │   ├── ContentView.swift
│   │   ├── GenerateSwiftUICoreImage.entitlements
│   │   └── GenerateSwiftUICoreImageApp.swift
│   └── GenerateSwiftUICoreImage.xcodeproj/
│       ├── project.pbxproj
│       └── project.xcworkspace/
│           ├── contents.xcworkspacedata
│           └── xcshareddata/
│               └── IDEWorkspaceChecks.plist
├── Generator/
│   ├── CIImage-Generation.swift
│   ├── FunctionMinima.json
│   ├── MissingParameterDocumentation.json
│   ├── abstracts.json
│   └── docLookup.json
├── LICENSE.txt
├── Package.swift
├── README.md
└── Sources/
    ├── CIImage-Extensions.swift
    ├── CIImage-Filters.swift
    └── Image-Extensions.swift
Condensed preview — 19 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (424K chars).
[
  {
    "path": ".gitignore",
    "chars": 165,
    "preview": ".DS_Store\n/.build\n/Packages\n/*.xcodeproj\nxcuserdata/\nDerivedData/\n.swiftpm/config/registries.json\n.swiftpm/xcode/package"
  },
  {
    "path": ".swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "chars": 238,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/ContentView.swift",
    "chars": 406,
    "preview": "//\n//  ContentView.swift\n//  GenerateSwiftUICoreImage\n//\n//  Created by Dan Wood on 6/25/24.\n//\n\nimport SwiftUI\n\nstruct "
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements",
    "chars": 322,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImageApp.swift",
    "chars": 334,
    "preview": "//\n//  GenerateSwiftUICoreImageApp.swift\n//  GenerateSwiftUICoreImage\n//\n//  Created by Dan Wood on 6/25/24.\n//\n\nimport "
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.pbxproj",
    "chars": 18735,
    "preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 56;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "chars": 135,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:\">\n   </FileRef"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "chars": 238,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
  },
  {
    "path": "Generator/CIImage-Generation.swift",
    "chars": 43595,
    "preview": "//\n//  CIImage-Generation.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 4/27/23.\n//\n// When executed, this "
  },
  {
    "path": "Generator/FunctionMinima.json",
    "chars": 1395,
    "preview": "{\n\t\"areaAverage\": \"11.0\",\n\t\"areaHistogram\": \"11.0\",\n\t\"areaMaximum\": \"11.0\",\n\t\"areaMaximumAlpha\": \"11.0\",\n\t\"areaMinimum\":"
  },
  {
    "path": "Generator/MissingParameterDocumentation.json",
    "chars": 3289,
    "preview": "{\n\t\"CIBokehBlur\" : {\n\t\t\"softness\" : \"The softness of the bokeh effect\"\n\t},\n\t\"CIDepthBlurEffect\" : {\n\t\t\"_NOTE\" : \"THIS WA"
  },
  {
    "path": "Generator/abstracts.json",
    "chars": 18461,
    "preview": "{\n\"CIAccordionFoldTransition\": \"Transitions from one image to another of differing dimensions by unfolding and crossfadi"
  },
  {
    "path": "Generator/docLookup.json",
    "chars": 10905,
    "preview": "{\n\"accordionFoldTransition\": \"3228263-accordionfoldtransition\",\n\"additionCompositing\": \"3228264-additioncompositing\",\n\"a"
  },
  {
    "path": "LICENSE.txt",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2023 Dan Wood\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\no"
  },
  {
    "path": "Package.swift",
    "chars": 1042,
    "preview": "// swift-tools-version: 5.7\n// The swift-tools-version declares the minimum version of Swift required to build this pack"
  },
  {
    "path": "README.md",
    "chars": 10251,
    "preview": "# SwiftUICoreImage\n\nHelp for using Core Image within the context of SwiftUI. Also useful even without SwiftUI.\n\n## Intro"
  },
  {
    "path": "Sources/CIImage-Extensions.swift",
    "chars": 7057,
    "preview": "//\n//  CIImage-Extensions.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 5/9/23.\n//\n\nimport SwiftUI\nimport C"
  },
  {
    "path": "Sources/CIImage-Filters.swift",
    "chars": 278756,
    "preview": "//\n// Automatically generated by CIImage-Generation.swift - do not edit\n//\n\nimport Foundation\nimport CoreImage\nimport Co"
  },
  {
    "path": "Sources/Image-Extensions.swift",
    "chars": 1185,
    "preview": "//\n//  Image-Extensions.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 5/9/23.\n//\n\nimport Foundation\nimport "
  }
]

About this extraction

This page contains the full source code of the danwood/SwiftUICoreImage GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 19 files (388.3 KB), approximately 104.1k tokens. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!