Repository: danwood/SwiftUICoreImage
Branch: main
Commit: 9b1b38ad99ff
Files: 19
Total size: 388.3 KB
Directory structure:
gitextract_syle2tou/
├── .gitignore
├── .swiftpm/
│ └── xcode/
│ └── package.xcworkspace/
│ └── xcshareddata/
│ └── IDEWorkspaceChecks.plist
├── GenerateSwiftUICoreImage/
│ ├── GenerateSwiftUICoreImage/
│ │ ├── ContentView.swift
│ │ ├── GenerateSwiftUICoreImage.entitlements
│ │ └── GenerateSwiftUICoreImageApp.swift
│ └── GenerateSwiftUICoreImage.xcodeproj/
│ ├── project.pbxproj
│ └── project.xcworkspace/
│ ├── contents.xcworkspacedata
│ └── xcshareddata/
│ └── IDEWorkspaceChecks.plist
├── Generator/
│ ├── CIImage-Generation.swift
│ ├── FunctionMinima.json
│ ├── MissingParameterDocumentation.json
│ ├── abstracts.json
│ └── docLookup.json
├── LICENSE.txt
├── Package.swift
├── README.md
└── Sources/
├── CIImage-Extensions.swift
├── CIImage-Filters.swift
└── Image-Extensions.swift
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
.DS_Store
/.build
/Packages
/*.xcodeproj
xcuserdata/
DerivedData/
.swiftpm/config/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc
================================================
FILE: .swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
IDEDidComputeMac32BitWarning
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/ContentView.swift
================================================
//
// ContentView.swift
// GenerateSwiftUICoreImage
//
// Created by Dan Wood on 6/25/24.
//
import SwiftUI
/// Placeholder window content: the generator app does all of its real work by
/// printing generated Swift source to the console (see `dumpFilters()`), so the
/// UI is just a hint telling the user where to look.
struct ContentView: View {
    var body: some View {
        VStack {
            globeIcon
            Text("See console output")
        }
        .padding()
    }

    /// Large, tint-colored globe symbol shown above the status line.
    private var globeIcon: some View {
        Image(systemName: "globe")
            .imageScale(.large)
            .foregroundStyle(.tint)
    }
}

#Preview {
    ContentView()
}
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements
================================================
com.apple.security.app-sandbox
com.apple.security.files.user-selected.read-only
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImageApp.swift
================================================
//
// GenerateSwiftUICoreImageApp.swift
// GenerateSwiftUICoreImage
//
// Created by Dan Wood on 6/25/24.
//
import SwiftUI
@main
struct GenerateSwiftUICoreImageApp: App {
    var body: some Scene {
        // Side-effect trick: `let _ = …` inside the SceneBuilder runs the code
        // generator when the scene is built, dumping the generated
        // CIImage extension source to the console. `dumpUnknownProperties()`
        // then prints any filter inputs that lacked documentation so they can
        // be added to MissingParameterDocumentation.json.
        // NOTE(review): ordering matters — dumpFilters() populates the global
        // `unknownProperties` that dumpUnknownProperties() reads.
        let _ = dumpFilters()
        let _ = dumpUnknownProperties()
        WindowGroup {
            ContentView()
        }
    }
}
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 56;
objects = {
/* Begin PBXBuildFile section */
CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */ = {isa = PBXBuildFile; fileRef = CE67B0D02C2C82A4003E692B /* docLookup.json */; };
CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */; };
CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AC12C2B75090085C241 /* ContentView.swift */; };
CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD12C2B75260085C241 /* abstracts.json */; };
CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */; };
CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD42C2B75260085C241 /* FunctionMinima.json */; };
CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */; };
CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */; };
CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */; };
CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
CE67B0D02C2C82A4003E692B /* docLookup.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = docLookup.json; sourceTree = ""; };
CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GenerateSwiftUICoreImage.app; sourceTree = BUILT_PRODUCTS_DIR; };
CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateSwiftUICoreImageApp.swift; sourceTree = ""; };
CE9A3AC12C2B75090085C241 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; };
CE9A3AD12C2B75260085C241 /* abstracts.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = abstracts.json; sourceTree = ""; };
CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Generation.swift"; sourceTree = ""; };
CE9A3AD42C2B75260085C241 /* FunctionMinima.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = FunctionMinima.json; sourceTree = ""; };
CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = MissingParameterDocumentation.json; sourceTree = ""; };
CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Extensions.swift"; sourceTree = ""; };
CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Filters.swift"; sourceTree = ""; };
CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Image-Extensions.swift"; sourceTree = ""; };
CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = GenerateSwiftUICoreImage.entitlements; sourceTree = ""; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
CE9A3AB92C2B75090085C241 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
CE9A3AB32C2B75090085C241 = {
isa = PBXGroup;
children = (
CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */,
CE9A3AD62C2B75260085C241 /* Generator */,
CE9A3ADA2C2B75260085C241 /* Sources */,
CE9A3ABD2C2B75090085C241 /* Products */,
);
sourceTree = "";
};
CE9A3ABD2C2B75090085C241 /* Products */ = {
isa = PBXGroup;
children = (
CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */,
);
name = Products;
sourceTree = "";
};
CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {
isa = PBXGroup;
children = (
CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */,
CE9A3AC12C2B75090085C241 /* ContentView.swift */,
CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */,
);
path = GenerateSwiftUICoreImage;
sourceTree = "";
};
CE9A3AD62C2B75260085C241 /* Generator */ = {
isa = PBXGroup;
children = (
CE67B0D02C2C82A4003E692B /* docLookup.json */,
CE9A3AD12C2B75260085C241 /* abstracts.json */,
CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */,
CE9A3AD42C2B75260085C241 /* FunctionMinima.json */,
CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */,
);
name = Generator;
path = ../Generator;
sourceTree = "";
};
CE9A3ADA2C2B75260085C241 /* Sources */ = {
isa = PBXGroup;
children = (
CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */,
CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */,
CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */,
);
name = Sources;
path = ../Sources;
sourceTree = "";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {
isa = PBXNativeTarget;
buildConfigurationList = CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */;
buildPhases = (
CE9A3AB82C2B75090085C241 /* Sources */,
CE9A3AB92C2B75090085C241 /* Frameworks */,
CE9A3ABA2C2B75090085C241 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = GenerateSwiftUICoreImage;
productName = GenerateSwiftUICoreImage;
productReference = CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
CE9A3AB42C2B75090085C241 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1540;
LastUpgradeCheck = 1540;
TargetAttributes = {
CE9A3ABB2C2B75090085C241 = {
CreatedOnToolsVersion = 15.4;
};
};
};
buildConfigurationList = CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */;
compatibilityVersion = "Xcode 14.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = CE9A3AB32C2B75090085C241;
productRefGroup = CE9A3ABD2C2B75090085C241 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
CE9A3ABA2C2B75090085C241 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */,
CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */,
CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */,
CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
CE9A3AB82C2B75090085C241 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */,
CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */,
CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */,
CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */,
CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */,
CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
CE9A3AC92C2B750B0085C241 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
CE9A3ACA2C2B750B0085C241 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SWIFT_COMPILATION_MODE = wholemodule;
};
name = Release;
};
CE9A3ACC2C2B750B0085C241 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;
"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\"";
DEVELOPMENT_TEAM = 3SP7MRA6P9;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 17.5;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 14.5;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
CE9A3ACD2C2B750B0085C241 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;
"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\"";
DEVELOPMENT_TEAM = 3SP7MRA6P9;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 17.5;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 14.5;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */ = {
isa = XCConfigurationList;
buildConfigurations = (
CE9A3AC92C2B750B0085C241 /* Debug */,
CE9A3ACA2C2B750B0085C241 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */ = {
isa = XCConfigurationList;
buildConfigurations = (
CE9A3ACC2C2B750B0085C241 /* Debug */,
CE9A3ACD2C2B750B0085C241 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = CE9A3AB42C2B75090085C241 /* Project object */;
}
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
================================================
FILE: GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
================================================
IDEDidComputeMac32BitWarning
================================================
FILE: Generator/CIImage-Generation.swift
================================================
//
// CIImage-Generation.swift
// SwiftUI Core Image
//
// Created by Dan Wood on 4/27/23.
//
// When executed, this outputs Swift code that can be pasted into the file "CIImage+Generated.swift".
//
// This will run under iOS or macOS and the resulting code is almost the same. Notably in affineClamp and affineTile the default values are not
// the same. Also as noted in the documentation that we generate, the `cubeDimension` parameter has a different range between iOS and macOS.
import Foundation
import CoreImage
import CoreImage.CIFilterBuiltins
#if canImport(UIKit)
private typealias AffineTransform = CGAffineTransform
#elseif canImport(AppKit)
private typealias AffineTransform = NSAffineTransform
#endif
private var unknownProperties: [String: [String: String]] = [:]
/// Loads a JSON resource from the main bundle and decodes it as a dictionary of
/// the inferred type `T`. On any failure (missing resource, unreadable data,
/// invalid JSON, wrong shape) prints a commented error marker — so the failure
/// shows up in the generated-code output — and returns nil.
/// - Parameter resourceName: Base name of the `.json` file in the bundle.
/// - Returns: The decoded dictionary, or nil on failure.
private func loadJSONDictionary<T>(named resourceName: String) -> T? {
    guard let url = Bundle.main.url(forResource: resourceName, withExtension: "json"),
          let data = try? Data(contentsOf: url),
          let json = try? JSONSerialization.jsonObject(with: data, options: []),
          let result = json as? T
    else {
        print("// 🛑 can't load \(resourceName).json")
        return nil
    }
    return result
}

/// Prints (to the console) the complete generated source for the
/// `CIImage+Generated.swift` file: a `CIImage` extension with one method per
/// Core Image filter, split into image-to-image filters and generators.
/// Bails out early (with a commented 🛑 marker) if any of the four supporting
/// JSON resources cannot be loaded.
func dumpFilters() {
    /*
     New documentation base found at
     https://developer.apple.com/documentation/coreimage
     or
     https://developer.apple.com/documentation/coreimage/cifilter
     15 categories. Open each in tab. Select all, copy, paste into rich text TextEdit doc. Save as HTML.
     Copy this source, then in terminal, grep out the lines I want:
     pbpaste | grep 'class func' | grep 'any CIFilter ' | sort | uniq > ~/Desktop/AllFunctions.html
     (There are a few duplicated functions; gonna not worry about right now)
     In BBEdit, remove the stuff before the
     From that, in BBEdit, grep replace all lines:
     ^.+class func ([^>]+)\(\) -> any CIFilter & ([^>]+)
     to:
     "\2": "\1",
     and then…
     ^.+class func ([^(]+).+? any CIFilter & ([^<]+)
     to:
     "\2": "\1",
     Save as RawLookup.json to Desktop
     cat ~/Desktop/RawLookup.json | sort | uniq > ~/Desktop/docLookup.json
     Now edit to include { and } and remove last comma
     This file lets us know the documentation URL fragment to append to https://developer.apple.com/documentation/coreimage/cifilter/
     */
    guard let docLookup: [String: String] = loadJSONDictionary(named: "docLookup") else { return }

    /*
     Load abstracts for all functions that are documented on the OLD reference page. Still, some of these descriptions are a bit more descriptive than the built-in descriptions.
     Possible improvement, scrape the same pages that are used above to generate docLookup.json to get the most up-to-date abstracts from the web.
     Start with
     https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/
     auto-expand all symbols
     get HTML source
     in BBEdit change all instances (with Grep) of:
     +href="#//apple_ref/doc/filter/ci/([^"]+)"\n +title="([^"]+)">
     to:
     •"\1": "\2",
     Sort, extract lines starting with •
     Paste and preserve formatting into abstracts.json; fix the last line.
     Look for any little tweaks that may be needed.
     */
    guard let abstractLookup: [String: String] = loadJSONDictionary(named: "abstracts") else { return }

    /*
     A dictionary mapping filters (pretty function names) to override iOS versions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs.
     Not sure where we got this originally! We may need to update some of these.
     */
    guard let functionMinima: [String: String] = loadJSONDictionary(named: "FunctionMinima") else { return }

    /* Generate this list by running the code; it finds inputs missing documentation replacing with "_____TODO_____". Update the MissingParameterDocumentation.json file as this is improved. Documentation can come from whatever sources can be scraped together; use "_NOTE" key just to notate how we found the information.
     */
    guard let forUnknownProperties: [String: [String: String]] = loadJSONDictionary(named: "MissingParameterDocumentation") else { return }
    unknownProperties = forUnknownProperties

    // Partition every registered Core Image filter into generators (no input
    // image) and image-to-image filters (input image + output image).
    let ciFilterList = CIFilter.filterNames(inCategories: nil)
    var generators: [String: CIFilter] = [:]
    var imageToImage: [String: CIFilter] = [:]
    for filterName in ciFilterList {
        guard let filter = CIFilter(name: filterName) else { print("// 🛑 can't instantiate \(filterName)"); continue }
        if !filter.inputKeys.contains(kCIInputImageKey) {
            generators[filterName] = filter
        } else if filter.outputKeys.contains(kCIOutputImageKey) {
            imageToImage[filterName] = filter
        } else {
            print("// 🛑 Don't know what to do with \(filterName) - outputKeys = \(filter.outputKeys)")
        }
    }

    // Emit the file header and imports of the generated source.
    print("//")
    print("// Automatically generated by CIImage-Generation.swift - do not edit")
    print("//")
    print("")
    print("import Foundation")
    print("import CoreImage")
    print("import CoreImage.CIFilterBuiltins")
    print("import CoreML")
    print("import AVFoundation")
    print("")
    print("public extension CIImage {")
    print("")
    print("//")
    print("// MARK: IMAGE-TO-IMAGE FILTERS")
    print("//")
    for filterName in imageToImage.keys.sorted() {
        guard let filter: CIFilter = imageToImage[filterName] else { continue }
        outputImageToImage(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)
    }
    print("")
    print("//")
    print("// MARK: GENERATORS")
    print("//")
    for filterName in generators.keys.sorted() {
        guard let filter: CIFilter = generators[filterName] else { continue }
        outputGeneratorFilter(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)
    }

    // End of class extension
    print("}")
    print("\n\n\n\n\n\n\n")
}
// Use this to start collecting properties needing some documentation, to then put into MissingParameterDocumentation.json
func dumpUnknownProperties() {
do {
let theJSONData = try JSONSerialization.data(
withJSONObject: unknownProperties,
options: [.sortedKeys, .prettyPrinted]
)
if let theJSONText = String(data: theJSONData,
encoding: String.Encoding.utf8) {
print("\n\n\n_________________________\n\nDumped properties missing documentation = \n\n\n\(theJSONText)")
} else {
print("Unable to convert data to JSON")
}
}
catch {
print(error)
}
}
/// Emits the generated source (documentation comment, availability attribute,
/// and initializer-style function) for one generator filter — a filter that
/// takes no input image. Filters that `CIImage` already exposes through a
/// built-in initializer are skipped with an informational marker instead.
private func outputGeneratorFilter(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {
    let name = filter.name
    // Generators for which CIImage already ships an equivalent initializer.
    let builtInInitializers: [String: String] = ["CIConstantColorGenerator": "init(color: CIColor)"]
    switch builtInInitializers[name] {
    case .some(let existing):
        print("// ℹ️ \(name) already has a CIImage initializer: \(existing)")
    case .none:
        outputDocumentation(filter, isGenerator: true, abstractLookup: abstractLookup, docLookup: docLookup)
        outputOSVersion(filter, functionMinima: functionMinima)
        outputImageFunction(filter, isGenerator: true)
    }
}
private func outputDocumentation(_ filter: CIFilter, isGenerator: Bool, abstractLookup: [String: String], docLookup: [String: String]) {
let filterName = filter.name
let description: String? = CIFilter.localizedDescription(forFilterName: filterName)
let categories: Array = filter.attributes[kCIAttributeFilterCategories] as? Array ?? []
let filterDisplayName: String = filter.attributes[kCIAttributeFilterDisplayName] as? String ?? ""
let documentationURL: URL? = filter.attributes[kCIAttributeReferenceDocumentation] as? URL
// https://developer.apple.com/documentation/xcode/writing-symbol-documentation-in-your-source-files
print("\n/// \(filterDisplayName)")
print("///")
if let description {
if let abstract = abstractLookup[filterName], !abstract.hasPrefix("Returns "), abstract.count > description.count {
// Replace description with longer abstract scraped from the website, unless it starts with 'Returns ' since we use that for the output.
print("/// \(abstract)")
} else {
print("/// \(description)")
}
print("///")
}
// Convert, for example, CIAccordionFoldTransition to accordionFoldTransition
let functionFilterNameCapitalized = filterName.dropFirst(2)
var functionFilterName = (functionFilterNameCapitalized.first?.lowercased() ?? "") + functionFilterNameCapitalized.dropFirst()
let manualNameLookup = ["CICMYKHalftone": "cmykHalftone", "CIPDF417BarcodeGenerator": "pdf417BarcodeGenerator", "CIQRCodeGenerator": "qrCodeGenerator"]
if let foundManualLookup = manualNameLookup[filterName] {
functionFilterName = foundManualLookup
}
// These are still in beta, so I'm not seeing them on the main category lists. https://developer.apple.com/documentation/coreimage/cifilter
let manualURLLookup = ["CIAreaBoundsRed": "4401847-areaboundsred",
"CIMaximumScaleTransform": "4401870-maximumscaletransform",
"CIToneMapHeadroom": "4401878-tonemapheadroom",
"CIAreaAlphaWeightedHistogram": "4401846-areaalphaweightedhistogram"
]
let newDocURLFragment: String?
if let manualURLFragment = manualURLLookup[filterName] {
newDocURLFragment = manualURLFragment
} else {
newDocURLFragment = docLookup[functionFilterName]
}
if let newDocURLFragment {
print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))")
} else {
let withoutSuffix = functionFilterName.replacingOccurrences(of: "Filter", with: "", options: [.backwards, .anchored])
if let newDocURLFragment = docLookup[withoutSuffix] {
print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))")
} else {
print("/// ⚠️ No documentation available for \(filterName)")
}
}
if let documentationURL {
if nil != abstractLookup[filterName] {
let urlFragment: String
#if canImport(UIKit)
urlFragment = "http://developer.apple.com/library/ios"
#elseif canImport(AppKit)
urlFragment = "http://developer.apple.com/library/mac"
#endif
var urlString: String = documentationURL.absoluteString.replacingOccurrences(of: urlFragment,
with: "https://developer.apple.com/library/archive",
options: .anchored)
urlString = urlString.replacingOccurrences(of: "https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html", with: "https://t.ly/Gyd6")
print("/// [Classic Documentation](\(urlString))")
}
// Special cases for documentation
if filterName == "CIDepthBlurEffect" {
// Some helpful hints since this is otherwise undocumented
print("/// [WWDC Video](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_hd_image_editing_with_depth.mp4)")
print("/// [WWDC Slides](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_image_editing_with_depth.pdf)")
} else if filterName == "CICoreMLModelFilter" {
print("/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)")
}
print("///")
}
if categories.count == 1, let category = categories.first {
print("/// Category: \(CIFilter.localizedName(forCategory: category))")
print("///")
} else if categories.count > 1 {
let prettyList: String = categories.map { CIFilter.localizedName(forCategory: $0) }.joined(separator: ", ")
print("/// Categories: \(prettyList)")
print("///")
}
print("///")
print("/// - Parameters:")
var adjustedInputKeys = filter.inputKeys.filter { $0 != kCIInputImageKey }
if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage") {
adjustedInputKeys.append("active")
}
for inputKey in adjustedInputKeys {
guard inputKey != "active" else {
print("/// - active: should this filter be applied")
continue
}
guard let attributes = filter.attributes[inputKey] as? [String: AnyObject],
let attributeClass = attributes[kCIAttributeClass] as? String
else {
print("/// - \(inputKey): 🛑 couldn't get input attributes")
continue
}
let displayName: String = attributes[kCIAttributeDisplayName] as? String ?? "" // space-separated
let longerInput: String = parameterName(displayName: displayName, filterName: filterName)
var description: String = attributes[kCIAttributeDescription] as? String ?? "[unknown]"
if nil == attributes[kCIAttributeDescription] {
// TEMPORARY CODE TO COLLECT UNKNOWN PROPERTIES
var foundUnknownPropertiesForFilter: [String: String] = unknownProperties[filterName] ?? [:]
if nil == foundUnknownPropertiesForFilter[longerInput] {
foundUnknownPropertiesForFilter[longerInput] = "_____TODO_____"
}
unknownProperties[filterName] = foundUnknownPropertiesForFilter
if let missingParameters: [String: String] = unknownProperties[filterName],
let replacementDocumentation: String = missingParameters[longerInput] {
description = replacementDocumentation
}
}
// Remove rounding information since we are passing in integers directly.
description = description.replacing(" The value will be rounded to the nearest odd integer.", with: "")
description = description.replacing(" Set to nil for automatic.", with: "")
// Fix this weird ObjC style documentation
description = description.replacing("Force a compact style Aztec code to @YES or @NO.",
with: "A Boolean that specifies whether to force a compact style Aztec code.")
description = description.replacing("Force compaction style to @YES or @NO.",
with: "A Boolean value specifying whether to force compaction style.")
print("/// - \(longerInput): \(description)", terminator: "")
// For numbers, show the range on the same line
switch attributeClass {
case "NSNumber":
guard attributes[kCIAttributeType] as? String != kCIAttributeTypeBoolean, longerInput != "extrapolate" else { break }
guard longerInput != "cubeDimension" else {
// Special case. MacOS and iOS report different values so show that here
print("(2...64 iOS; 2...128 macOS)", terminator: "")
break
}
let minimumValue: Float? = (attributes[kCIAttributeMin] as? NSNumber)?.floatValue
let maximumValue: Float? = (attributes[kCIAttributeMax] as? NSNumber)?.floatValue
// Ignore very large maximum value since it's not practical
if let minimumValue, let maximumValue, maximumValue < 0x0800_0000_00000_0000 {
print(" (\(minimumValue.format5)...\(maximumValue.format5))", terminator: "")
} else if let minimumValue {
print(" (\(minimumValue.format5)...)", terminator: "")
} else if let maximumValue, maximumValue < 0x0800_0000_00000_0000 {
print(" (...\(maximumValue.format5))", terminator: "")
}
default:
break
}
print("") // finish up the line
}
if filter.outputKeys.contains(kCIOutputImageKey) {
if isGenerator {
if let abstract: String = abstractLookup[filterName],
let match = abstract.firstMatch(of: /^Generates*\h/) {
let abstractWithoutReturnsPrefix = abstract[match.range.upperBound...]
let sentences = Array(abstractWithoutReturnsPrefix.split(separator: /\./))
let firstSentence = sentences.first ?? abstractWithoutReturnsPrefix
print("/// - Returns: \(firstSentence)")
} else if let description,
let match = description.firstMatch(of: /^Generates*\h/) {
let descriptionWithoutReturnsPrefix = description[match.range.upperBound...]
let sentences = Array(descriptionWithoutReturnsPrefix.split(separator: /\./))
let firstSentence = sentences.first ?? descriptionWithoutReturnsPrefix
print("/// - Returns: \(firstSentence)")
} else {
print("/// - Returns: new `CIImage`")
}
} else {
var returnInfo: String
if var abstract = abstractLookup[filterName], abstract.hasPrefix("Returns ") {
abstract = String(abstract.dropFirst(8))
abstract = abstract.replacingOccurrences(of: ".", with: "", options: [.anchored, .backwards]) // remove any ending period
returnInfo = abstract
} else {
returnInfo = "processed new `CIImage`"
}
if filter.identityInputKeys.isEmpty && filter.inputKeys.contains("inputBackgroundImage") {
// Append info about when active is false
returnInfo += ", or identity if `backgroundImage` is nil"
} else if filter.identityInputKeys.isEmpty {
// Append info about when active is false
returnInfo += ", or identity if `active` is false"
} else {
// Append info about identity parameters
returnInfo += " or identity if parameters result in no operation applied"
// TODO: colorCrossPolynomial broken
}
print("/// - Returns: \(returnInfo)")
}
}
}
/// Prints an `@available(iOS …, macOS …, *)` line for the generated function,
/// raising the filter's declared minima to the SwiftUI floor (macOS 10.15 / iOS 13)
/// and applying any per-function overrides from FunctionMinima.json.
private func outputOSVersion(_ filter: CIFilter, functionMinima: [String: String]) {
	let filterName = filter.name
	var macOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_Mac] as? String
	// Repair the one known malformed availability string ("10.?") from documentation.
	if Float(macOSVersion ?? "") == nil, filterName == "CIHistogramDisplayFilter" {
		macOSVersion = "10.9"
	}
	// Any 10.x requirement below 10.15 is raised to 10.15 (minimum for SwiftUI and most filter functions).
	if macOSVersion?.firstMatch(of: /10\.[0-9]+/) != nil, macOSVersion != "10.15" {
		macOSVersion = "10.15"
	}
	var iOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_iOS] as? String
	if (Float(iOSVersion ?? "") ?? 0) < 13 {
		iOSVersion = "13" // minimum version for SwiftUI and most filter functions
	}
	// Override versions of our functions when the Core Image functions (or occasionally
	// parameters of them) required newer OSs; map known macOS minima to their iOS counterparts.
	if let overriddenMinimum = functionMinima[filterName.prettyFunction] {
		macOSVersion = overriddenMinimum
		let macToiOS = ["11.0": "14", "12.0": "15", "13.0": "16"]
		if let matchingiOSVersion = macToiOS[overriddenMinimum] {
			iOSVersion = matchingiOSVersion
		}
	}
	if let macOSVersion, let iOSVersion {
		print("@available(iOS \(iOSVersion), macOS \(macOSVersion), *)")
	}
}
/// Prints the signature of the generated CIImage function for `filter`, up to and
/// including the opening brace: `func name(params…) -> CIImage {`.
/// Generators are emitted as `static func` since they take no input image.
private func outputImageFunctionHeader(_ filter: CIFilter, isGenerator: Bool) {
	let filterName: String = filter.name
	let filterFunction: String = filterName.prettyFunction
	print("\(isGenerator ? "static " : "")func \(filterFunction)(", terminator: "")
	// One parameter declaration per input key, excluding the input image itself.
	var inputParams: [String] = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.map { inputKey in
			(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes
		.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
			parameterStatement(inputKey: inputKey, inputAttributes: inputAttributes, filterName: filterName)
		}
	// Filters with no identity-capable inputs and no background image get a synthesized
	// `active: Bool = true` parameter so callers can switch the filter off.
	// (The attribute dictionary is built directly; no optional binding is needed for it.)
	if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage") {
		let attributesForActiveParam: [String: AnyObject] = [kCIAttributeDisplayName: "Active" as NSString,
															 kCIAttributeClass: "NSNumber" as NSString,
															 kCIAttributeType: kCIAttributeTypeBoolean as NSString,
															 kCIAttributeDefault: true as AnyObject,
															 kCIAttributeIdentity: true as AnyObject]
		if let activeParameterStatement: String = parameterStatement(inputKey: "active", inputAttributes: attributesForActiveParam, filterName: filterName) {
			inputParams.append(activeParameterStatement)
		}
	}
	let inputParamsOnOneLine = inputParams.joined(separator: ", ")
	// A parameter containing "//" (a comment in its default) must get its own line.
	let forceMultiLines: Bool = inputParamsOnOneLine.contains("//")
	if inputParamsOnOneLine.count + filterFunction.count >= 100 || forceMultiLines {
		print(inputParams.joined(separator: ",\n "), terminator: forceMultiLines ? "\n" : "")
	} else {
		print(inputParamsOnOneLine, terminator: "")
	}
	print(") -> CIImage {")
}
/// Emits a CIImage extension function that applies `filter` via the dictionary-based
/// `CIFilter(name:parameters:)` initializer — used for filters that have no
/// CoreImage.CIFilterBuiltins Swift API.
/// - Parameters:
///   - filter: the filter to generate a wrapper for
///   - isGenerator: must be false; dictionary generation is not supported for generators
private func outputImageDictionaryFunction(_ filter: CIFilter, isGenerator: Bool) {
	assert(!isGenerator) // not supported for generators; none known to be needed
	let filterName: String = filter.name
	outputImageFunctionHeader(filter, isGenerator: isGenerator)
	outputIdentityGuards(filter)
	print(" // Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.")
	print(" guard let filter = CIFilter(name: \"\(filter.name)\", parameters: [", terminator: "")
	// One `"inputKey": parameterName,` dictionary entry per input, excluding the input image.
	let otherInputSettingStatements: [String] = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.map { inputKey in
			(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes
		.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
			guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String
			else { return nil }
			let inputName: String = parameterName(displayName: displayName, filterName: filterName)
			return " \"\(inputKey)\": \(inputName),"
		}
	if !otherInputSettingStatements.isEmpty {
		print("\n")
		print(otherInputSettingStatements.joined(separator: "\n"))
		print(" ", terminator: "")
	} else {
		// No parameters: the ":" completes an empty dictionary literal `[:]`.
		print(":", terminator: "")
	}
	print("]) else { return self }")
	print(" return filter.outputImage ?? CIImage.empty()")
	print("}")
}
/// Prints a `guard … else { return self }` line so the generated function returns the
/// unmodified image when every parameter is at its identity value (or when `active`
/// is false / `backgroundImage` is nil). Not called for generators — an identity
/// guard makes no sense for a filter with no input image.
private func outputIdentityGuards(_ filter: CIFilter) {
	let filterName = filter.name
	var comparisons: [String] = []
	if filter.identityInputKeys.isEmpty {
		// No usable identity values: guard on the background image or the synthesized `active` flag.
		comparisons.append(filter.inputKeys.contains("inputBackgroundImage") ? "let backgroundImage" : "active")
	} else {
		// One `param != identityValue` comparison per input that declares a usable identity value.
		for inputKey in filter.inputKeys where inputKey != kCIInputImageKey {
			let inputAttributes = filter.attributes[inputKey] as? [String: AnyObject] ?? [:]
			guard let displayName = inputAttributes[kCIAttributeDisplayName] as? String,
				  let identityValue: Any = inputAttributes[kCIAttributeIdentity]
			else { continue }
			let attributeType = inputAttributes[kCIAttributeType] as? String
			let inputName = parameterName(displayName: displayName, filterName: filterName)
			guard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName)
			else { continue }
			let identityValueFormatted = formatSmart(identityValue, attributeType: attributeType, inputName: inputName, filterName: filterName)
			comparisons.append("\(inputName) != \(identityValueFormatted)")
		}
	}
	let identityComparisons = comparisons.joined(separator: " || ")
	if !identityComparisons.isEmpty {
		print(" guard \(identityComparisons) else { return self }")
		print("")
	}
}
/// Emits a CIImage extension function that applies `filter` via the strongly-typed
/// CoreImage.CIFilterBuiltins API: header, identity guards (non-generators only),
/// one property assignment per input, then the output image.
/// - Parameters:
///   - filter: the filter to generate a wrapper for
///   - isGenerator: true for generator filters (no input image; emitted as `static func`)
private func outputImageFunction(_ filter: CIFilter, isGenerator: Bool) {
	let filterName: String = filter.name
	let filterFunction: String = filterName.prettyFunction
	outputImageFunctionHeader(filter, isGenerator: isGenerator)
	if !isGenerator {
		outputIdentityGuards(filter)
	}
	print(" let filter = CIFilter.\(filterFunction)() // \(filterName)")
	if !isGenerator {
		print(" filter.inputImage = self")
	}
	// One `filter.property = parameter` assignment per input, excluding the input image.
	let otherInputSettingStatements: String = filter.inputKeys
		.filter { $0 != kCIInputImageKey }
		.map { inputKey in
			(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes
		.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in
			guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String
			else { return nil }
			let inputName: String = parameterName(displayName: displayName, filterName: filterName)
			let attributeType: String? = inputAttributes[kCIAttributeType] as? String
			// Special case - barcode generators, for some reason, want all their parameters as Float. Let's upgrade it here to keep the API simple.
			if nil != filterFunction.firstMatch(of: /(?i)codeGenerator$/),
			   let className = inputAttributes[kCIAttributeClass] as? String,
			   let attributeType = inputAttributes[kCIAttributeType] as? String,
			   className == "NSNumber" {
				if attributeType == kCIAttributeTypeBoolean {
					// Bools are surfaced as Bool in our API but the builtin wants a Float 0/1.
					return " filter.\(inputName) = Float(\(inputName) ? 1 : 0)"
				} else {
					return " filter.\(inputName) = Float(\(inputName))"
				}
			}
			// Annoying to have these negative cases, but the instances where
			// we need to wrap in a float are much more numerous!
			if !(filterFunction == "kMeans" && inputName == "count"), // this function's parameter wants an integer so leave alone
			   !(filterFunction == "cannyEdgeDetector" && inputName == "hysteresisPasses"),
			   !(filterFunction == "personSegmentation" && inputName == "qualityLevel"),
			   attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount {
				return " filter.\(inputName) = Float(\(inputName))" // We pass in Int, but function wants a Float
			}
			// fall through
			return " filter.\(inputName) = \(inputName)"
		}
		.joined(separator: "\n")
	print(otherInputSettingStatements)
	print(" return filter.outputImage ?? CIImage.empty()")
	print("}")
}
/// Top-level emitter for an image-to-image filter: either points at an existing
/// CIImage method that already covers the filter, or prints documentation,
/// availability, and the generated wrapper function.
private func outputImageToImage(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {
	let filterName = filter.name
	// Filters whose functionality already exists as a CIImage method; emit a pointer instead of a wrapper.
	let alreadyCoveredByImageMethod: [String: String] = ["CIAffineTransform": "transformed(by: CGAffineTransform)",
														 "CICrop": "cropped(to: CGRect)",
														 "CIClamp": "clamped(to: CGRect)",
														 "CISampleNearest": "samplingNearest()",
														 // https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest
														 "CIDepthBlurEffect": "depthBlurEffectFilter(for...)"
														 // https://developer.apple.com/documentation/coreimage/cicontext#4375374
	]
	let alreadyCoveredDocURLs: [String: String] = ["CISampleNearest": "https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest",
												   "CIDepthBlurEffect": "https://developer.apple.com/documentation/coreimage/cicontext#4375374"]
	if let existingFunction = alreadyCoveredByImageMethod[filterName] {
		print("")
		print("// ℹ️ \(filterName) already has a CIImage method: func \(existingFunction) -> CIImage")
		if let existingFunctionURL = alreadyCoveredDocURLs[filterName] {
			print("// \(existingFunctionURL)")
		}
		print("")
		return
	}
	outputDocumentation(filter, isGenerator: false, abstractLookup: abstractLookup, docLookup: docLookup)
	outputOSVersion(filter, functionMinima: functionMinima)
	// These filters have no CIFilterBuiltins protocol API, so fall back to the dictionary initializer.
	let filtersWithoutSwiftAPI: Set = ["CICameraCalibrationLensCorrection", "CIGuidedFilter"]
	if filtersWithoutSwiftAPI.contains(filterName) {
		outputImageDictionaryFunction(filter, isGenerator: false)
	} else {
		outputImageFunction(filter, isGenerator: false)
	}
}
/// Converts a long display name like "Gray Component Replacement" to the camelCase
/// input name used by CoreImage.CIFilterBuiltins ("grayComponentReplacement"),
/// then repairs a bunch of known inconsistencies between the two naming schemes.
private func parameterName(displayName: String, filterName: String) -> String {
	// CamelCase-join the space-separated words, lowercasing only the leading character.
	let camel: String = displayName
		.components(separatedBy: " ")
		.map(\.capitalized)
		.joined()
	let candidate: String = camel.prefix(1).lowercased() + camel.dropFirst()
	switch candidate {
	case "texture": return "textureImage"
	case "b": return "parameterB"
	case "c": return "parameterC"
	case "means": return "inputMeans"
	case "redVector": return "rVector"
	case "greenVector": return "gVector"
	case "blueVector": return "bVector"
	case "alphaVector": return "aVector"
	case "maximumStriationRadius": return "maxStriationRadius"
	case "color1": return "color0"
	case "color2": return "color1"
	case "radius1": return "radius0"
	case "radius2": return "radius1"
	case "image2" where filterName == "CIColorAbsoluteDifference": // only substitute for this function
		return "inputImage2"
	case let trailingPeriod where trailingPeriod.hasSuffix("."):
		// Deal with data anomaly where "." appears at the end of a parameter name.
		return String(trailingPeriod.dropLast(1))
	default:
		return candidate
	}
}
/// Builds one Swift parameter declaration — `name: Type` or `name: Type = default` —
/// for the generated function signature, mapping the Core Image attribute class
/// (plus attribute-type hints) to a Swift-friendly type.
/// - Returns: the declaration string, or nil if the display name or class attribute is missing.
private func parameterStatement(inputKey: String, inputAttributes: [String: AnyObject], filterName: String) -> String? {
	guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,
		  let attributeClass: String = inputAttributes[kCIAttributeClass] as? String
	else { return nil }
	let inputName: String = parameterName(displayName: displayName, filterName: filterName)
	let attributeType: String? = inputAttributes[kCIAttributeType] as? String
	var convertedClass: String
	switch attributeClass {
	case "NSNumber":
		// NSNumber splits three ways depending on the attribute type: Bool, Int, or Float.
		if attributeType == kCIAttributeTypeBoolean
			|| inputName == "extrapolate" { // Hack - missing info
			convertedClass = "Bool"
		} else if attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount
			|| inputName == "qualityLevel" || inputName == "count" { // Hack - missing or misleading info
			convertedClass = "Int"
		} else if [kCIAttributeTypeScalar, kCIAttributeTypeAngle, kCIAttributeTypeDistance, kCIAttributeTypeTime].contains(attributeType)
			|| inputName == "preferredAspectRatio" // missing info
		{
			convertedClass = "Float"
		} else {
			print("\n// 🛑 unknown number type \(inputName): \(attributeType ?? "")")
			convertedClass = "Float" // seems to be when no type is specified
		}
	case "CIVector":
		guard filterName != "CITemperatureAndTint" && filterName != "CIDepthBlurEffect" else { // special case, should remain a CIVector
			convertedClass = "CIVector"
			break
		}
		// Rectangles become CGRect, positions/offsets become CGPoint; otherwise stay CIVector.
		convertedClass = attributeType == kCIAttributeTypeRectangle
			? "CGRect"
			: attributeType == kCIAttributeTypePosition || attributeType == kCIAttributeTypeOffset
			? "CGPoint"
			: "CIVector" // CIVector tends to have no attribute type
	case "NSAffineTransform":
		convertedClass = "CGAffineTransform"
	case "NSData":
		convertedClass = "Data"
	case "NSString":
		convertedClass = "String"
	case "NSArray":
		convertedClass = "[Any]"
	case "CGImageMetadataRef":
		convertedClass = "CGImageMetadata"
	case "NSObject":
		if inputName == "colorSpace" {
			convertedClass = "CGColorSpace"
		} else {
			convertedClass = attributeClass // Unexpected case
			print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")")
		}
	case "NSValue":
		if attributeType == kCIAttributeTypeTransform {
			convertedClass = "CGAffineTransform"
		} else {
			convertedClass = attributeClass // Unexpected case
			print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")")
		}
	default:
		// Other cases where the class is the same: CIImage, CIColor, etc.
		convertedClass = attributeClass
	}
	if inputName == "backgroundImage" && convertedClass == "CIImage" {
		convertedClass = "CIImage?" // make optional, for our special identity handling
	}
	// Append a default value only when the attribute's default is deemed reasonable
	// and can be rendered as source text.
	var defaultStatement: String = ""
	if let defaultValue: AnyObject = inputAttributes[kCIAttributeDefault] {
		if hasReasonableDefaultValue(defaultValue, attributeType: attributeType, inputName: inputName) {
			let defaultValueString = formatSmart(defaultValue, attributeType: attributeType, inputName: inputName, filterName: filterName)
			if !defaultValueString.isEmpty {
				defaultStatement = " = \(defaultValueString)"
			}
		}
	}
	return "\(inputName): \(convertedClass)\(defaultStatement)"
}
// Look at value and/or context.
/// Decides whether an attribute's default/identity value is sensible enough to embed
/// in the generated API (as a default parameter value or an identity comparison).
/// Heuristics tuned per attribute type; unknown kinds are logged and accepted.
private func hasReasonableDefaultValue(_ value: Any, attributeType: String?, inputName: String) -> Bool {
	if nil != value as? Data {
		return false // Not feasible to have data anyhow
	} else if let number = value as? NSNumber {
		if attributeType == kCIAttributeTypeDistance {
			return number == 0
		} else if attributeType == kCIAttributeTypeInteger {
			return false
		} else if attributeType == kCIAttributeTypeCount {
			return false
		} else if attributeType == kCIAttributeTypeBoolean {
			return true
		} else if attributeType == kCIAttributeTypeAngle {
			return number.doubleValue <= Double.pi // avoid those weird angles that don't make any sense
		} else if attributeType == kCIAttributeTypeScalar {
			return true // not sure
		}
	} else if let defaultVector = value as? CIVector {
		if defaultVector.count > 4 {
			return false
		}
		if attributeType == kCIAttributeTypeRectangle {
			return defaultVector == CIVector(x: 0, y: 0, z: 0, w: 0) // only keep zero rectangle
		} else if attributeType == kCIAttributeTypePosition3 {
			return false
		} else if attributeType == kCIAttributeTypePosition {
			return defaultVector.x < 50 && defaultVector.y < 50 // seems like 50+ values are arbitrary coordinates
		} else if attributeType == kCIAttributeTypeOffset {
			// NOTE(review): comment and test may disagree — "non-zero seems arbitrary"
			// suggests accepting only (0, 0), yet this accepts offsets where BOTH
			// components are non-zero. Confirm intended behavior.
			return defaultVector.x != 0 && defaultVector.y != 0 // any non-zero points seem pretty arbitrary
		}
	} else if let color = value as? CIColor {
		// Only the most common, obviously-intentional colors are kept as defaults.
		return color == CIColor.black
			|| color == CIColor.white
			|| color == CIColor.clear
	} else if nil != value as? AffineTransform {
		return true
	} else if nil != value as? String {
		return true
	} else if inputName == "colorSpace" { // it's a CFType so not so easy to compare
		return true
	} else {
		print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
		return true // not sure yet
	}
	return false // reached when an NSNumber/CIVector subtype matched none of the cases above
}
/// Formats a default/identity value as Swift source text for the generated code.
/// Handles numbers, vectors (rect/point/generic), colors, strings, color spaces,
/// and affine transforms; anything else is logged and rendered via `String(describing:)`.
private func formatSmart(_ value: Any, attributeType: String?, inputName: String, filterName: String?) -> String {
	var result: String = ""
	if let number = value as? NSNumber {
		if attributeType == kCIAttributeTypeBoolean || inputName == "extrapolate" { // Hack - missing info
			result = number.boolValue.description
		} else {
			result = number.formatSmart
		}
	} else if let defaultVector = value as? CIVector {
		if attributeType == kCIAttributeTypeRectangle {
			result = defaultVector.formatRectSmart
		} else if attributeType == kCIAttributeTypePosition {
			result = defaultVector.formatPointSmart
		} else {
			result = defaultVector.formatVectorSmart
		}
	} else if let color = value as? CIColor {
		result = color.formatSmart
	} else if let string = value as? String {
		// Escape embedded quotes so the emitted literal stays valid Swift.
		result = "\"" + string.replacingOccurrences(of: "\"", with: "\\\"") + "\""
	} else if inputName == "colorSpace" {
		// Rebuild a CGColorSpace(name:) expression, e.g. kCGColorSpaceSRGB -> CGColorSpace.sRGB
		if CFGetTypeID(value as AnyObject) == CGColorSpace.typeID {
			let colorspace: CGColorSpace = value as! CGColorSpace
			if let name: String = colorspace.name as? String {
				var newName = name.replacing(/^kCGColorSpace/, with: "")
				newName = newName.prefix(1).lowercased() + newName.dropFirst()
				result = "CGColorSpace(name: CGColorSpace." + newName + ")!"
			}
		}
	} else if let transform = value as? AffineTransform {
		let transformIdentity: AffineTransform
		#if canImport(UIKit)
		transformIdentity = CGAffineTransform.identity
		#elseif canImport(AppKit)
		transformIdentity = NSAffineTransform()
		#endif
		// Special case these filters to default to identity. Their default values are weird!
		if transform == transformIdentity || filterName == "CIAffineClamp" || filterName == "CIAffineTile" {
			result = "CGAffineTransform.identity"
		} else {
			#if canImport(UIKit)
			let t: CGAffineTransform = transform
			// ty must interpolate t.ty (was t.tx — copy/paste defect)
			result = "CGAffineTransform(a: \(t.a.format5), b: \(t.b.format5), c: \(t.c.format5), d: \(t.d.format5), tx: \(t.tx.format5), ty: \(t.ty.format5))"
			#elseif canImport(AppKit)
			let t: NSAffineTransformStruct = transform.transformStruct
			result = "CGAffineTransform(a: \(t.m11.format5), b: \(t.m12.format5), c: \(t.m21.format5), d: \(t.m22.format5), tx: \(t.tX.format5), ty: \(t.tY.format5))"
			#endif
		}
	} else {
		print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
		result = String(describing: value)
	}
	return result
}
// https://unicode-org.github.io/icu/userguide/strings/regexp.html
private extension String {
	/// Converts a filter class name like "CIGaussianBlurFilter" into the
	/// CIFilterBuiltins-style function name "gaussianBlur": strips the "CI" prefix
	/// and "Filter" suffix, then fixes the leading capitals.
	var prettyFunction: String {
		let result: String = self.replacing(/^CI/, with: "").replacing(/Filter$/, with: "")
		return result.fixingCamelCase
	}
	// AbcDef -> abcDef but ABcdef -> aBcdef, ABCDEF -> abcDef - keep the last
	var fixingCamelCase: String {
		if nil != self.firstMatch(of: /^[A-Z][^A-Z]/)
			|| self.hasPrefix("SRGB") // special case
		{
			// Just one uppercase character, so make it lowercase and append the rest
			return self.prefix(1).lowercased() + self.dropFirst()
		} else if let foundUppercaseMatch = self.firstMatch(of: /^[A-Z]{2,}/) {
			// (Type is inferred as Regex<Substring>.Match; the previous explicit
			// annotation was malformed and did not compile.)
			// FIXME: Might need some tweaking to deal with complex characters. But since we are just modifying ASCII, this simple case is fine.
			// More than one, so make all but the last character lowercased, so that the last character there stays capitalized.
			let lowercasedPrefix = self[foundUppercaseMatch.range].lowercased()
			let remaining = self.dropFirst(lowercasedPrefix.count)
			if nil != remaining.firstMatch(of: /^[a-z]/) { // lowercase letter after uppercase, the usual. Keep last uppercase from prefix
				return String(lowercasedPrefix.dropLast()) + self.dropFirst(lowercasedPrefix.count - 1)
			} else {
				// Unusual; character after uppercase is not a lowercase character, e.g. a number. Keep all the uppercase characters.
				return String(lowercasedPrefix) + self.dropFirst(lowercasedPrefix.count)
			}
		}
		return self
	}
}
// Format numbers with UP TO five decimal places
private extension Float {
	/// Decimal string with at most five fraction digits; grouping separators are
	/// suppressed (the API to do so differs per platform, hence the #if).
	var format5: String {
		let fiveDigitFormatter = NumberFormatter()
		fiveDigitFormatter.numberStyle = .decimal
		#if canImport(UIKit)
		fiveDigitFormatter.numberStyle = .none
		#elseif canImport(AppKit)
		fiveDigitFormatter.hasThousandSeparators = false
		#endif
		fiveDigitFormatter.maximumFractionDigits = 5
		return fiveDigitFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}
private extension Double {
	/// Decimal string with at most five fraction digits and no grouping separators;
	/// mirrors `Float.format5`.
	var format5: String {
		let fiveDigitFormatter = NumberFormatter()
		fiveDigitFormatter.numberStyle = .decimal
		#if canImport(UIKit)
		fiveDigitFormatter.numberStyle = .none
		#elseif canImport(AppKit)
		fiveDigitFormatter.hasThousandSeparators = false
		#endif
		fiveDigitFormatter.maximumFractionDigits = 5
		return fiveDigitFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}
private extension CGFloat {
	/// Decimal string with at most five fraction digits and no grouping separators;
	/// mirrors `Float.format5`.
	var format5: String {
		let fiveDigitFormatter = NumberFormatter()
		fiveDigitFormatter.numberStyle = .decimal
		#if canImport(UIKit)
		fiveDigitFormatter.numberStyle = .none
		#elseif canImport(AppKit)
		fiveDigitFormatter.hasThousandSeparators = false
		#endif
		fiveDigitFormatter.maximumFractionDigits = 5
		return fiveDigitFormatter.string(from: NSNumber(value: self)) ?? ""
	}
}
private extension NSNumber {
	/// Decimal string with at most five fraction digits and no grouping separators;
	/// mirrors the Float/Double/CGFloat `format5` helpers.
	var format5: String {
		let fiveDigitFormatter = NumberFormatter()
		fiveDigitFormatter.numberStyle = .decimal
		#if canImport(UIKit)
		fiveDigitFormatter.numberStyle = .none
		#elseif canImport(AppKit)
		fiveDigitFormatter.hasThousandSeparators = false
		#endif
		fiveDigitFormatter.maximumFractionDigits = 5
		return fiveDigitFormatter.string(from: self) ?? ""
	}
	/// Like `format5`, but renders a few well-known angle constants symbolically.
	var formatSmart: String {
		let raw = self.doubleValue
		if raw == Double.pi {
			return ".pi"
		}
		if raw == Double.pi / 2 {
			return ".pi/2"
		}
		if raw == Double.pi * 18 {
			return ".pi*18" // for vortexDistortion
		}
		// What about triangleKaleidoscope 5.924285296593801
		return self.format5
	}
}
private extension CIVector {
	/// Renders this vector as CGPoint initializer source text; (0, 0) becomes `.zero`.
	var formatPointSmart: String {
		guard x != 0 || y != 0 else { return ".zero" }
		return ".init(x: \(x.format5), y: \(y.format5))"
	}
	// The CGRect structure’s X, Y, height and width values are stored in the vector’s X, Y, Z and W properties.
	/// Renders this vector as CGRect initializer source text; all-zero becomes `.zero`.
	var formatRectSmart: String {
		guard x != 0 || y != 0 || z != 0 || w != 0 else { return ".zero" }
		return ".init(x: \(x.format5), y: \(y.format5), width: \(w.format5), height: \(z.format5))"
	}
	/// Renders this vector as a CIVector `.init(...)` expression with one labeled
	/// argument per stored component (up to four).
	var formatVectorSmart: String {
		guard count <= 4 else { return "🛑 no vector initializer for count > 4" }
		let labels = ["x", "y", "z", "w"]
		let components = [x, y, z, w].prefix(count).map(\.format5)
		let arguments = zip(labels, components)
			.map { "\($0): \($1)" }
			.joined(separator: ", ")
		return ".init(\(arguments))"
	}
}
private extension CIColor {
	/// Renders this color as Swift source: a named `CIColor` constant when it matches one,
	/// otherwise a `CIColor(red:green:blue:...)` initializer expression. The color space
	/// argument is included only when the space is neither sRGB nor device RGB; alpha is
	/// included only when it differs from 1.
	var formatSmart: String {
		switch self {
		case CIColor.black: return "CIColor.black" // Include "CIColor." so it's compatible with older OS
		case CIColor.white: return "CIColor.white"
		case CIColor.gray: return "CIColor.gray"
		case CIColor.red: return "CIColor.red"
		case CIColor.green: return "CIColor.green"
		case CIColor.blue: return "CIColor.blue"
		case CIColor.cyan: return "CIColor.cyan"
		case CIColor.magenta: return "CIColor.magenta"
		case CIColor.yellow: return "CIColor.yellow"
		case CIColor.clear: return "CIColor.clear"
		default:
			// Build a "CGColorSpace.xyz" expression from the kCGColorSpace… constant name.
			let colorSpaceName: String = colorSpace.name as? String ?? "" // e.g. kCGColorSpaceDeviceRGB
			let colorSpaceNameSuffix: String = colorSpaceName.replacing(/^kCGColorSpace/, with: "")
			let colorSpaceNameFormatted = "CGColorSpace." + colorSpaceNameSuffix.prefix(1).lowercased() + colorSpaceNameSuffix.dropFirst()
			let colorSpaceSRGB: String = CGColorSpace.sRGB as String
			// Some issues with kCGColorSpaceDeviceRGB since we would have to create that. Let's just ignore.
			if alpha != 1.0 && colorSpaceName != colorSpaceSRGB
				&& colorSpaceName != "kCGColorSpaceDeviceRGB" {
				return "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha), colorSpace: \(colorSpaceNameFormatted))"
			} else if alpha == 1.0 && colorSpaceName != colorSpaceSRGB
				&& colorSpaceName != "kCGColorSpaceDeviceRGB" {
				return "CIColor(red: \(red), green: \(green), blue: \(blue), colorSpace: \(colorSpaceNameFormatted))"
			} else
			if alpha != 1.0 {
				return "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha))"
			} else {
				return "CIColor(red: \(red), green: \(green), blue: \(blue))"
			}
		}
	}
}
private extension CIFilter {
	/// Input keys (excluding the input image) whose attributes declare an identity
	/// value that `hasReasonableDefaultValue` deems usable for identity guards.
	var identityInputKeys: [String] {
		var usableKeys: [String] = []
		for inputKey in inputKeys where inputKey != kCIInputImageKey {
			let inputAttributes = attributes[inputKey] as? [String: AnyObject] ?? [:]
			guard let displayName = inputAttributes[kCIAttributeDisplayName] as? String,
				  let identityValue: Any = inputAttributes[kCIAttributeIdentity]
			else { continue }
			let attributeType = inputAttributes[kCIAttributeType] as? String
			let inputName = parameterName(displayName: displayName, filterName: self.name)
			if hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName) {
				usableKeys.append(inputKey)
			}
		}
		return usableKeys
	}
}
================================================
FILE: Generator/FunctionMinima.json
================================================
{
"areaAverage": "11.0",
"areaHistogram": "11.0",
"areaMaximum": "11.0",
"areaMaximumAlpha": "11.0",
"areaMinimum": "11.0",
"areaMinimumAlpha": "11.0",
"areaMinMax": "11.0",
"areaMinMaxRed": "11.0",
"bumpDistortion": "11.0",
"bumpDistortionLinear": "11.0",
"circleSplashDistortion": "11.0",
"circularWrap": "11.0",
"colorAbsoluteDifference": "11.0",
"colorThreshold": "11.0",
"colorThresholdOtsu": "11.0",
"columnAverage": "11.0",
"displacementDistortion": "11.0",
"droste": "11.0",
"glassDistortion": "11.0",
"glassLozenge": "11.0",
"histogramDisplay": "11.0",
"holeDistortion": "11.0",
"kMeans": "11.0",
"lightTunnel": "11.0",
"ninePartStretched": "11.0",
"ninePartTiled": "11.0",
"pinchDistortion": "11.0",
"rowAverage": "11.0",
"stretchCrop": "11.0",
"torusLensDistortion": "11.0",
"twirlDistortion": "11.0",
"vortexDistortion": "11.0",
"convolutionRGB3X3": "12.0",
"convolutionRGB5X5": "12.0",
"convolutionRGB7X7": "12.0",
"convolutionRGB9Horizontal": "12.0",
"convolutionRGB9Vertical": "12.0",
"linearLightBlendMode": "12.0",
"personSegmentation": "12.0",
"vividLightBlendMode": "12.0",
"areaLogarithmicHistogram": "13.0",
"convertLabToRGB": "13.0",
"convertRGBtoLab": "13.0",
"colorCubesMixedWithMask": "13.0",
"colorCubeWithColorSpace": "13.0",
"colorCube": "13.0",
"attributedTextImageGenerator": "13.0",
"textImageGenerator": "13.0"
}
================================================
FILE: Generator/MissingParameterDocumentation.json
================================================
{
"CIBokehBlur" : {
"softness" : "The softness of the bokeh effect"
},
"CIDepthBlurEffect" : {
"_NOTE" : "THIS WAS GLEANED FROM WWDC2017#508 VIDEO; NO OFFICIAL APPLE DOCUMENTATION FOUND. NOT ACTUALLY USED HERE BECAUSE OF SEVERAL BUILT-IN INITIALIZERS.",
"aperture" : "Simulated lens aperture to adjust blur for unfocused elements",
"auxDataMetadata" : "UNKNOWN",
"calibrationData" : "UNKNOWN",
"chinPositions" : "Vector of up to 4 x,y positions indicating where people's chins are",
"disparityImage" : "Grayscale image indicating depth; lighter is nearer.",
"focusRectangle" : "Part of the rectangle to make sure to put into focus",
"gainMap" : "UNKNOWN",
"leftEyePositions" : "Vector of up to 4 x,y positions indicating where people's left eyes are",
"lumaNoiseScale" : "UNKNOWN",
"nosePositions" : "Vector of up to 4 x,y positions indicating where people's noses are",
"rightEyePositions" : "Vector of up to 4 x,y positions indicating where people's right eyes are",
"scaleFactor" : "Integrated downsampling (rather than doing it later) since this is computationally expensive",
"shape" : "UNKNOWN"
},
"CIDepthOfField" : {
"point0" : "A set of coordinates marking the first point to be focused on",
"point1" : "A set of coordinates marking the second point to be focused on",
"unsharpMaskIntensity" : "The intensity of the unsharp mask effect",
"unsharpMaskRadius" : "The radius of the unsharp mask effect applied to the in-focus area of effect"
},
"CIDroste" : {
"insetPoint0" : "The x and y position that defines the first inset point",
"insetPoint1" : "The x and y position that defines the second inset point",
"periodicity" : "The amount of intervals",
"rotation" : "The angle of the rotation, in radians",
"strands" : "The amount of strands",
"zoom" : "The zoom of the effect"
},
"CIEdgePreserveUpsampleFilter" : {
"lumaSigma" : "Influence of the input image’s luma information on the upsampling operation",
"smallImage" : "An image representing the reference for scaling the input image with the type CIImage",
"spatialSigma" : "The influence of the input image’s spatial information on the upsampling operation"
},
"CIGuidedFilter" : {
"_NOTE" : "THIS WAS GLEANED FROM DESCRIPTIONS OF THIS FILTER IN GENERAL; NO OFFICIAL APPLE DOCUMENTATION FOUND",
"epsilon" : "Smoothness. A higher value means more smoothing."
},
"CIHueSaturationValueGradient" : {
"dither" : "A boolean value specifying whether to dither the generated output",
"softness" : "The softness of the generated color wheel"
},
"CILenticularHaloGenerator" : {
"haloOverlap" : "The overlap of red, green, and blue halos. A value of 1 results in a full overlap."
},
"CIPerspectiveCorrection" : {
"crop" : "A rectangle that specifies the extent of the corrected image"
},
"CIToneCurve" : {
"point0" : "A vector containing the position of the first point of the tone curve",
"point1" : "A vector containing the position of the second point of the tone curve",
"point2" : "A vector containing the position of the third point of the tone curve",
"point3" : "A vector containing the position of the fourth point of the tone curve",
"point4" : "A vector containing the position of the fifth point of the tone curve"
}
}
================================================
FILE: Generator/abstracts.json
================================================
{
"CIAccordionFoldTransition": "Transitions from one image to another of differing dimensions by unfolding and crossfading.",
"CIAdditionCompositing": "Adds color components to achieve a brightening effect.",
"CIAffineClamp": "Performs an affine transform on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards.",
"CIAffineTile": "Applies an affine transform to an image and then tiles the transformed image.",
"CIAffineTransform": "Applies an affine transform to an image.",
"CIAreaAverage": "Returns a single-pixel image that contains the average color for the region of interest.",
"CIAreaHistogram": "Returns a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area.",
"CIAreaMaximum": "Returns a single-pixel image that contains the maximum color components for the region of interest.",
"CIAreaMaximumAlpha": "Returns a single-pixel image that contains the color vector with the maximum alpha value for the region of interest.",
"CIAreaMinimum": "Returns a single-pixel image that contains the minimum color components for the region of interest.",
"CIAreaMinimumAlpha": "Returns a single-pixel image that contains the color vector with the minimum alpha value for the region of interest.",
"CIAztecCodeGenerator": "Generates an Aztec code (two-dimensional barcode) from input data.",
"CIBarsSwipeTransition": "Transitions from one image to another by passing a bar over the source image.",
"CIBlendWithAlphaMask": "Uses alpha values from a mask to interpolate between an image and the background.",
"CIBlendWithMask": "Uses values from a grayscale mask to interpolate between an image and the background.",
"CIBloom": "Softens edges and applies a pleasant glow to an image.",
"CIBoxBlur": "Blurs an image using a box-shaped convolution kernel.",
"CIBumpDistortion": "Creates a bump that originates at a specified point in the image.",
"CIBumpDistortionLinear": "Creates a concave or convex distortion that originates from a line in the image.",
"CICheckerboardGenerator": "Generates a checkerboard pattern.",
"CICircleSplashDistortion": "Distorts the pixels starting at the circumference of a circle and emanating outward.",
"CICircularScreen": "Simulates a circular-shaped halftone screen.",
"CICircularWrap": "Wraps an image around a transparent circle.",
"CICMYKHalftone": "Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.",
"CICode128BarcodeGenerator": "Generates a Code 128 one-dimensional barcode from input data.",
"CIColorBlendMode": "Uses the luminance values of the background with the hue and saturation values of the source image.",
"CIColorBurnBlendMode": "Darkens the background image samples to reflect the source image samples.",
"CIColorClamp": "Modifies color values to keep them within a specified range.",
"CIColorControls": "Adjusts saturation, brightness, and contrast values.",
"CIColorCrossPolynomial": "Modifies the pixel values in an image by applying a set of polynomial cross-products.",
"CIColorCube": "Uses a three-dimensional color table to transform the source image pixels.",
"CIColorCubeWithColorSpace": "Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.",
"CIColorDodgeBlendMode": "Brightens the background image samples to reflect the source image samples.",
"CIColorInvert": "Inverts the colors in an image.",
"CIColorMap": "Performs a nonlinear transformation of source color values using mapping values provided in a table.",
"CIColorMatrix": "Multiplies source color values and adds a bias factor to each color component.",
"CIColorMonochrome": "Remaps colors so they fall within shades of a single color.",
"CIColorPolynomial": "Modifies the pixel values in an image by applying a set of cubic polynomials.",
"CIColorPosterize": "Remaps red, green, and blue color components to the number of brightness values you specify for each color component.",
"CIColumnAverage": "Returns a 1-pixel high image that contains the average color for each scan column.",
"CIComicEffect": "Simulates a comic book drawing by outlining edges and applying a color halftone effect.",
"CIConstantColorGenerator": "Generates a solid color.",
"CIConvolution3X3": "Modifies pixel values by performing a 3x3 matrix convolution.",
"CIConvolution5X5": "Modifies pixel values by performing a 5x5 matrix convolution.",
"CIConvolution7X7": "Modifies pixel values by performing a 7x7 matrix convolution.",
"CIConvolution9Horizontal": "Modifies pixel values by performing a 9-element horizontal convolution.",
"CIConvolution9Vertical": "Modifies pixel values by performing a 9-element vertical convolution.",
"CICopyMachineTransition": "Transitions from one image to another by simulating the effect of a copy machine.",
"CICrop": "Applies a crop to an image.",
"CICrystallize": "Creates polygon-shaped color blocks by aggregating source pixel-color values.",
"CIDarkenBlendMode": "Creates composite image samples by choosing the darker samples (from either the source image or the background).",
"CIDepthOfField": "Simulates a depth of field effect.",
"CIDifferenceBlendMode": "Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value.",
"CIDiscBlur": "Blurs an image using a disc-shaped convolution kernel.",
"CIDisintegrateWithMaskTransition": "Transitions from one image to another using the shape defined by a mask.",
"CIDisplacementDistortion": "Applies the grayscale values of the second image to the first image.",
"CIDissolveTransition": "Uses a dissolve to transition from one image to another.",
"CIDivideBlendMode": "Divides the background image sample color from the source image sample color.",
"CIDotScreen": "Simulates the dot patterns of a halftone screen.",
"CIDroste": "Recursively draws a portion of an image in imitation of an M. C. Escher drawing.",
"CIEdges": "Finds all edges in an image and displays them in color.",
"CIEdgeWork": "Produces a stylized black-and-white rendition of an image that looks similar to a woodblock cutout.",
"CIEightfoldReflectedTile": "Produces a tiled image from a source image by applying an 8-way reflected symmetry.",
"CIExclusionBlendMode": "Produces an effect similar to that produced by the CIDifferenceBlendMode filter but with lower contrast.",
"CIExposureAdjust": "Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop.",
"CIFalseColor": "Maps luminance to a color ramp of two colors.",
"CIFlashTransition": "Transitions from one image to another by creating a flash.",
"CIFourfoldReflectedTile": "Produces a tiled image from a source image by applying a 4-way reflected symmetry.",
"CIFourfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.",
"CIFourfoldTranslatedTile": "Produces a tiled image from a source image by applying 4 translation operations.",
"CIGammaAdjust": "Adjusts midtone brightness.",
"CIGaussianBlur": "Spreads source pixels by an amount specified by a Gaussian distribution.",
"CIGaussianGradient": "Generates a gradient that varies from one color to another using a Gaussian distribution.",
"CIGlassDistortion": "Distorts an image by applying a glass-like texture.",
"CIGlassLozenge": "Creates a lozenge-shaped lens and distorts the portion of the image over which the lens is placed.",
"CIGlideReflectedTile": "Produces a tiled image from a source image by translating and smearing the image.",
"CIGloom": "Dulls the highlights of an image.",
"CIHardLightBlendMode": "Either multiplies or screens colors, depending on the source image sample color.",
"CIHatchedScreen": "Simulates the hatched pattern of a halftone screen.",
"CIHeightFieldFromMask": "Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask.",
"CIHexagonalPixellate": "Maps an image to colored hexagons whose color is defined by the replaced pixels.",
"CIHighlightShadowAdjust": "Adjust the tonal mapping of an image while preserving spatial detail.",
"CIHistogramDisplayFilter": "Generates a histogram image from the output of the CIAreaHistogram filter.",
"CIHoleDistortion": "Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.",
"CIHueAdjust": "Changes the overall hue, or tint, of the source pixels.",
"CIHueBlendMode": "Uses the luminance and saturation values of the background image with the hue of the input image.",
"CIKaleidoscope": "Produces a kaleidoscopic image from a source image by applying 12-way symmetry.",
"CILanczosScaleTransform": "Produces a high-quality, scaled version of a source image.",
"CILenticularHaloGenerator": "Simulates a lens flare.",
"CILightenBlendMode": "Creates composite image samples by choosing the lighter samples (either from the source image or the background).",
"CILightTunnel": "Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.",
"CILinearBurnBlendMode": "Darkens the background image samples to reflect the source image samples while also increasing contrast.",
"CILinearDodgeBlendMode": "Brightens the background image samples to reflect the source image samples while also increasing contrast.",
"CILinearGradient": "Generates a gradient that varies along a linear axis between two defined endpoints.",
"CILinearToSRGBToneCurve": "Maps color intensity from a linear gamma curve to the sRGB color space.",
"CILineOverlay": "Creates a sketch that outlines the edges of an image in black.",
"CILineScreen": "Simulates the line pattern of a halftone screen.",
"CILuminosityBlendMode": "Uses the hue and saturation of the background image with the luminance of the input image.",
"CIMaskedVariableBlur": "Blurs the source image according to the brightness levels in a mask image.",
"CIMaskToAlpha": "Converts a grayscale image to a white image that is masked by alpha.",
"CIMaximumComponent": "Returns a grayscale image from max(r,g,b).",
"CIMaximumCompositing": "Computes the maximum value, by color component, of two input images and creates an output image using the maximum values.",
"CIMedianFilter": "Computes the median value for a group of neighboring pixels and replaces each pixel value with the median.",
"CIMinimumComponent": "Returns a grayscale image from min(r,g,b).",
"CIMinimumCompositing": "Computes the minimum value, by color component, of two input images and creates an output image using the minimum values.",
"CIModTransition": "Transitions from one image to another by revealing the target image through irregularly shaped holes.",
"CIMotionBlur": "Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.",
"CIMultiplyBlendMode": "Multiplies the input image samples with the background image samples.",
"CIMultiplyCompositing": "Multiplies the color component of two input images and creates an output image using the multiplied values.",
"CINoiseReduction": "Reduces noise using a threshold value to define what is considered noise.",
"CIOpTile": "Segments an image, applying any specified scaling and rotation, and then assembles the image again to give an op art appearance.",
"CIOverlayBlendMode": "Either multiplies or screens the input image samples with the background image samples, depending on the background color.",
"CIPageCurlTransition": "Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.",
"CIPageCurlWithShadowTransition": "Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.",
"CIParallelogramTile": "Warps an image by reflecting it in a parallelogram, and then tiles the result.",
"CIPDF417BarcodeGenerator": "Generates a PDF417 code (two-dimensional barcode) from input data.",
"CIPerspectiveCorrection": "Applies a perspective correction, transforming an arbitrary quadrilateral region in the source image to a rectangular output image.",
"CIPerspectiveTile": "Applies a perspective transform to an image and then tiles the result.",
"CIPerspectiveTransform": "Alters the geometry of an image to simulate the observer changing viewing position.",
"CIPerspectiveTransformWithExtent": "Alters the geometry of a portion of an image to simulate the observer changing viewing position.",
"CIPhotoEffectChrome": "Applies a preconfigured set of effects that imitate vintage photography film with exaggerated color.",
"CIPhotoEffectFade": "Applies a preconfigured set of effects that imitate vintage photography film with diminished color.",
"CIPhotoEffectInstant": "Applies a preconfigured set of effects that imitate vintage photography film with distorted colors.",
"CIPhotoEffectMono": "Applies a preconfigured set of effects that imitate black-and-white photography film with low contrast.",
"CIPhotoEffectNoir": "Applies a preconfigured set of effects that imitate black-and-white photography film with exaggerated contrast.",
"CIPhotoEffectProcess": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized cool colors.",
"CIPhotoEffectTonal": "Applies a preconfigured set of effects that imitate black-and-white photography film without significantly altering contrast.",
"CIPhotoEffectTransfer": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized warm colors.",
"CIPinchDistortion": "Creates a rectangular area that pinches source pixels inward, distorting those pixels closest to the rectangle the most.",
"CIPinLightBlendMode": "Conditionally replaces background image samples with source image samples depending on the brightness of the source image samples.",
"CIPixellate": "Makes an image blocky by mapping the image to colored squares whose color is defined by the replaced pixels.",
"CIPointillize": "Renders the source image in a pointillistic style.",
"CIQRCodeGenerator": "Generates a Quick Response code (two-dimensional barcode) from input data.",
"CIRadialGradient": "Generates a gradient that varies radially between two circles having the same center.",
"CIRandomGenerator": "Generates an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range.",
"CIRippleTransition": "Transitions from one image to another by creating a circular wave that expands from the center point, revealing the new image in the wake of the wave.",
"CIRowAverage": "Returns a 1-pixel high image that contains the average color for each scan row.",
"CISaturationBlendMode": "Uses the luminance and hue values of the background image with the saturation of the input image.",
"CIScreenBlendMode": "Multiplies the inverse of the input image samples with the inverse of the background image samples.",
"CISepiaTone": "Maps the colors of an image to various shades of brown.",
"CIShadedMaterial": "Produces a shaded image from a height field.",
"CISharpenLuminance": "Increases image detail by sharpening.",
"CISixfoldReflectedTile": "Produces a tiled image from a source image by applying a 6-way reflected symmetry.",
"CISixfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 60 degrees.",
"CISmoothLinearGradient": "Generates a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints.",
"CISoftLightBlendMode": "Either darkens or lightens colors, depending on the input image sample color.",
"CISourceAtopCompositing": "Places the input image over the background image, then uses the luminance of the background image to determine what to show.",
"CISourceInCompositing": "Uses the background image to define what to leave in the input image, effectively cropping the input image.",
"CISourceOutCompositing": "Uses the background image to define what to take out of the input image.",
"CISourceOverCompositing": "Places the input image over the input background image.",
"CISpotColor": "Replaces one or more color ranges with spot colors.",
"CISpotLight": "Applies a directional spotlight effect to an image.",
"CISRGBToneCurveToLinear": "Maps color intensity from the sRGB color space to a linear gamma curve.",
"CIStarShineGenerator": "Generates a starburst pattern that is similar to a supernova; can be used to simulate a lens flare.",
"CIStraightenFilter": "Rotates the source image by the specified angle in radians.",
"CIStretchCrop": "Distorts an image by stretching and or cropping it to fit a target size.",
"CIStripesGenerator": "Generates a stripe pattern.",
"CISubtractBlendMode": "Subtracts the background image sample color from the source image sample color.",
"CISunbeamsGenerator": "Generates a sun effect.",
"CISwipeTransition": "Transitions from one image to another by simulating a swiping action.",
"CITemperatureAndTint": "Adapts the reference white point for an image.",
"CIToneCurve": "Adjusts tone response of the R, G, and B channels of an image.",
"CITorusLensDistortion": "Creates a torus-shaped lens and distorts the portion of the image over which the lens is placed.",
"CITriangleKaleidoscope": "Maps a triangular portion of an input image to create a kaleidoscope effect.",
"CITriangleTile": "Maps a triangular portion of image to a triangular area and then tiles the result.",
"CITwelvefoldReflectedTile": "Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.",
"CITwirlDistortion": "Rotates pixels around a point to give a twirling effect.",
"CIUnsharpMask": "Increases the contrast of the edges between pixels of different colors in an image.",
"CIVibrance": "Adjusts the saturation of an image while keeping pleasing skin tones.",
"CIVignette": "Reduces the brightness of an image at the periphery.",
"CIVignetteEffect": "Modifies the brightness of an image around the periphery of a specified region.",
"CIVortexDistortion": "Rotates pixels around a point to simulate a vortex.",
"CIWhitePointAdjust": "Adjusts the reference white point for an image and maps all colors in the source using the new reference.",
"CIZoomBlur": "Simulates the effect of zooming the camera while capturing the image."
}
================================================
FILE: Generator/docLookup.json
================================================
{
"accordionFoldTransition": "3228263-accordionfoldtransition",
"additionCompositing": "3228264-additioncompositing",
"affineClamp": "3228265-affineclamp",
"affineTile": "3228266-affinetile",
"areaAverage": "3547111-areaaverage",
"areaHistogram": "3547112-areahistogram",
"areaLogarithmicHistogram": "4401848-arealogarithmichistogram",
"areaMaximum": "3547114-areamaximum",
"areaMaximumAlpha": "3547113-areamaximumalpha",
"areaMinMax": "3547115-areaminmax",
"areaMinMaxRed": "3547116-areaminmaxred",
"areaMinimum": "3547118-areaminimum",
"areaMinimumAlpha": "3547117-areaminimumalpha",
"attributedTextImageGenerator": "3228267-attributedtextimagegenerator",
"aztecCodeGenerator": "3228268-azteccodegenerator",
"barcodeGenerator": "3228269-barcodegenerator",
"barsSwipeTransition": "3228270-barsswipetransition",
"bicubicScaleTransform": "3228271-bicubicscaletransform",
"blendWithAlphaMask": "3228272-blendwithalphamask",
"blendWithBlueMask": "3228273-blendwithbluemask",
"blendWithMask": "3228274-blendwithmask",
"blendWithRedMask": "3228275-blendwithredmask",
"bloom": "3228276-bloom",
"blurredRectangleGenerator": "4401849-blurredrectanglegenerator",
"bokehBlur": "3228277-bokehblur",
"boxBlur": "3228278-boxblur",
"bumpDistortion": "4401850-bumpdistortion",
"bumpDistortionLinear": "4401851-bumpdistortionlinear",
"cannyEdgeDetector": "4401852-cannyedgedetector",
"checkerboardGenerator": "3228279-checkerboardgenerator",
"circleSplashDistortion": "4401853-circlesplashdistortion",
"circularScreen": "3228280-circularscreen",
"circularWrap": "4401854-circularwrap",
"cmykHalftone": "3228259-cmykhalftone",
"code128BarcodeGenerator": "3228281-code128barcodegenerator",
"colorAbsoluteDifference": "3547119-colorabsolutedifference",
"colorBlendMode": "3228282-colorblendmode",
"colorBurnBlendMode": "3228283-colorburnblendmode",
"colorClamp": "3228284-colorclamp",
"colorControls": "3228285-colorcontrols",
"colorCrossPolynomial": "3228286-colorcrosspolynomial",
"colorCube": "3228287-colorcube",
"colorCubeWithColorSpace": "3228288-colorcubewithcolorspace",
"colorCubesMixedWithMask": "3228289-colorcubesmixedwithmask",
"colorCurves": "3228290-colorcurves",
"colorDodgeBlendMode": "3228291-colordodgeblendmode",
"colorInvert": "3228292-colorinvert",
"colorMap": "3228293-colormap",
"colorMatrix": "3228294-colormatrix",
"colorMonochrome": "3228295-colormonochrome",
"colorPolynomial": "3228296-colorpolynomial",
"colorPosterize": "3228297-colorposterize",
"colorThreshold": "3547120-colorthreshold",
"colorThresholdOtsu": "4401855-colorthresholdotsu",
"columnAverage": "3547121-columnaverage",
"comicEffect": "3228298-comiceffect",
"convertLabToRGB": "4401856-convertlabtorgb",
"convertRGBtoLab": "4401857-convertrgbtolab",
"convolution3X3": "3228299-convolution3x3",
"convolution5X5": "3228300-convolution5x5",
"convolution7X7": "3228301-convolution7x7",
"convolution9Horizontal": "3228302-convolution9horizontal",
"convolution9Vertical": "3228303-convolution9vertical",
"convolutionRGB3X3": "4401858-convolutionrgb3x3",
"convolutionRGB5X5": "4401859-convolutionrgb5x5",
"convolutionRGB7X7": "4401860-convolutionrgb7x7",
"convolutionRGB9Horizontal": "4401861-convolutionrgb9horizontal",
"convolutionRGB9Vertical": "4401862-convolutionrgb9vertical",
"copyMachineTransition": "3228304-copymachinetransition",
"coreMLModel": "3228305-coremlmodel",
"crystallize": "3228306-crystallize",
"darkenBlendMode": "3228307-darkenblendmode",
"depthOfField": "3228308-depthoffield",
"depthToDisparity": "3228309-depthtodisparity",
"differenceBlendMode": "3228310-differenceblendmode",
"discBlur": "3228311-discblur",
"disintegrateWithMaskTransition": "3228312-disintegratewithmasktransition",
"disparityToDepth": "3228313-disparitytodepth",
"displacementDistortion": "4401863-displacementdistortion",
"dissolveTransition": "3228314-dissolvetransition",
"dither": "3228315-dither",
"divideBlendMode": "3228316-divideblendmode",
"documentEnhancer": "3228317-documentenhancer",
"dotScreen": "3228318-dotscreen",
"droste": "4401864-droste",
"edgePreserveUpsample": "3228319-edgepreserveupsample",
"edgeWork": "3228320-edgework",
"edges": "3228321-edges",
"eightfoldReflectedTile": "3228322-eightfoldreflectedtile",
"exclusionBlendMode": "3228323-exclusionblendmode",
"exposureAdjust": "3228324-exposureadjust",
"falseColor": "3228325-falsecolor",
"flashTransition": "3228326-flashtransition",
"fourfoldReflectedTile": "3228327-fourfoldreflectedtile",
"fourfoldRotatedTile": "3228328-fourfoldrotatedtile",
"fourfoldTranslatedTile": "3228329-fourfoldtranslatedtile",
"gaborGradients": "3325508-gaborgradients",
"gammaAdjust": "3228330-gammaadjust",
"gaussianBlur": "3228331-gaussianblur",
"gaussianGradient": "3228332-gaussiangradient",
"glassDistortion": "4401865-glassdistortion",
"glassLozenge": "4401866-glasslozenge",
"glideReflectedTile": "3228333-glidereflectedtile",
"gloom": "3228334-gloom",
"hardLightBlendMode": "3228335-hardlightblendmode",
"hatchedScreen": "3228336-hatchedscreen",
"heightFieldFromMask": "3228337-heightfieldfrommask",
"hexagonalPixellate": "3228338-hexagonalpixellate",
"highlightShadowAdjust": "3228339-highlightshadowadjust",
"histogramDisplay": "3547122-histogramdisplay",
"holeDistortion": "4401867-holedistortion",
"hueAdjust": "3228340-hueadjust",
"hueBlendMode": "3228341-hueblendmode",
"hueSaturationValueGradient": "3228342-huesaturationvaluegradient",
"kMeans": "3547110-kmeans",
"kaleidoscope": "3228343-kaleidoscope",
"keystoneCorrectionCombined": "3325509-keystonecorrectioncombined",
"keystoneCorrectionHorizontal": "3325510-keystonecorrectionhorizontal",
"keystoneCorrectionVertical": "3325511-keystonecorrectionvertical",
"labDeltaE": "3228260-labdeltae",
"lanczosScaleTransform": "3228344-lanczosscaletransform",
"lenticularHaloGenerator": "3228345-lenticularhalogenerator",
"lightTunnel": "4401868-lighttunnel",
"lightenBlendMode": "3228346-lightenblendmode",
"lineOverlay": "3228347-lineoverlay",
"lineScreen": "3228348-linescreen",
"linearBurnBlendMode": "3228349-linearburnblendmode",
"linearDodgeBlendMode": "3228350-lineardodgeblendmode",
"linearGradient": "3228351-lineargradient",
"linearLightBlendMode": "4401869-linearlightblendmode",
"linearToSRGBToneCurve": "3228352-lineartosrgbtonecurve",
"luminosityBlendMode": "3228353-luminosityblendmode",
"maskToAlpha": "3228354-masktoalpha",
"maskedVariableBlur": "3228355-maskedvariableblur",
"maximumComponent": "3228356-maximumcomponent",
"maximumCompositing": "3228357-maximumcompositing",
"median": "3228358-median",
"meshGenerator": "3228359-meshgenerator",
"minimumComponent": "3228360-minimumcomponent",
"minimumCompositing": "3228361-minimumcompositing",
"mix": "3228362-mix",
"modTransition": "3228363-modtransition",
"morphologyGradient": "3228364-morphologygradient",
"morphologyMaximum": "3228365-morphologymaximum",
"morphologyMinimum": "3228366-morphologyminimum",
"morphologyRectangleMaximum": "3228367-morphologyrectanglemaximum",
"morphologyRectangleMinimum": "3228368-morphologyrectangleminimum",
"motionBlur": "3228369-motionblur",
"multiplyBlendMode": "3228370-multiplyblendmode",
"multiplyCompositing": "3228371-multiplycompositing",
"ninePartStretched": "4401871-ninepartstretched",
"ninePartTiled": "4401872-nineparttiled",
"noiseReduction": "3228372-noisereduction",
"opTile": "3228373-optile",
"overlayBlendMode": "3228374-overlayblendmode",
"pageCurlTransition": "3228375-pagecurltransition",
"pageCurlWithShadowTransition": "3228376-pagecurlwithshadowtransition",
"paletteCentroid": "3228377-palettecentroid",
"palettize": "3228378-palettize",
"parallelogramTile": "3228379-parallelogramtile",
"pdf417BarcodeGenerator": "3228261-pdf417barcodegenerator",
"personSegmentation": "4401873-personsegmentation",
"perspectiveCorrection": "3228380-perspectivecorrection",
"perspectiveRotate": "3325512-perspectiverotate",
"perspectiveTile": "3228381-perspectivetile",
"perspectiveTransform": "3228382-perspectivetransform",
"perspectiveTransformWithExtent": "3228383-perspectivetransformwithextent",
"photoEffectChrome": "3228384-photoeffectchrome",
"photoEffectFade": "3228385-photoeffectfade",
"photoEffectInstant": "3228386-photoeffectinstant",
"photoEffectMono": "3228387-photoeffectmono",
"photoEffectNoir": "3228388-photoeffectnoir",
"photoEffectProcess": "3228389-photoeffectprocess",
"photoEffectTonal": "3228390-photoeffecttonal",
"photoEffectTransfer": "3228391-photoeffecttransfer",
"pinLightBlendMode": "3228392-pinlightblendmode",
"pinchDistortion": "4401874-pinchdistortion",
"pixellate": "3228393-pixellate",
"pointillize": "3228394-pointillize",
"qrCodeGenerator": "3228262-qrcodegenerator",
"radialGradient": "3228395-radialgradient",
"randomGenerator": "3228396-randomgenerator",
"rippleTransition": "3228397-rippletransition",
"roundedRectangleGenerator": "3335007-roundedrectanglegenerator",
"roundedRectangleStrokeGenerator": "4401875-roundedrectanglestrokegenerator",
"rowAverage": "3547123-rowaverage",
"sRGBToneCurveToLinear": "3228398-srgbtonecurvetolinear",
"saliencyMap": "3228399-saliencymap",
"saturationBlendMode": "3228400-saturationblendmode",
"screenBlendMode": "3228401-screenblendmode",
"sepiaTone": "3228402-sepiatone",
"shadedMaterial": "3228403-shadedmaterial",
"sharpenLuminance": "3228404-sharpenluminance",
"sixfoldReflectedTile": "3228405-sixfoldreflectedtile",
"sixfoldRotatedTile": "3228406-sixfoldrotatedtile",
"smoothLinearGradient": "3228407-smoothlineargradient",
"sobelGradients": "4401876-sobelgradients",
"softLightBlendMode": "3228408-softlightblendmode",
"sourceAtopCompositing": "3228409-sourceatopcompositing",
"sourceInCompositing": "3228410-sourceincompositing",
"sourceOutCompositing": "3228411-sourceoutcompositing",
"sourceOverCompositing": "3228412-sourceovercompositing",
"spotColor": "3228413-spotcolor",
"spotLight": "3228414-spotlight",
"starShineGenerator": "3228415-starshinegenerator",
"straighten": "3228416-straighten",
"stretchCrop": "4401877-stretchcrop",
"stripesGenerator": "3228417-stripesgenerator",
"subtractBlendMode": "3228418-subtractblendmode",
"sunbeamsGenerator": "3228419-sunbeamsgenerator",
"swipeTransition": "3228420-swipetransition",
"temperatureAndTint": "3228421-temperatureandtint",
"textImageGenerator": "3228422-textimagegenerator",
"thermal": "3228423-thermal",
"toneCurve": "3228424-tonecurve",
"torusLensDistortion": "4401879-toruslensdistortion",
"triangleKaleidoscope": "3228425-trianglekaleidoscope",
"triangleTile": "3228426-triangletile",
"twelvefoldReflectedTile": "3228427-twelvefoldreflectedtile",
"twirlDistortion": "4401880-twirldistortion",
"unsharpMask": "3228428-unsharpmask",
"vibrance": "3228429-vibrance",
"vignette": "3228431-vignette",
"vignetteEffect": "3228430-vignetteeffect",
"vividLightBlendMode": "4401881-vividlightblendmode",
"vortexDistortion": "4401882-vortexdistortion",
"whitePointAdjust": "3228432-whitepointadjust",
"xRay": "3228433-xray",
"zoomBlur": "3228434-zoomblur"
}
================================================
FILE: LICENSE.txt
================================================
MIT License
Copyright (c) 2023 Dan Wood
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: Package.swift
================================================
// swift-tools-version: 5.7
// The swift-tools-version declares the minimum version of Swift required to build this package.
import PackageDescription
// Manifest for the SwiftUICoreImage library package.
let package = Package(
    name: "SwiftUICoreImage",
    // Oldest OS versions supported; newer filters are gated with @available in source.
    platforms: [
        .macOS(.v10_15),
        .iOS(.v13)
    ],
    products: [
        // The single library product that client packages import.
        .library(
            name: "SwiftUICoreImage",
            targets: ["SwiftUICoreImage"]
        ),
    ],
    dependencies: [
        // No external package dependencies.
    ],
    targets: [
        // One library target; its source files live in the top-level `Sources` directory.
        .target(
            name: "SwiftUICoreImage",
            dependencies: [],
            path: "Sources"
        )
    ]
)
================================================
FILE: README.md
================================================
# SwiftUICoreImage
Help for using Core Image within the context of SwiftUI. Also useful even without SwiftUI.
## Introduction
Core Image is a wonderful image-processing toolkit in macOS and iOS, but it's a bit clunky to use. Even after Apple added Swift APIs to many of the filters ([CoreImage.CIFilterBuiltins](https://developer.apple.com/documentation/coreimage/methods_and_protocols_for_filter_creation)), it's still pretty tedious to chain filters to images.
The purpose of this package is to provide an easier way to chain multiple filters to CIImage instances and then render them into SwiftUI (or any other context — SwiftUI is not needed).
```Swift
Image(ciImage: CIImage("Bernie.jpeg")
.sepiaTone(intensity: sepia)
.recropping { image in
image
.clampedToExtent(active: clamped)
.gaussianBlur(radius: gaussianBlurRadius)
}
)
.resizable()
.aspectRatio(contentMode: .fit)
```
## Manifest
Included in this package is:
* CIImage-Filters.swift
* about 200 modifiers on `CIImage` that return a new modified `CIImage` (or the original if unmodified)
* 20 static functions that return a newly generated `CIImage`
* Includes filters up to iOS 18, macOS 15
* CIImage-Extensions.swift
* Convenience initializers for `CIImage` from a resource name and from an `NSImage`/`UIImage`
* Modifiers for `CIImage` to return cropped, scaled, etc. to be easier to work with SwiftUI
* Overloads of several built-in `CIImage` modifier functions that take an `active` boolean parameter
* Image-Extensions.swift
* Convenience initializer to create a SwiftUI `Image` from a `CIImage`
## How This Works
Similarly to how SwiftUI view modifiers each return a modified `View` instance, these modifiers on `CIImage` take care of the core image chaining by creating a corresponding `CIFilter`, hooking up the `inputImage` for you, and returning the resulting `outputImage`.
When creating SwiftUI code, I think it's important that you can use [Inert Modifiers](https://developer.apple.com/videos/play/wwdc2021/10022/?time=2303) in which you pass in some parameter that causes the modifier to have no effect. (For instance, specifying opacity of 1.0 or padding of 0.0 to a view.)
In this code, I've made sure that each of our image modifiers comes with an inert form: in some cases it's passing in a parameter that clearly has no effect (e.g. zero intensity, zero radius); or it's a nil background image when combining with another image; or a boolean `active` parameter. If the parameter(s) specified would cause no change in the image, then the identity (self) is returned forthwith.
The contents of CIImage-Filters.swift are generated source code, using code that I've included in this repository (`CIImage-Generation.swift`, not included in the package import). This loops through the core image metadata that Apple provides (`CIFilter.filterNames(inCategories: nil)`). Unfortunately this list is somewhat out of date and contains a number of inconsistencies that I've done my best to overcome. There are some JSON files that provide additional metadata such as a list of the functions that actually do have online documentation — 56 functions aren't documented so some guesswork is needed — or repairs to missing or obsolete documentation. You probably won't need to run this code unless you have some special requirements or the list has been updated in a future OS release.
## Using With SwiftUI
Remember that Core Image operations are really just a "recipe" for the processing steps; the actual work is not performed until the image needs to be rendered to a bitmap.
Instead of creating a SwiftUI `Image` using a [built-in initializer](https://developer.apple.com/documentation/swiftui/image) from a resource name or other image type (`CGImage`, `NSImage`, `UIImage`), this code provides a new initializer to create an `Image` from a `CIImage`. When SwiftUI needs to render the image, the Core Image is rendered to the screen.
Your typical approach, then, will be to create an `Image`, passing in a `CIImage` created using one of the [built-in initializers](https://developer.apple.com/documentation/coreimage/ciimage) or the convenience methods included here to create from a resource name or another image type.
Then, just chain modifiers to that `CIImage` to indicate what to modify.
Many modifiers are simple. For instance:
```Swift
Image(ciImage: CIImage("Halloween.jpeg")
.xRay()
)
```
If you wish to toggle whether the filter is applied, use the `active` parameter (default value of `true`):
```Swift
Image(ciImage: CIImage("Halloween.jpeg")
.xRay(active: isMachineOn)
)
```
Chain any number of modifiers found in `CIImage-Filters.swift` to construct the desired result.
### Image Scaling
Many Core Image filters use pixel values for parameters. Therefore, you may need to scale an image to an appropriate size _before_ applying operations. For example, applying a 10-pixel-radius blur to a 6000⨉4000 image that is then scaled down to 300⨉200 might not yield what you want; perhaps you want to first scale the image to 300⨉200 and then apply the 10-pixel-radius blur.
Core Image provides a scaling operation (`CILanczosScaleTransform` and `lanczosScaleTransform()`) but this package also includes more convenient alternatives: `scaledToFill()` and `scaledToFit()` where you pass in the dimensions you want.
A typical use of this works well in conjunction with `GeometryReader`. For example:
```Swift
GeometryReader { geo in
let geoSize: CGSize = geo.frame(in: .local).integral.size
// Resize image to double the frame size, assuming we are on a retina display
let newSize: CGSize = CGSize(width: geoSize.width * 2,
height: geoSize.height * 2)
Image(ciImage: CIImage("M83.jpeg")
.scaledToFit(newSize)
.sharpenLuminance(sharpness: 1.0, radius: 5)
)
.resizable() // Make sure retina image is scaled to fit
.aspectRatio(contentMode: .fit)
}
```

## Using Without SwiftUI
SwiftUI is not needed at all. Just create a `CIImage` and perform operations. Then, render to a bitmap.
```Swift
let tiledImage: CIImage = CIImage("HeyGoodMorning.png")
    .triangleTile(center: .zero, angle: 0.356, width: 2.0)
imageView.image = UIImage(ciImage: tiledImage)
```
## Other Notes
If you've used Core Image, you'll know that sometimes you need to play with the extent of an image, e.g. clamping an image to have infinite edges before applying a gaussian blur, then re-cropping to the image's original extent. To accomplish this, you can use the **`recropping`** modifier which is followed by a closure. The operation saves the extent of the image, applies whatever is in the closure, and then re-crops to that extent. In the example below, the image in `ciImage` is converted into an image with the pixel colors along its edges extend infinitely in all directions, then it is blurred, and then upon exit from the closure, the returned image is re-cropped.
```Swift
ciImage
.recropping { image in
image
.clampedToExtent()
.gaussianBlur(radius: 10)
}
```

The `recropping` modifier is also useful if you find that a filter (e.g. `comicEffect()`) has grown your image's extent slightly and you want to clamp it to its original size.
Another useful operation is **`replacing`**. Much like `recropping` except that it does not mess with the extent of the image. You pass in a closure, which starts with the image you were working with; your closure returns a new image. This can be useful when working with the compositing operations in Core Image, which require a *background* image to be passed in. What if your chain of operations is on the background image, and you want to overlay something on top? Just wrap your operation in `.replacing` and return the composited image.
```Swift
ciImage
.replacing { backgroundImage in
ciImage2
.sourceAtopCompositing(backgroundImage: backgroundImage)
}
```
In this case, the image in `ciImage2` is the foreground image, placed atop the `backgroundImage`, then returned to the chain of operations.
## Using Package
In Xcode, Choose File > Add Packages… then enter the URL of this repository into the search bar, and continue from there.
In your code:
```Swift
import SwiftUICoreImage
```
That's it!
---
## Future Improvements
Rather than generating repetitive code, it would be nice to define some macros that expand to the repetitive code!
The advantage of this is that one could just import the macro package and just define only the filters they want,
rather than defining all 200+ mostly-unused filters.
[Apparently](https://forums.swift.org/t/macros-attached-macros-to-methods-and-functions/65531/6)
this would require [Function Body Macros](https://github.com/swiftlang/swift-evolution/blob/main/proposals/0415-function-body-macros.md)
which are not available in Swift 5.x but might make it into Swift 6.0.
Ideally we would specify something like this:
```
@CoreImageExtension
func pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage
```
And this would fill in the body with some code that does the following:
* Guard statement to return self if active flag is false (or other inert modifier; maybe we'd have to indicate which parameter is this?)
* Create built-in CIFilter object based off of name of the function being expanded
* Set all the parameters as specified in the function parameters. Maybe need some way to indicate parameters that need to be cast to another type
* return the outputImage from the filter
---
Please file an issue or pull request if you can think of an improvement to the code or documentation of the generated filters,
or find any other helpful utilities for manipulating Core Images in this toolkit!
================================================
FILE: Sources/CIImage-Extensions.swift
================================================
//
// CIImage-Extensions.swift
// SwiftUI Core Image
//
// Created by Dan Wood on 5/9/23.
//
import SwiftUI
import CoreImage
import CoreImage.CIFilterBuiltins
public extension CIImage {

    /// Create a `CIImage` from a named image resource in `bundle` (or the main bundle /
    /// named-image cache when `bundle` is nil). Falls back to an empty image when the
    /// resource cannot be loaded.
    // Pretty fast. Subsequent invocations are cached.
    convenience init(_ name: String, bundle: Bundle? = nil) {
#if canImport(UIKit)
        if let uiImage = UIImage(named: name, in: bundle, with: nil) {
            self.init(uiImage: uiImage)
        } else {
            self.init()
        }
#elseif canImport(AppKit)
        let nsImage: NSImage?
        if let bundle {
            nsImage = bundle.image(forResource: name)
        } else {
            nsImage = NSImage(named: name)
        }
        if let nsImage {
            self.init(nsImage: nsImage)
        } else {
            self.init()
        }
#endif
    }

#if canImport(UIKit)
    /// Create a `CIImage` from a `UIImage` via its backing `CGImage`.
    /// Falls back to an empty image when the `UIImage` has no `CGImage`.
    convenience init(uiImage: UIImage) {
        if let cgImage = uiImage.cgImage {
            self.init(cgImage: cgImage)
        } else {
            self.init()
        }
    }
#elseif canImport(AppKit)
    /// Create a `CIImage` from an `NSImage` via a `CGImage` representation.
    /// Falls back to an empty image when no `CGImage` can be produced.
    convenience init(nsImage: NSImage) {
        if let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) { // TODO: Maybe consider NSGraphicsContext
            self.init(cgImage: cgImage)
        } else {
            self.init()
        }
    }
#endif

    /// Useful for debugging when chaining multiple CIImage modifiers together.
    /// Logs the call site and the receiver's extent, then returns `self` unchanged.
    func logExtent(file: String = #file, line: Int = #line) -> CIImage {
        // Pass the message as an argument rather than as the format string: a '%' in the
        // file path or extent description must not be interpreted as a format specifier.
        NSLog("%@", "\(file):\(line) \(self.extent)")
        return self
    }
}
// MARK: USEFUL EXTENSIONS FOR WORKING IN A SWIFTUI-LIKE FASHION
public extension CIImage {

    /// Save the extent and then re-crop to that extent after applying whatever is in the closure
    func recropping(apply: (CIImage) -> CIImage) -> CIImage {
        let savedExtent: CGRect = extent
        let newCIImage = apply(self)
        let cropped = newCIImage.cropped(to: savedExtent)
        return cropped
    }

    /// Apply to whatever is in the closure. Useful if the current image is used as a parameter to a new image process.
    func replacing(apply: (CIImage) -> CIImage) -> CIImage {
        let newCIImage = apply(self)
        return newCIImage
    }

    /// Resize an image down so it fully fills the container, cropping in the center as needed.
    /// Returns `self` when `size` is nil or when the receiver's extent is degenerate (zero area).
    @available(macOS 10.15, *)
    func scaledToFill(_ size: CGSize?) -> CIImage {
        guard let size else { return self }
        let currentSize = extent.size
        // Guard against division by zero for an empty/degenerate image (e.g. a failed resource load).
        guard currentSize.width > 0, currentSize.height > 0 else { return self }
        let largerRatio: CGFloat = max(size.width / currentSize.width, size.height / currentSize.height)
        let newSize: CGSize = CGSize(width: currentSize.width * largerRatio, height: currentSize.height * largerRatio)
        // Scale to the larger of two ratios so it fills
        let scaled = self.lanczosScaleTransform(scale: Float(largerRatio))
        let clamped = scaled.clampedToExtent()
        let cropped = clamped.cropped(to: CGRect(x: (newSize.width - size.width) / 2,
                                                 y: (newSize.height - size.height) / 2,
                                                 width: size.width, height: size.height))
        return cropped
    }

    /// Resize an image down so it fully fits in container, centered as needed. No cropping.
    /// Returns `self` when `size` is nil or when the receiver's extent is degenerate (zero area).
    @available(macOS 10.15, *)
    func scaledToFit(_ size: CGSize?) -> CIImage {
        guard let size else { return self }
        let currentSize = extent.size
        // Guard against division by zero for an empty/degenerate image (e.g. a failed resource load).
        guard currentSize.width > 0, currentSize.height > 0 else { return self }
        let smallerRatio: CGFloat = min(size.width / currentSize.width, size.height / currentSize.height)
        let newSize: CGSize = CGSize(width: currentSize.width * smallerRatio, height: currentSize.height * smallerRatio)
        // Scale to the smaller of two ratios so it fits
        let scaled = self.lanczosScaleTransform(scale: Float(smallerRatio))
        let clamped = scaled.clampedToExtent()
        let cropped = clamped.cropped(to: CGRect(origin: .zero, size: newSize))
        return cropped
    }

    /// convenience, to be similar to SwiftUI view offset
    func offset(by offset: CGSize) -> CIImage {
        guard offset != .zero else { return self }
        return self.transformed(by: CGAffineTransform(translationX: offset.width, y: offset.height))
    }
}
// MARK: OVERLOADS OF EXISTING CIIMAGE OPERATIONS SO WE CAN PASS IN 'ACTIVE' BOOLEAN TO BE ABLE TO HAVE INERT MODIFIER
public extension CIImage {

    // Don't overload these; there is already a way to pass in arguments to get an inert modifier
    //open func transformed(by matrix: CGAffineTransform) -> CIImage // pass in CGAffineTransform.identity
    //open func transformed(by matrix: CGAffineTransform, highQualityDownsample: Bool) -> CIImage // pass in CGAffineTransform.identity
    //open func composited(over dest: CIImage) -> CIImage // pass in empty image
    //open func cropped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite
    //open func clamped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite
    //open func settingProperties(_ properties: [AnyHashable : Any]) -> CIImage // Pass in empty to add no properties

    // Maybe not worth dealing with.
    //open func oriented(forExifOrientation orientation: Int32) -> CIImage
    //open func oriented(_ orientation: CGImagePropertyOrientation) -> CIImage
    //open func matchedToWorkingSpace(from colorSpace: CGColorSpace) -> CIImage?
    //open func matchedFromWorkingSpace(to colorSpace: CGColorSpace) -> CIImage?
    //open func insertingIntermediate() -> CIImage
    //open func insertingIntermediate(cache: Bool) -> CIImage
    //open func convertingWorkingSpaceToLab() -> CIImage
    //open func convertingLabToWorkingSpace() -> CIImage

    // Doesn't really apply since the whole point is to have image modifiers for all the filters.
    //open func applyingFilter(_ filterName: String, parameters params: [String : Any]) -> CIImage
    //open func applyingFilter(_ filterName: String) -> CIImage

    // Don't implement because we have an equivalent operation already. Sigma is just the pixel radius.
    //open func applyingGaussianBlur(sigma: Double) -> CIImage

    // The overloads below add an `active` flag so each built-in gets an inert form.

    /// Return a new infinite image by replicating the edge pixels of the receiver image.
    @available(macOS 10.10, *)
    func clampedToExtent(active: Bool = true) -> CIImage {
        active ? clampedToExtent() : self
    }

    /// Return a new image by multiplying the receiver's RGB values by its alpha.
    @available(macOS 10.12, *)
    func premultiplyingAlpha(active: Bool = true) -> CIImage {
        active ? premultiplyingAlpha() : self
    }

    /// Return a new image by dividing the receiver's RGB values by its alpha.
    @available(macOS 10.12, *)
    func unpremultiplyingAlpha(active: Bool = true) -> CIImage {
        active ? unpremultiplyingAlpha() : self
    }

    /// Return a new image with alpha set to 1 within the rectangle and 0 outside.
    @available(macOS 10.12, *)
    func settingAlphaOne(in extent: CGRect, active: Bool = true) -> CIImage {
        active ? settingAlphaOne(in: extent) : self
    }

    /// Return a new image by changing the receiver's sample mode to bilinear interpolation.
    @available(macOS 10.13, *)
    func samplingLinear(active: Bool = true) -> CIImage {
        active ? samplingLinear() : self
    }

    /// Return a new image by changing the receiver's sample mode to nearest neighbor.
    /// (Equivalent to the CISampleNearest filter.)
    @available(macOS 10.13, *)
    func samplingNearest(active: Bool = true) -> CIImage {
        active ? samplingNearest() : self
    }
}
================================================
FILE: Sources/CIImage-Filters.swift
================================================
//
// Automatically generated by CIImage-Generation.swift - do not edit
//
import Foundation
import CoreImage
import CoreImage.CIFilterBuiltins
import CoreML
import AVFoundation
public extension CIImage {
//
// MARK: IMAGE-TO-IMAGE FILTERS
//
/// Accordion Fold Transition
///
/// Transitions from one image to another of differing dimensions by unfolding and crossfading.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228263-accordionfoldtransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAccordionFoldTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - bottomHeight: The height in pixels from the bottom of the image to the bottom of the folded part of the transition. (0...)
///   - numberOfFolds: The number of folds used in the transition. (1...50)
///   - foldShadowAmount: A value that specifies the intensity of the shadow in the transition. (0...1)
///   - time: The duration of the effect. (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func accordionFoldTransition(targetImage: CIImage,
                             bottomHeight: Float = 0,
                             numberOfFolds: Float = 3,
                             foldShadowAmount: Float = 0.1,
                             time: Float,
                             active: Bool = true) -> CIImage {
    guard active else { return self }
    let transition = CIFilter.accordionFoldTransition() // CIAccordionFoldTransition
    transition.inputImage = self
    transition.targetImage = targetImage
    transition.bottomHeight = bottomHeight
    transition.numberOfFolds = numberOfFolds
    transition.foldShadowAmount = foldShadowAmount
    transition.time = time
    guard let output = transition.outputImage else { return CIImage.empty() }
    return output
}
/// Addition
///
/// Adds color components to achieve a brightening effect. This filter is typically used to add highlights and lens flare effects.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228264-additioncompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAdditionCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func additionCompositing(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let compositor = CIFilter.additionCompositing() // CIAdditionCompositing
    compositor.inputImage = self
    compositor.backgroundImage = background
    guard let output = compositor.outputImage else { return CIImage.empty() }
    return output
}
/// Affine Clamp
///
/// Performs an affine transformation on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards. This filter performs similarly to the “Affine Transform” filter except that it produces an image with infinite extent. You can use this filter when you need to blur an image but you want to avoid a soft, black fringe along the edges.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228265-affineclamp)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineClamp)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - transform: The transform to apply to the image.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func affineClamp(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {
    // An identity transform is an inert modifier: nothing to do.
    if transform == CGAffineTransform.identity { return self }
    let clampFilter = CIFilter.affineClamp() // CIAffineClamp
    clampFilter.inputImage = self
    clampFilter.transform = transform
    guard let output = clampFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Affine Tile
///
/// Applies an affine transformation to an image and then tiles the transformed image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228266-affinetile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - transform: The transform to apply to the image.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func affineTile(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {
    // An identity transform is an inert modifier: nothing to do.
    if transform == CGAffineTransform.identity { return self }
    let tileFilter = CIFilter.affineTile() // CIAffineTile
    tileFilter.inputImage = self
    tileFilter.transform = transform
    guard let output = tileFilter.outputImage else { return CIImage.empty() }
    return output
}
// ℹ️ CIAffineTransform already has a CIImage method: func transformed(by: CGAffineTransform) -> CIImage
/// Area Alpha Weighted Histogram
///
/// Calculates alpha-weighted histograms of the unpremultiplied R, G, B channels for the specified area of an image. The output image is a one pixel tall image containing the histogram data for the RGB channels.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401846-areaalphaweightedhistogram)
///
/// Categories: Reduction, Video, Still Image, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - scale: The scale value to use for the histogram values. If the scale is 1.0 and the image is opaque, then the bins in the resulting image will add up to 1.0. (0...)
///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 18, macOS 15.0, *)
func areaAlphaWeightedHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {
    guard active else { return self }
    let histogram = CIFilter.areaAlphaWeightedHistogram() // CIAreaAlphaWeightedHistogram
    histogram.inputImage = self
    histogram.extent = extent
    histogram.scale = scale
    histogram.count = count
    guard let output = histogram.outputImage else { return CIImage.empty() }
    return output
}
/// Area Average
///
/// Calculates the average color for the specified area in an image, returning the result in a pixel.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547111-areaaverage)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaAverage)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a single-pixel image that contains the average color for the region of interest, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaAverage(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaAverage() // CIAreaAverage
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Bounds Red
///
/// Calculates the approximate bounding box of pixels within the specified area of an image where the red component values are non-zero. The result is 1x1 pixel image where the RGBA values contain the normalized X,Y,W,H dimensions of the bounding box.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401847-areaboundsred)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 18, macOS 15.0, *)
func areaBoundsRed(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaBoundsRed() // CIAreaBoundsRed
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Histogram
///
/// Calculates histograms of the R, G, B, and A channels of the specified area of an image. The output image is a one pixel tall image containing the histogram data for all four channels.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547112-areahistogram)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaHistogram)
///
/// Categories: Reduction, Video, Still Image, Built-In
///
/// - Parameters:
///   - extent: A rectangle that, after intersection with the image extent, specifies the subregion of the image that you want to process.
///   - scale: The scale value to use for the histogram values. If the scale is 1.0, then the bins in the resulting image will add up to 1.0. (0...)
///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
///   - active: should this filter be applied
/// - Returns: a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {
    guard active else { return self }
    let histogram = CIFilter.areaHistogram() // CIAreaHistogram
    histogram.inputImage = self
    histogram.extent = extent
    histogram.scale = scale
    histogram.count = count
    guard let output = histogram.outputImage else { return CIImage.empty() }
    return output
}
/// Area Logarithmic Histogram
///
/// Calculates histogram of the R, G, B, and A channels of the specified area of an image. Before binning, the R, G, and B channel values are transformed by the log base two function. The output image is a one pixel tall image containing the histogram data for all four channels.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401848-arealogarithmichistogram)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - scale: The amount of the effect. (0...)
///   - count: The number of bins for the histogram. This value will determine the width of the output image. (1...2048)
///   - minimumStop: The minimum of the range of color channel values to be in the logarithmic histogram image.
///   - maximumStop: The maximum of the range of color channel values to be in the logarithmic histogram image.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func areaLogarithmicHistogram(extent: CGRect,
                              scale: Float = 1,
                              count: Int = 64,
                              minimumStop: Float = -10,
                              maximumStop: Float = 4,
                              active: Bool = true) -> CIImage {
    guard active else { return self }
    let histogram = CIFilter.areaLogarithmicHistogram() // CIAreaLogarithmicHistogram
    histogram.inputImage = self
    histogram.extent = extent
    histogram.scale = scale
    histogram.count = count
    histogram.minimumStop = minimumStop
    histogram.maximumStop = maximumStop
    guard let output = histogram.outputImage else { return CIImage.empty() }
    return output
}
/// Area Maximum
///
/// Calculates the maximum component values for the specified area in an image, returning the result in a pixel.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547114-areamaximum)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximum)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a single-pixel image that contains the maximum color components for the region of interest, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMaximum(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaMaximum() // CIAreaMaximum
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Maximum Alpha
///
/// Finds and returns the pixel with the maximum alpha value.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547113-areamaximumalpha)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximumAlpha)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a single-pixel image that contains the color vector with the maximum alpha value for the region of interest, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMaximumAlpha(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaMaximumAlpha() // CIAreaMaximumAlpha
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Min and Max
///
/// Calculates the per-component minimum and maximum value for the specified area in an image. The result is returned in a 2x1 image where the component minimum values are stored in the pixel on the left.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547115-areaminmax)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMinMax(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaMinMax() // CIAreaMinMax
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Min and Max Red
///
/// Calculates the minimum and maximum red component value for the specified area in an image. The result is returned in the red and green channels of a one pixel image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547116-areaminmaxred)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMinMaxRed(extent: CGRect, active: Bool = true) -> CIImage {
    guard active else { return self }
    let reducer = CIFilter.areaMinMaxRed() // CIAreaMinMaxRed
    reducer.inputImage = self
    reducer.extent = extent
    guard let output = reducer.outputImage else { return CIImage.empty() }
    return output
}
/// Area Minimum
///
/// Reduces the specified region of an image to a single pixel holding the minimum value of each color component.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547118-areaminimum)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimum)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a single-pixel image that contains the minimum color components for the region of interest, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMinimum(extent: CGRect, active: Bool = true) -> CIImage {
    if !active { return self }
    let minFilter = CIFilter.areaMinimum() // CIAreaMinimum
    minFilter.extent = extent
    minFilter.inputImage = self
    return minFilter.outputImage ?? CIImage.empty()
}
/// Area Minimum Alpha
///
/// Locates the pixel whose alpha value is smallest within the region and returns it as a one-pixel image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547117-areaminimumalpha)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimumAlpha)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a single-pixel image that contains the color vector with the minimum alpha value for the region of interest, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func areaMinimumAlpha(extent: CGRect, active: Bool = true) -> CIImage {
    if !active { return self }
    let minAlphaFilter = CIFilter.areaMinimumAlpha() // CIAreaMinimumAlpha
    minAlphaFilter.extent = extent
    minAlphaFilter.inputImage = self
    guard let result = minAlphaFilter.outputImage else { return CIImage.empty() }
    return result
}
/// Bars Swipe Transition
///
/// Transitions between two images by sweeping rectangular bars across the foreground image, progressively revealing the target image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228270-barsswipetransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBarsSwipeTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - angle: The angle in radians of the bars.
///   - width: The width of each bar. (2...)
///   - barOffset: The offset of one bar with respect to another. (1...)
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func barsSwipeTransition(targetImage: CIImage,
                         angle: Float = .pi,
                         width: Float,
                         barOffset: Float = 10,
                         time: Float) -> CIImage {
    if angle == 0 { return self }
    let transitionFilter = CIFilter.barsSwipeTransition() // CIBarsSwipeTransition
    transitionFilter.inputImage = self
    transitionFilter.targetImage = targetImage
    transitionFilter.time = time
    transitionFilter.angle = angle
    transitionFilter.barOffset = barOffset
    transitionFilter.width = width
    return transitionFilter.outputImage ?? CIImage.empty()
}
/// Bicubic Scale Transform
///
/// Produces a high-quality, scaled version of a source image via bicubic resampling. The B and C parameters control the sharpness or softness of the result; the most commonly used values are B = 0.0 and C = 0.75.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228271-bicubicscaletransform)
///
/// Categories: Geometry Adjustment, Video, Still Image, Non-Square Pixels, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)
///   - aspectRatio: The additional horizontal scaling factor to use on the image. (0...)
///   - parameterB: Specifies the value of B to use for the cubic resampling function. (0...1)
///   - parameterC: Specifies the value of C to use for the cubic resampling function. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func bicubicScaleTransform(scale: Float = 1,
                           aspectRatio: Float = 1,
                           parameterB: Float = 0,
                           parameterC: Float = 0.75) -> CIImage {
    // A unit scale with a unit aspect ratio is a no-op regardless of B/C.
    if scale == 1 && aspectRatio == 1 { return self }
    let scaleFilter = CIFilter.bicubicScaleTransform() // CIBicubicScaleTransform
    scaleFilter.inputImage = self
    scaleFilter.parameterB = parameterB
    scaleFilter.parameterC = parameterC
    scaleFilter.scale = scale
    scaleFilter.aspectRatio = aspectRatio
    return scaleFilter.outputImage ?? CIImage.empty()
}
/// Blend With Alpha Mask
///
/// Interpolates between this image and a background using the mask's alpha channel: where mask alpha is 0.0 the background shows through; where it is 1.0 this image shows.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228272-blendwithalphamask)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithAlphaMask)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
///   - maskImage: A masking image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func blendWithAlphaMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.blendWithAlphaMask() // CIBlendWithAlphaMask
    blendFilter.maskImage = maskImage
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Blend With Blue Mask
///
/// Interpolates between this image and a background using the mask's blue channel: where mask blue is 0.0 the background shows through; where it is 1.0 this image shows.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228273-blendwithbluemask)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
///   - maskImage: A masking image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func blendWithBlueMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.blendWithBlueMask() // CIBlendWithBlueMask
    blendFilter.maskImage = maskImage
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Blend With Mask
///
/// Interpolates between this image and a background using a grayscale mask's green channel: where the mask value is 0.0 the background shows through; where it is 1.0 this image shows. Gray levels in the image are preserved.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228274-blendwithmask)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithMask)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
///   - maskImage: A grayscale mask. When a mask value is 0.0, the result is the background. When the mask value is 1.0, the result is the image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func blendWithMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.blendWithMask() // CIBlendWithMask
    blendFilter.maskImage = maskImage
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Blend With Red Mask
///
/// Interpolates between this image and a background using the mask's red channel: where mask red is 0.0 the background shows through; where it is 1.0 this image shows.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228275-blendwithredmask)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
///   - maskImage: A masking image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func blendWithRedMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.blendWithRedMask() // CIBlendWithRedMask
    blendFilter.maskImage = maskImage
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Bloom
///
/// Softens edges and adds a pleasant glow to an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228276-bloom)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBloom)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the greater the effect. (0...)
///   - intensity: The intensity of the effect. A value of 0.0 is no effect. A value of 1.0 is the maximum effect. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func bloom(radius: Float, intensity: Float = 0.5) -> CIImage {
    // Zero radius with zero intensity produces no visible change; skip the filter entirely.
    if radius == 0 && intensity == 0 { return self }
    let bloomFilter = CIFilter.bloom() // CIBloom
    bloomFilter.inputImage = self
    bloomFilter.intensity = intensity
    bloomFilter.radius = radius
    return bloomFilter.outputImage ?? CIImage.empty()
}
/// Bokeh Blur
///
/// Blurs an image by convolving it with a disc-shaped kernel, simulating out-of-focus bokeh.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228277-bokehblur)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...500)
///   - ringAmount: The amount of extra emphasis at the ring of the bokeh. (0...1)
///   - ringSize: The size of extra emphasis at the ring of the bokeh. (0...0.2)
///   - softness: The softness of the bokeh effect (0...10)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func bokehBlur(radius: Float,
               ringAmount: Float = 0,
               ringSize: Float = 0.1,
               softness: Float = 1,
               active: Bool = true) -> CIImage {
    if !active { return self }
    let bokehFilter = CIFilter.bokehBlur() // CIBokehBlur
    bokehFilter.inputImage = self
    bokehFilter.softness = softness
    bokehFilter.ringSize = ringSize
    bokehFilter.ringAmount = ringAmount
    bokehFilter.radius = radius
    return bokehFilter.outputImage ?? CIImage.empty()
}
/// Box Blur
///
/// Smooths or sharpens an image by convolving it with a box-shaped kernel.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228278-boxblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBoxBlur)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func boxBlur(radius: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let blurFilter = CIFilter.boxBlur() // CIBoxBlur
    blurFilter.radius = radius
    blurFilter.inputImage = self
    guard let result = blurFilter.outputImage else { return CIImage.empty() }
    return result
}
/// Bump Distortion
///
/// Produces a concave or convex bump emanating from a chosen point in the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401850-bumpdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - scale: The scale of the effect determines the curvature of the bump. A value of 0.0 has no effect. Positive values create an outward bump; negative values create an inward bump.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func bumpDistortion(center: CGPoint, radius: Float, scale: Float = 0.5) -> CIImage {
    // A zero scale leaves the image untouched, so skip the filter.
    if scale == 0 { return self }
    let bumpFilter = CIFilter.bumpDistortion() // CIBumpDistortion
    bumpFilter.inputImage = self
    bumpFilter.scale = scale
    bumpFilter.radius = radius
    bumpFilter.center = center
    return bumpFilter.outputImage ?? CIImage.empty()
}
/// Bump Distortion Linear
///
/// Produces a concave or convex distortion originating from a line in the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401851-bumpdistortionlinear)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortionLinear)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - angle: The angle in radians of the line around which the distortion occurs.
///   - scale: The scale of the effect. (-1...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func bumpDistortionLinear(center: CGPoint, radius: Float, angle: Float = 0, scale: Float = 0.5) -> CIImage {
    // Skip the filter only for the specific no-op parameter combination.
    if radius == 0 && angle == 0 && scale == 1 { return self }
    let bumpFilter = CIFilter.bumpDistortionLinear() // CIBumpDistortionLinear
    bumpFilter.inputImage = self
    bumpFilter.scale = scale
    bumpFilter.angle = angle
    bumpFilter.radius = radius
    bumpFilter.center = center
    return bumpFilter.outputImage ?? CIImage.empty()
}
/// CMYK Halftone
///
/// Renders the source image as a color halftone, simulating cyan, magenta, yellow, and black inks printed on a white page.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228259-cmykhalftone)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICMYKHalftone)
///
/// Categories: Halftone Effect, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - width: The distance between dots in the pattern. (-2...)
///   - angle: The angle in radians of the pattern.
///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...)
///   - grayComponentReplacement: The gray component replacement value. The value can vary from 0.0 (none) to 1.0. (0...)
///   - underColorRemoval: The under color removal value. The value can vary from 0.0 to 1.0. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func cmykHalftone(center: CGPoint,
                  width: Float,
                  angle: Float = 0,
                  sharpness: Float,
                  grayComponentReplacement: Float = 1,
                  underColorRemoval: Float = 0.5) -> CIImage {
    // Skip the filter only when all three tunable values sit at their defaults.
    if angle == 0 && grayComponentReplacement == 1 && underColorRemoval == 0.5 { return self }
    let halftoneFilter = CIFilter.cmykHalftone() // CICMYKHalftone
    halftoneFilter.inputImage = self
    halftoneFilter.underColorRemoval = underColorRemoval
    halftoneFilter.grayComponentReplacement = grayComponentReplacement
    halftoneFilter.sharpness = sharpness
    halftoneFilter.angle = angle
    halftoneFilter.width = width
    halftoneFilter.center = center
    return halftoneFilter.outputImage ?? CIImage.empty()
}
/// Lens Correction for AVC
///
/// Geometrically distorts an image by altering the magnification based on the radial distance from the optical center to the farthest radius.
///
/// ⚠️ No documentation available for CICameraCalibrationLensCorrection
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - avcameracalibrationdata: AVCameraCalibrationData for the correction. Will be set from the input image if available and can be overridden here.
///   - useInverseLookupTable: Boolean value used to select the Look Up Table from the AVCameraCalibrationData.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func cameraCalibrationLensCorrection(avcameracalibrationdata: AVCameraCalibrationData,
                                     useInverseLookupTable: Bool = false,
                                     active: Bool = true) -> CIImage {
    guard active else { return self }
    // Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.
    // Fix: `self` must be supplied as the input image — the original omitted it,
    // leaving the filter with no image to process.
    guard let filter = CIFilter(name: "CICameraCalibrationLensCorrection", parameters: [
        kCIInputImageKey: self,
        "inputAVCameraCalibrationData": avcameracalibrationdata,
        "inputUseInverseLookUpTable": useInverseLookupTable,
    ]) else { return self }
    return filter.outputImage ?? CIImage.empty()
}
/// Canny Edge Detector
///
/// Runs the Canny edge-detection algorithm over the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401852-cannyedgedetector)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - gaussianSigma: The gaussian sigma of blur to apply to the image to reduce high-frequency noise. (0...)
///   - perceptual: Specifies whether the edge thresholds should be computed in a perceptual color space.
///   - thresholdHigh: The threshold that determines if gradient magnitude is a strong edge. (0...)
///   - thresholdLow: The threshold that determines if gradient magnitude is a weak edge. (0...)
///   - hysteresisPasses: The number of hysteresis passes to apply to promote weak edge pixels. (0...20)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 17, macOS 14.0, *)
func cannyEdgeDetector(gaussianSigma: Float = 1.6,
                       perceptual: Bool = false,
                       thresholdHigh: Float = 0.05,
                       thresholdLow: Float = 0.02,
                       hysteresisPasses: Int,
                       active: Bool = true) -> CIImage {
    if !active { return self }
    let edgeFilter = CIFilter.cannyEdgeDetector() // CICannyEdgeDetector
    edgeFilter.inputImage = self
    edgeFilter.hysteresisPasses = hysteresisPasses
    edgeFilter.thresholdLow = thresholdLow
    edgeFilter.thresholdHigh = thresholdHigh
    edgeFilter.perceptual = perceptual
    edgeFilter.gaussianSigma = gaussianSigma
    return edgeFilter.outputImage ?? CIImage.empty()
}
/// Circle Splash Distortion
///
/// Distorts pixels beginning at a circle's circumference and radiating outward.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401853-circlesplashdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircleSplashDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func circleSplashDistortion(center: CGPoint, radius: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let splashFilter = CIFilter.circleSplashDistortion() // CICircleSplashDistortion
    splashFilter.radius = radius
    splashFilter.center = center
    splashFilter.inputImage = self
    return splashFilter.outputImage ?? CIImage.empty()
}
/// Circular Screen
///
/// Simulates a halftone screen made of concentric circles.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228280-circularscreen)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularScreen)
///
/// Categories: Halftone Effect, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - width: The distance between each circle in the pattern. (1...)
///   - sharpness: The sharpness of the circles. The larger the value, the sharper the circles. (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func circularScreen(center: CGPoint, width: Float, sharpness: Float = 0.7, active: Bool = true) -> CIImage {
    if !active { return self }
    let screenFilter = CIFilter.circularScreen() // CICircularScreen
    screenFilter.sharpness = sharpness
    screenFilter.width = width
    screenFilter.center = center
    screenFilter.inputImage = self
    return screenFilter.outputImage ?? CIImage.empty()
}
/// Circular Wrap Distortion
///
/// Wraps the image around a transparent circle; distortion grows with distance from the circle's center.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401854-circularwrap)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularWrap)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - angle: The angle in radians of the effect.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func circularWrap(center: CGPoint, radius: Float, angle: Float = 0) -> CIImage {
    if angle == 0 { return self }
    let wrapFilter = CIFilter.circularWrap() // CICircularWrap
    wrapFilter.angle = angle
    wrapFilter.radius = radius
    wrapFilter.center = center
    wrapFilter.inputImage = self
    return wrapFilter.outputImage ?? CIImage.empty()
}
// ℹ️ CIClamp already has a CIImage method: func clamped(to: CGRect) -> CIImage
/// Color Absolute Difference
///
/// Yields the absolute value of the color difference between two images; the output alpha is the product of the two source alphas.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547119-colorabsolutedifference)
///
/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - inputImage2: The second input image for differencing.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func colorAbsoluteDifference(inputImage2: CIImage, active: Bool = true) -> CIImage {
    if !active { return self }
    let diffFilter = CIFilter.colorAbsoluteDifference() // CIColorAbsoluteDifference
    diffFilter.inputImage2 = inputImage2
    diffFilter.inputImage = self
    return diffFilter.outputImage ?? CIImage.empty()
}
/// Color Blend Mode
///
/// Combines the background's luminance with this image's hue and saturation, preserving the gray levels of the background.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228282-colorblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func colorBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.colorBlendMode() // CIColorBlendMode
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Color Burn Blend Mode
///
/// Darkens background samples to reflect this image's samples; source samples that are white leave the background unchanged.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228283-colorburnblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBurnBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func colorBurnBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.colorBurnBlendMode() // CIColorBurnBlendMode
    blendFilter.backgroundImage = background
    blendFilter.inputImage = self
    return blendFilter.outputImage ?? CIImage.empty()
}
/// Color Clamp
///
/// Constrains each color component to lie within a given range.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228284-colorclamp)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorClamp)
///
/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - minComponents: Lower clamping values.
///   - maxComponents: Higher clamping values.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorClamp(minComponents: CIVector, maxComponents: CIVector, active: Bool = true) -> CIImage {
    if !active { return self }
    let clampFilter = CIFilter.colorClamp() // CIColorClamp
    clampFilter.maxComponents = maxComponents
    clampFilter.minComponents = minComponents
    clampFilter.inputImage = self
    return clampFilter.outputImage ?? CIImage.empty()
}
/// Color Controls
///
/// Adjusts an image's saturation, brightness, and contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228285-colorcontrols)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorControls)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - saturation: The amount of saturation to apply. The larger the value, the more saturated the result. (0...)
///   - brightness: The amount of brightness to apply. The larger the value, the brighter the result. (-1...)
///   - contrast: The amount of contrast to apply. The larger the value, the more contrast in the resulting image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func colorControls(saturation: Float = 1, brightness: Float = 0, contrast: Float = 1) -> CIImage {
    // All three at their identity values means there is nothing to do.
    if saturation == 1 && brightness == 0 && contrast == 1 { return self }
    let controlsFilter = CIFilter.colorControls() // CIColorControls
    controlsFilter.inputImage = self
    controlsFilter.contrast = contrast
    controlsFilter.brightness = brightness
    controlsFilter.saturation = saturation
    return controlsFilter.outputImage ?? CIImage.empty()
}
/// Color Cross Polynomial
///
/// Transforms pixel values by evaluating a set of polynomial cross-products per channel.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228286-colorcrosspolynomial)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCrossPolynomial)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - redCoefficients: Polynomial coefficients for red channel.
///   - greenCoefficients: Polynomial coefficients for green channel.
///   - blueCoefficients: Polynomial coefficients for blue channel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorCrossPolynomial(redCoefficients: CIVector,
                          greenCoefficients: CIVector,
                          blueCoefficients: CIVector,
                          active: Bool = true) -> CIImage {
    if !active { return self }
    let polyFilter = CIFilter.colorCrossPolynomial() // CIColorCrossPolynomial
    polyFilter.blueCoefficients = blueCoefficients
    polyFilter.greenCoefficients = greenCoefficients
    polyFilter.redCoefficients = redCoefficients
    polyFilter.inputImage = self
    return polyFilter.outputImage ?? CIImage.empty()
}
/// Color Cube
///
/// Transforms the source pixels through a three-dimensional color lookup table.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228287-colorcube)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCube)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)
///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func colorCube(cubeDimension: Int, cubeData: Data, extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let cubeFilter = CIFilter.colorCube() // CIColorCube
    cubeFilter.extrapolate = extrapolate
    cubeFilter.cubeData = cubeData
    cubeFilter.cubeDimension = Float(cubeDimension) // filter API expects a Float dimension
    cubeFilter.inputImage = self
    return cubeFilter.outputImage ?? CIImage.empty()
}
/// Color Cube with ColorSpace
///
/// Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228288-colorcubewithcolorspace)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCubeWithColorSpace)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)
///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.
///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func colorCubeWithColorSpace(cubeDimension: Int,
                             cubeData: Data,
                             extrapolate: Bool = false, // default added for consistency with `colorCube`
                             colorSpace: CGColorSpace,
                             active: Bool = true) -> CIImage {
    guard active else { return self }
    let filter = CIFilter.colorCubeWithColorSpace() // CIColorCubeWithColorSpace
    filter.inputImage = self
    filter.cubeDimension = Float(cubeDimension) // filter API expects a Float dimension
    filter.cubeData = cubeData
    filter.extrapolate = extrapolate
    filter.colorSpace = colorSpace
    return filter.outputImage ?? CIImage.empty()
}
/// Color Cubes Mixed With Mask
///
/// Uses two three-dimensional color tables in a specified colorspace to transform the source image pixels. The mask image is used as an interpolant to mix the output of the two cubes.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228289-colorcubesmixedwithmask)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - maskImage: A masking image; it interpolates between the outputs of the two cubes.
///   - cubeDimension: The dimension of the color cubes. (2...64 iOS; 2...128 macOS)
///   - cube0Data: A 3-dimensional color table of floating-point premultiplied RGBA values in standard ordering (columns and rows indexed by red and green respectively, successive planes indexed by blue).
///   - cube1Data: A second color table with the same layout as `cube0Data`.
///   - colorSpace: The CGColorSpace that defines the RGB values in the color tables.
///   - extrapolate: If true, then the color cubes will be extrapolated if the input image contains RGB component values outside the range 0 to 1.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func colorCubesMixedWithMask(maskImage: CIImage,
							 cubeDimension: Int,
							 cube0Data: Data,
							 cube1Data: Data,
							 colorSpace: CGColorSpace,
							 extrapolate: Bool = false,
							 active: Bool = true) -> CIImage {
	guard active else { return self }
	let mixFilter = CIFilter.colorCubesMixedWithMask() // CIColorCubesMixedWithMask
	mixFilter.inputImage = self
	mixFilter.maskImage = maskImage
	mixFilter.colorSpace = colorSpace
	mixFilter.cubeDimension = Float(cubeDimension)
	mixFilter.cube0Data = cube0Data
	mixFilter.cube1Data = cube1Data
	mixFilter.extrapolate = extrapolate
	guard let output = mixFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Curves
///
/// Uses a three-channel one-dimensional color table to transform the source image pixels.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228290-colorcurves)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - curvesData: Data containing a color table of floating-point RGB values.
///   - curvesDomain: A two-element vector giving the minimum and maximum RGB component values used to look up result values from the color table.
///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorCurves(curvesData: Data, curvesDomain: CIVector, colorSpace: CGColorSpace, active: Bool = true) -> CIImage {
	guard active else { return self }
	let curvesFilter = CIFilter.colorCurves() // CIColorCurves
	curvesFilter.inputImage = self
	curvesFilter.colorSpace = colorSpace
	curvesFilter.curvesDomain = curvesDomain
	curvesFilter.curvesData = curvesData
	guard let output = curvesFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Dodge Blend Mode
///
/// Brightens the background image samples to reflect the source image samples. Source image sample values that specify black do not produce a change.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228291-colordodgeblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorDodgeBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func colorDodgeBlendMode(backgroundImage: CIImage?) -> CIImage {
	guard let background = backgroundImage else { return self }
	let blendFilter = CIFilter.colorDodgeBlendMode() // CIColorDodgeBlendMode
	blendFilter.inputImage = self
	blendFilter.backgroundImage = background
	guard let output = blendFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Invert
///
/// Inverts the colors in an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228292-colorinvert)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorInvert)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorInvert(active: Bool = true) -> CIImage {
	guard active else { return self }
	let invertFilter = CIFilter.colorInvert() // CIColorInvert
	invertFilter.inputImage = self
	guard let output = invertFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Map
///
/// Performs a nonlinear transformation of source color values using mapping values provided in a table.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228293-colormap)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMap)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - gradientImage: The image data from this image transforms the source image values.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorMap(gradientImage: CIImage, active: Bool = true) -> CIImage {
	guard active else { return self }
	let mapFilter = CIFilter.colorMap() // CIColorMap
	mapFilter.inputImage = self
	mapFilter.gradientImage = gradientImage
	guard let output = mapFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Matrix
///
/// Multiplies source color values and adds a bias factor to each color component.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228294-colormatrix)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMatrix)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - rVector: The amount of red to multiply the source color values by.
///   - gVector: The amount of green to multiply the source color values by.
///   - bVector: The amount of blue to multiply the source color values by.
///   - aVector: The amount of alpha to multiply the source color values by.
///   - biasVector: A vector that’s added to each color component.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorMatrix(rVector: CIVector,
				 gVector: CIVector,
				 bVector: CIVector,
				 aVector: CIVector,
				 biasVector: CIVector,
				 active: Bool = true) -> CIImage {
	guard active else { return self }
	let matrixFilter = CIFilter.colorMatrix() // CIColorMatrix
	matrixFilter.inputImage = self
	matrixFilter.biasVector = biasVector
	matrixFilter.aVector = aVector
	matrixFilter.bVector = bVector
	matrixFilter.gVector = gVector
	matrixFilter.rVector = rVector
	guard let output = matrixFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Monochrome
///
/// Remaps colors so they fall within shades of a single color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228295-colormonochrome)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMonochrome)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - color: The monochrome color to apply to the image.
///   - intensity: The intensity of the monochrome effect. A value of 1.0 creates a monochrome image using the supplied color. A value of 0.0 has no effect on the image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func colorMonochrome(color: CIColor, intensity: Float = 1) -> CIImage {
	// An intensity of zero is documented as a no-op, so skip the filter entirely.
	guard intensity != 0 else { return self }
	let monochromeFilter = CIFilter.colorMonochrome() // CIColorMonochrome
	monochromeFilter.inputImage = self
	monochromeFilter.intensity = intensity
	monochromeFilter.color = color
	guard let output = monochromeFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Polynomial
///
/// Modifies the pixel values in an image by applying a set of cubic polynomials.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228296-colorpolynomial)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPolynomial)
///
/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - redCoefficients: Polynomial coefficients for red channel.
///   - greenCoefficients: Polynomial coefficients for green channel.
///   - blueCoefficients: Polynomial coefficients for blue channel.
///   - alphaCoefficients: Polynomial coefficients for alpha channel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorPolynomial(redCoefficients: CIVector,
					 greenCoefficients: CIVector,
					 blueCoefficients: CIVector,
					 alphaCoefficients: CIVector,
					 active: Bool = true) -> CIImage {
	guard active else { return self }
	let polynomialFilter = CIFilter.colorPolynomial() // CIColorPolynomial
	polynomialFilter.inputImage = self
	polynomialFilter.alphaCoefficients = alphaCoefficients
	polynomialFilter.blueCoefficients = blueCoefficients
	polynomialFilter.greenCoefficients = greenCoefficients
	polynomialFilter.redCoefficients = redCoefficients
	guard let output = polynomialFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Posterize
///
/// Remaps red, green, and blue color components to the number of brightness values you specify for each color component. This filter flattens colors to achieve a look similar to that of a silk-screened poster.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228297-colorposterize)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPosterize)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - levels: The number of brightness levels to use for each color component. Lower values result in a more extreme poster effect. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func colorPosterize(levels: Float = 6, active: Bool = true) -> CIImage {
	// Fixed: the previous `guard levels != 300` silently skipped the filter for the
	// arbitrary value 300. No levels value is a documented no-op, so an explicit
	// `active` flag (matching the sibling filters) controls whether the filter runs.
	guard active else { return self }
	let filter = CIFilter.colorPosterize() // CIColorPosterize
	filter.inputImage = self
	filter.levels = levels
	return filter.outputImage ?? CIImage.empty()
}
/// Color Threshold
///
/// Produces a binarized image from an image and a threshold value. The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547120-colorthreshold)
///
/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - threshold: The threshold value that governs if the RGB channels of the resulting image will be zero or one.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func colorThreshold(threshold: Float = 0.5, active: Bool = true) -> CIImage {
	guard active else { return self }
	let thresholdFilter = CIFilter.colorThreshold() // CIColorThreshold
	thresholdFilter.inputImage = self
	thresholdFilter.threshold = threshold
	guard let output = thresholdFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Color Threshold Otsu
///
/// Produces a binarized image from an image with finite extent. The threshold is calculated from the image histogram using Otsu’s method. The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401855-colorthresholdotsu)
///
/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func colorThresholdOtsu(active: Bool = true) -> CIImage {
	guard active else { return self }
	let otsuFilter = CIFilter.colorThresholdOtsu() // CIColorThresholdOtsu
	otsuFilter.inputImage = self
	guard let output = otsuFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Column Average
///
/// Calculates the average color for each column of the specified area in an image, returning the result in a 1D image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547121-columnaverage)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColumnAverage)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a 1-pixel high image that contains the average color for each scan column, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func columnAverage(extent: CGRect, active: Bool = true) -> CIImage {
	guard active else { return self }
	let averageFilter = CIFilter.columnAverage() // CIColumnAverage
	averageFilter.inputImage = self
	averageFilter.extent = extent
	guard let output = averageFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Comic Effect
///
/// Simulates a comic book drawing by outlining edges and applying a color halftone effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228298-comiceffect)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIComicEffect)
///
/// Categories: Stylize, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func comicEffect(active: Bool = true) -> CIImage {
	guard active else { return self }
	let comicFilter = CIFilter.comicEffect() // CIComicEffect
	comicFilter.inputImage = self
	guard let output = comicFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Convert Lab to RGB
///
/// Converts an image from La*b* color space to the Core Image RGB working space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401856-convertlabtorgb)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. If normalize is true then the La*b* channels are in the range 0 to 1.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func convertLabToRGB(normalize: Bool = false, active: Bool = true) -> CIImage {
	guard active else { return self }
	let conversionFilter = CIFilter.convertLabToRGB() // CIConvertLabToRGB
	conversionFilter.inputImage = self
	conversionFilter.normalize = normalize
	guard let output = conversionFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Convert RGB to Lab
///
/// Converts an image from the Core Image RGB working space to La*b* color space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401857-convertrgbtolab)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. If normalize is true then the La*b* channels are in the range 0 to 1.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 16, macOS 13.0, *)
func convertRGBtoLab(normalize: Bool = false, active: Bool = true) -> CIImage {
	guard active else { return self }
	let conversionFilter = CIFilter.convertRGBtoLab() // CIConvertRGBtoLab
	conversionFilter.inputImage = self
	conversionFilter.normalize = normalize
	guard let output = conversionFilter.outputImage else { return CIImage.empty() }
	return output
}
/// 3 by 3 Convolution
///
/// Modifies pixel values by performing a 3x3 matrix convolution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228299-convolution3x3)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution3X3)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGBA components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func convolution3X3(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: the previous `guard bias != 0` returned the unfiltered image whenever
	// bias was 0 (the default), so the convolution was never applied. The kernel
	// weights, not the bias, determine whether this is a no-op; use `active` instead.
	guard active else { return self }
	let filter = CIFilter.convolution3X3() // CIConvolution3X3
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// 5 by 5 Convolution
///
/// Modifies pixel values by performing a 5x5 matrix convolution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228300-convolution5x5)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution5X5)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 25 weights of the convolution kernel.
///   - bias: A value that is added to the RGBA components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func convolution5X5(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolution5X5() // CIConvolution5X5
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// 7 by 7 Convolution
///
/// Modifies pixel values by performing a 7x7 matrix convolution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228301-convolution7x7)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution7X7)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 49 weights of the convolution kernel.
///   - bias: A value that is added to the RGBA components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func convolution7X7(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolution7X7() // CIConvolution7X7
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// Horizontal 9 Convolution
///
/// Modifies pixel values by performing a 9-element horizontal convolution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228302-convolution9horizontal)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Horizontal)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGBA components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func convolution9Horizontal(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolution9Horizontal() // CIConvolution9Horizontal
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// Vertical 9 Convolution
///
/// Modifies pixel values by performing a 9-element vertical convolution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228303-convolution9vertical)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Vertical)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGBA components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func convolution9Vertical(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolution9Vertical() // CIConvolution9Vertical
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// 3 by 3 RGB Convolution
///
/// Convolution of RGB channels with 3 by 3 matrix.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401858-convolutionrgb3x3)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGB components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func convolutionRGB3X3(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolutionRGB3X3() // CIConvolutionRGB3X3
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// 5 by 5 RGB Convolution
///
/// Convolution of RGB channels with 5 by 5 matrix.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401859-convolutionrgb5x5)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 25 weights of the convolution kernel.
///   - bias: A value that is added to the RGB components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func convolutionRGB5X5(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolutionRGB5X5() // CIConvolutionRGB5X5
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// 7 by 7 RGB Convolution
///
/// Convolution of RGB channels with 7 by 7 matrix.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401860-convolutionrgb7x7)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 49 weights of the convolution kernel.
///   - bias: A value that is added to the RGB components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func convolutionRGB7X7(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolutionRGB7X7() // CIConvolutionRGB7X7
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// Horizontal 9 RGB Convolution
///
/// Horizontal Convolution of RGB channels with 9 values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401861-convolutionrgb9horizontal)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGB components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func convolutionRGB9Horizontal(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolutionRGB9Horizontal() // CIConvolutionRGB9Horizontal
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// Vertical 9 RGB Convolution
///
/// Vertical Convolution of RGB channels with 9 values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401862-convolutionrgb9vertical)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - weights: A vector containing the 9 weights of the convolution kernel.
///   - bias: A value that is added to the RGB components of the output pixel.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func convolutionRGB9Vertical(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {
	// Fixed: `guard bias != 0` skipped the convolution whenever bias was 0 (the
	// default), regardless of the kernel weights. Use `active` to opt out instead.
	guard active else { return self }
	let filter = CIFilter.convolutionRGB9Vertical() // CIConvolutionRGB9Vertical
	filter.inputImage = self
	filter.weights = weights
	filter.bias = bias
	return filter.outputImage ?? CIImage.empty()
}
/// Copy Machine
///
/// Transitions from one image to another by simulating the effect of a copy machine.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228304-copymachinetransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICopyMachineTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - extent: A rectangle that defines the extent of the effect.
///   - color: The color of the copier light.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - angle: The angle in radians of the copier light. (0...)
///   - width: The width of the copier light. (0.1...)
///   - opacity: The opacity of the copier light. A value of 0.0 is transparent. A value of 1.0 is opaque. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func copyMachineTransition(targetImage: CIImage,
						   extent: CGRect,
						   color: CIColor,
						   time: Float,
						   angle: Float = 0,
						   width: Float,
						   opacity: Float = 1.3,
						   active: Bool = true) -> CIImage {
	// Fixed: the previous `guard angle != 0 || opacity != 1.3` returned `self` whenever
	// angle and opacity were at their defaults, silently dropping the whole transition
	// (targetImage and time included). The transition is driven by `time`, so those
	// cosmetic parameters must not disable it; an explicit `active` flag opts out.
	guard active else { return self }
	let filter = CIFilter.copyMachineTransition() // CICopyMachineTransition
	filter.inputImage = self
	filter.targetImage = targetImage
	filter.extent = extent
	filter.color = color
	filter.time = time
	filter.angle = angle
	filter.width = width
	filter.opacity = opacity
	return filter.outputImage ?? CIImage.empty()
}
/// CoreML Model Filter
///
/// Generates output image by applying input CoreML model to the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228305-coremlmodel)
/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)
///
/// Categories: Still Image, Built-In, Stylize
///
///
/// - Parameters:
///   - model: The CoreML model to be used for applying effect on the image.
///   - headIndex: A number to specify which output of a multi-head CoreML model should be used for applying effect on the image. (0...10)
///   - softmaxNormalization: A boolean value to specify that Softmax normalization should be applied to the output of the model.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func coreMLModel(model: MLModel, headIndex: Int, softmaxNormalization: Bool = false, active: Bool = true) -> CIImage {
	guard active else { return self }
	let modelFilter = CIFilter.coreMLModel() // CICoreMLModelFilter
	modelFilter.inputImage = self
	modelFilter.softmaxNormalization = softmaxNormalization
	modelFilter.headIndex = Float(headIndex)
	modelFilter.model = model
	guard let output = modelFilter.outputImage else { return CIImage.empty() }
	return output
}
// ℹ️ CICrop already has a CIImage method: func cropped(to: CGRect) -> CIImage
/// Crystallize
///
/// Creates polygon-shaped color blocks by aggregating source pixel-color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228306-crystallize)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICrystallize)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the larger the resulting crystals. (1...)
///   - center: The center of the effect as x and y pixel coordinates.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func crystallize(radius: Float, center: CGPoint, active: Bool = true) -> CIImage {
	guard active else { return self }
	let crystallizeFilter = CIFilter.crystallize() // CICrystallize
	crystallizeFilter.inputImage = self
	crystallizeFilter.center = center
	crystallizeFilter.radius = radius
	guard let output = crystallizeFilter.outputImage else { return CIImage.empty() }
	return output
}
/// Darken Blend Mode
///
/// Creates composite image samples by choosing the darker samples (from either the source image or the background). The result is that the background image samples are replaced by any source image samples that are darker. Otherwise, the background image samples are left unchanged.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228307-darkenblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDarkenBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func darkenBlendMode(backgroundImage: CIImage?) -> CIImage {
	guard let background = backgroundImage else { return self }
	let blendFilter = CIFilter.darkenBlendMode() // CIDarkenBlendMode
	blendFilter.inputImage = self
	blendFilter.backgroundImage = background
	guard let output = blendFilter.outputImage else { return CIImage.empty() }
	return output
}
// ℹ️ CIDepthBlurEffect already has a CIImage method: func depthBlurEffectFilter(for...) -> CIImage
// https://developer.apple.com/documentation/coreimage/cicontext#4375374
/// Depth of Field
///
/// Performs a depth-of-field effect, simulating the miniaturization look of a Tilt & Shift lens.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228308-depthoffield)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDepthOfField)
///
/// Categories: Stylize, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - point0: A set of coordinates marking the first point to be focused on
///   - point1: A set of coordinates marking the second point to be focused on
///   - saturation: The amount to adjust the saturation. (0...)
///   - unsharpMaskRadius: The radius of the unsharpened mask effect applied to the in-focus area of effect (0...)
///   - unsharpMaskIntensity: The intensity of the unsharp mask effect (0...)
///   - radius: The distance from the center of the effect. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func depthOfField(point0: CGPoint,
                  point1: CGPoint,
                  saturation: Float = 1.5,
                  unsharpMaskRadius: Float = 2.5,
                  unsharpMaskIntensity: Float = 0.5,
                  radius: Float = 6,
                  active: Bool = true) -> CIImage {
    if !active { return self }
    let dofFilter = CIFilter.depthOfField() // CIDepthOfField
    dofFilter.inputImage = self
    dofFilter.point0 = point0
    dofFilter.point1 = point1
    dofFilter.radius = radius
    dofFilter.saturation = saturation
    dofFilter.unsharpMaskRadius = unsharpMaskRadius
    dofFilter.unsharpMaskIntensity = unsharpMaskIntensity
    guard let output = dofFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Depth To Disparity
///
/// Converts a depth data image into disparity data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228309-depthtodisparity)
///
/// Categories: Color Adjustment, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func depthToDisparity(active: Bool = true) -> CIImage {
    if !active { return self }
    let conversionFilter = CIFilter.depthToDisparity() // CIDepthToDisparity
    conversionFilter.inputImage = self
    guard let output = conversionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Difference Blend Mode
///
/// Subtracts the dimmer of the source and background sample colors from the brighter one. Black source samples produce no change; white inverts the background color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228310-differenceblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDifferenceBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func differenceBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.differenceBlendMode() // CIDifferenceBlendMode
    blendFilter.inputImage = self
    blendFilter.backgroundImage = background
    guard let output = blendFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Disc Blur
///
/// Smooths an image by convolving it with a disc-shaped kernel.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228311-discblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDiscBlur)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func discBlur(radius: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let blurFilter = CIFilter.discBlur() // CIDiscBlur
    blurFilter.inputImage = self
    blurFilter.radius = radius
    guard let output = blurFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Disintegrate With Mask
///
/// Transitions from one image to another using the shape defined by a mask.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228312-disintegratewithmasktransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDisintegrateWithMaskTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - maskImage: An image that defines the shape to use when disintegrating from the source to the target image.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - shadowRadius: The radius of the shadow created by the mask. (0...)
///   - shadowDensity: The density of the shadow created by the mask. (0...1)
///   - shadowOffset: The offset of the shadow created by the mask.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func disintegrateWithMaskTransition(targetImage: CIImage,
                                    maskImage: CIImage,
                                    time: Float,
                                    shadowRadius: Float,
                                    shadowDensity: Float = 0.65,
                                    shadowOffset: CGPoint) -> CIImage {
    // NOTE(review): the generated no-op check keys off `shadowDensity`, not `time`;
    // a zero-density shadow does not obviously make the transition itself a no-op — confirm against the generator's FunctionMinima data.
    if shadowDensity == 0 { return self }
    let transitionFilter = CIFilter.disintegrateWithMaskTransition() // CIDisintegrateWithMaskTransition
    transitionFilter.inputImage = self
    transitionFilter.targetImage = targetImage
    transitionFilter.maskImage = maskImage
    transitionFilter.time = time
    transitionFilter.shadowOffset = shadowOffset
    transitionFilter.shadowRadius = shadowRadius
    transitionFilter.shadowDensity = shadowDensity
    guard let output = transitionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Disparity To Depth
///
/// Converts a disparity data image into depth data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228313-disparitytodepth)
///
/// Categories: Color Adjustment, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func disparityToDepth(active: Bool = true) -> CIImage {
    if !active { return self }
    let conversionFilter = CIFilter.disparityToDepth() // CIDisparityToDepth
    conversionFilter.inputImage = self
    guard let output = conversionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Displacement Distortion
///
/// Textures the source image using the grayscale values of a second image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401863-displacementdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDisplacementDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - displacementImage: An image whose grayscale values will be applied to the source image.
///   - scale: The amount of texturing of the resulting image. The larger the value, the greater the texturing. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func displacementDistortion(displacementImage: CIImage, scale: Float) -> CIImage {
    if scale == 0 { return self } // zero scale applies no texturing
    let distortionFilter = CIFilter.displacementDistortion() // CIDisplacementDistortion
    distortionFilter.inputImage = self
    distortionFilter.displacementImage = displacementImage
    distortionFilter.scale = scale
    guard let output = distortionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Dissolve
///
/// Cross-dissolves from one image to another over parametric time.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228314-dissolvetransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDissolveTransition)
///
/// Categories: Transition, Video, Still Image, Interlaced, Non-Square Pixels, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func dissolveTransition(targetImage: CIImage, time: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let transitionFilter = CIFilter.dissolveTransition() // CIDissolveTransition
    transitionFilter.inputImage = self
    transitionFilter.targetImage = targetImage
    transitionFilter.time = time
    guard let output = transitionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Dither
///
/// Applies dithering to an image. This operation is usually performed in a perceptual color space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228315-dither)
///
/// Categories: Color Effect, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - intensity: The intensity of the effect. (0...5)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func dither(intensity: Float = 0.1) -> CIImage {
    if intensity == 0 { return self } // zero intensity is a no-op
    let ditherFilter = CIFilter.dither() // CIDither
    ditherFilter.inputImage = self
    ditherFilter.intensity = intensity
    guard let output = ditherFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Divide Blend Mode
///
/// Divides the background image sample color from the source image sample color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228316-divideblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDivideBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func divideBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.divideBlendMode() // CIDivideBlendMode
    blendFilter.inputImage = self
    blendFilter.backgroundImage = background
    guard let output = blendFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Document Enhancer
///
/// Enhances a document image by removing unwanted shadows, whitening the background, and boosting contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228317-documentenhancer)
///
/// Categories: Color Effect, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - amount: The amount of enhancement. (0...10)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func documentEnhancer(amount: Float = 1, active: Bool = true) -> CIImage {
    if !active { return self }
    let enhancerFilter = CIFilter.documentEnhancer() // CIDocumentEnhancer
    enhancerFilter.inputImage = self
    enhancerFilter.amount = amount
    guard let output = enhancerFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Dot Screen
///
/// Simulates the dot patterns of a halftone screen.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228318-dotscreen)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDotScreen)
///
/// Categories: Halftone Effect, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the pattern.
///   - width: The distance between dots in the pattern. (1...)
///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func dotScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7) -> CIImage {
    // NOTE(review): the generated identity check skips the filter whenever `angle` is 0 (its default),
    // even though `width`/`sharpness` would still alter the image — confirm against the generator's FunctionMinima data.
    if angle == 0 { return self }
    let halftoneFilter = CIFilter.dotScreen() // CIDotScreen
    halftoneFilter.inputImage = self
    halftoneFilter.center = center
    halftoneFilter.width = width
    halftoneFilter.angle = angle
    halftoneFilter.sharpness = sharpness
    guard let output = halftoneFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Droste
///
/// Recursively draws a portion of an image in imitation of an M. C. Escher drawing.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401864-droste)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDroste)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - insetPoint0: The x and y position that defines the first inset point
///   - insetPoint1: The x and y position that defines the second inset point
///   - strands: The amount of strands (-10...10)
///   - periodicity: The amount of intervals (1...)
///   - rotation: The angle of the rotation, in radians
///   - zoom: The zoom of the effect (0.01...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func droste(insetPoint0: CGPoint,
            insetPoint1: CGPoint,
            strands: Float = 1,
            periodicity: Float = 1,
            rotation: Float = 0,
            zoom: Float = 1,
            active: Bool = true) -> CIImage {
    if !active { return self }
    let drosteFilter = CIFilter.droste() // CIDroste
    drosteFilter.inputImage = self
    drosteFilter.insetPoint0 = insetPoint0
    drosteFilter.insetPoint1 = insetPoint1
    drosteFilter.zoom = zoom
    drosteFilter.rotation = rotation
    drosteFilter.strands = strands
    drosteFilter.periodicity = periodicity
    guard let output = drosteFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Edge Preserve Upsample Filter
///
/// Upsamples a small image to the size of the input image, using the input image's luminance as a guide to preserve detail.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228319-edgepreserveupsample)
///
/// Categories: Geometry Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - smallImage: An image representing the reference for scaling the input image with the type CIImage
///   - spatialSigma: The influence of the input image’s spatial information on the upsampling operation (0...5)
///   - lumaSigma: Influence of the input image’s luma information on the upsampling operation (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func edgePreserveUpsample(smallImage: CIImage,
                          spatialSigma: Float = 3,
                          lumaSigma: Float = 0.15,
                          active: Bool = true) -> CIImage {
    if !active { return self }
    let upsampleFilter = CIFilter.edgePreserveUpsample() // CIEdgePreserveUpsampleFilter
    upsampleFilter.inputImage = self
    upsampleFilter.smallImage = smallImage
    upsampleFilter.lumaSigma = lumaSigma
    upsampleFilter.spatialSigma = spatialSigma
    guard let output = upsampleFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Edge Work
///
/// Produces a stylized black-and-white rendition of an image resembling a woodblock cutout.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228320-edgework)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEdgeWork)
///
/// Categories: Stylize, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - radius: The thickness of the edges. The larger the value, the thicker the edges. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func edgeWork(radius: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let edgeFilter = CIFilter.edgeWork() // CIEdgeWork
    edgeFilter.inputImage = self
    edgeFilter.radius = radius
    guard let output = edgeFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Edges
///
/// Finds all edges in an image and displays them in color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228321-edges)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEdges)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - intensity: The intensity of the edges. The larger the value, the higher the intensity. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func edges(intensity: Float = 1) -> CIImage {
    if intensity == 0 { return self } // zero intensity is a no-op
    let edgesFilter = CIFilter.edges() // CIEdges
    edgesFilter.inputImage = self
    edgesFilter.intensity = intensity
    guard let output = edgesFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Eightfold Reflected Tile
///
/// Produces a tiled image from a source image by applying an 8-way reflected symmetry.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228322-eightfoldreflectedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEightfoldReflectedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func eightfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    if angle == 0 { return self } // generated identity check keys off the default angle
    let tileFilter = CIFilter.eightfoldReflectedTile() // CIEightfoldReflectedTile
    tileFilter.inputImage = self
    tileFilter.center = center
    tileFilter.width = width
    tileFilter.angle = angle
    guard let output = tileFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Exclusion Blend Mode
///
/// Produces an effect similar to the “Difference Blend Mode” filter but with lower contrast. Black source samples produce no change; white inverts the background color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228323-exclusionblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIExclusionBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func exclusionBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blendFilter = CIFilter.exclusionBlendMode() // CIExclusionBlendMode
    blendFilter.inputImage = self
    blendFilter.backgroundImage = background
    guard let output = blendFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Exposure Adjust
///
/// Adjusts the exposure of an image, similar to changing the F-stop on a camera.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228324-exposureadjust)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIExposureAdjust)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - ev: The amount to adjust the exposure of the image by. The larger the value, the brighter the exposure.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func exposureAdjust(ev: Float = 0) -> CIImage {
    if ev == 0 { return self } // zero EV leaves exposure unchanged
    let exposureFilter = CIFilter.exposureAdjust() // CIExposureAdjust
    exposureFilter.inputImage = self
    exposureFilter.ev = ev
    guard let output = exposureFilter.outputImage else { return CIImage.empty() }
    return output
}
/// False Color
///
/// Maps luminance to a color ramp of two colors. False color is often used to process astronomical and other scientific data, such as ultraviolet and X-ray images.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228325-falsecolor)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFalseColor)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - color0: The first color to use for the color ramp.
///   - color1: The second color to use for the color ramp.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func falseColor(color0: CIColor, color1: CIColor, active: Bool = true) -> CIImage {
    if !active { return self }
    let rampFilter = CIFilter.falseColor() // CIFalseColor
    rampFilter.inputImage = self
    rampFilter.color1 = color1
    rampFilter.color0 = color0
    guard let output = rampFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Flash
///
/// Transitions from one image to another by creating a flash. The flash originates from a point you specify. Small at first, it rapidly expands until the image frame is completely filled with the flash color. As the color fades, the target image begins to appear.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228326-flashtransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFlashTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - center: The center of the effect as x and y pixel coordinates.
///   - extent: The extent of the flash.
///   - color: The color of the light rays emanating from the flash.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - maxStriationRadius: The radius of the light rays emanating from the flash. (0...)
///   - striationStrength: The strength of the light rays emanating from the flash. (0...)
///   - striationContrast: The contrast of the light rays emanating from the flash. (0...)
///   - fadeThreshold: The amount of fade between the flash and the target image. The higher the value, the more flash time and the less fade time. (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func flashTransition(targetImage: CIImage,
                     center: CGPoint,
                     extent: CGRect,
                     color: CIColor,
                     time: Float,
                     maxStriationRadius: Float = 2.58,
                     striationStrength: Float = 0.5,
                     striationContrast: Float = 1.375,
                     fadeThreshold: Float = 0.85,
                     active: Bool = true) -> CIImage {
    if !active { return self }
    let transitionFilter = CIFilter.flashTransition() // CIFlashTransition
    transitionFilter.inputImage = self
    transitionFilter.targetImage = targetImage
    transitionFilter.time = time
    transitionFilter.center = center
    transitionFilter.extent = extent
    transitionFilter.color = color
    transitionFilter.fadeThreshold = fadeThreshold
    transitionFilter.maxStriationRadius = maxStriationRadius
    transitionFilter.striationStrength = striationStrength
    transitionFilter.striationContrast = striationContrast
    guard let output = transitionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Fourfold Reflected Tile
///
/// Produces a tiled image from a source image by applying a 4-way reflected symmetry.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228327-fourfoldreflectedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldReflectedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
///   - acuteAngle: The primary angle for the repeating reflected tile. Small values create thin diamond tiles, and higher values create fatter reflected tiles.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func fourfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float, acuteAngle: Float = .pi/2) -> CIImage {
    // Both angles at their defaults → treat as a no-op (De Morgan of the generated guard).
    if angle == 0 && acuteAngle == .pi/2 { return self }
    let tileFilter = CIFilter.fourfoldReflectedTile() // CIFourfoldReflectedTile
    tileFilter.inputImage = self
    tileFilter.center = center
    tileFilter.width = width
    tileFilter.angle = angle
    tileFilter.acuteAngle = acuteAngle
    guard let output = tileFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Fourfold Rotated Tile
///
/// Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228328-fourfoldrotatedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldRotatedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func fourfoldRotatedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    if angle == 0 { return self } // generated identity check keys off the default angle
    let tileFilter = CIFilter.fourfoldRotatedTile() // CIFourfoldRotatedTile
    tileFilter.inputImage = self
    tileFilter.center = center
    tileFilter.width = width
    tileFilter.angle = angle
    guard let output = tileFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Fourfold Translated Tile
///
/// Produces a tiled image from a source image by applying 4 translation operations.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228329-fourfoldtranslatedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldTranslatedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
///   - acuteAngle: The primary angle for the repeating translated tile. Small values create thin diamond tiles, and higher values create fatter translated tiles.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func fourfoldTranslatedTile(center: CGPoint, angle: Float = 0, width: Float, acuteAngle: Float = .pi/2) -> CIImage {
    // Both angles at their defaults → treat as a no-op (De Morgan of the generated guard).
    if angle == 0 && acuteAngle == .pi/2 { return self }
    let tileFilter = CIFilter.fourfoldTranslatedTile() // CIFourfoldTranslatedTile
    tileFilter.inputImage = self
    tileFilter.center = center
    tileFilter.width = width
    tileFilter.angle = angle
    tileFilter.acuteAngle = acuteAngle
    guard let output = tileFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Gabor Gradients
///
/// Applies a multichannel 5 by 5 Gabor gradient filter to an image. The resulting image has the maximum horizontal gradient in the red channel and the maximum vertical gradient in the green channel. The gradient values can be positive or negative.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325508-gaborgradients)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func gaborGradients(active: Bool = true) -> CIImage {
    if !active { return self }
    let gradientFilter = CIFilter.gaborGradients() // CIGaborGradients
    gradientFilter.inputImage = self
    guard let output = gradientFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Gamma Adjust
///
/// Adjusts midtone brightness. This filter is typically used to compensate for nonlinear effects of displays; adjusting the gamma effectively changes the slope of the transition between black and white.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228330-gammaadjust)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGammaAdjust)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - power: A gamma value to use to correct image brightness. The larger the value, the darker the result.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func gammaAdjust(power: Float = 1) -> CIImage {
    if power == 1 { return self } // gamma of 1 is the identity curve
    let gammaFilter = CIFilter.gammaAdjust() // CIGammaAdjust
    gammaFilter.inputImage = self
    gammaFilter.power = power
    guard let output = gammaFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Gaussian Blur
///
/// Spreads source pixels by an amount specified by a Gaussian distribution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228331-gaussianblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGaussianBlur)
///
/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func gaussianBlur(radius: Float = 10) -> CIImage {
    if radius == 0 { return self } // zero radius blurs nothing
    let blurFilter = CIFilter.gaussianBlur() // CIGaussianBlur
    blurFilter.inputImage = self
    blurFilter.radius = radius
    guard let output = blurFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Glass Distortion
///
/// Distorts an image by applying a glass-like texture. The raised portions of the output image are the result of applying a texture map.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401865-glassdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlassDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - textureImage: A texture to apply to the source image.
///   - center: The center of the effect as x and y pixel coordinates.
///   - scale: The amount of texturing of the resulting image. The larger the value, the greater the texturing. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func glassDistortion(textureImage: CIImage, center: CGPoint, scale: Float) -> CIImage {
    if scale == 0 { return self } // zero scale applies no texturing
    let distortionFilter = CIFilter.glassDistortion() // CIGlassDistortion
    distortionFilter.inputImage = self
    distortionFilter.textureImage = textureImage
    distortionFilter.center = center
    distortionFilter.scale = scale
    guard let output = distortionFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Glass Lozenge
///
/// Creates a lozenge-shaped lens and distorts the portion of the image over which the lens is placed.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401866-glasslozenge)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlassLozenge)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - point0: The x and y position that defines the center of the circle at one end of the lozenge.
///   - point1: The x and y position that defines the center of the circle at the other end of the lozenge.
///   - radius: The radius of the lozenge. The larger the radius, the wider the extent of the distortion. (0...)
///   - refraction: The refraction of the glass. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func glassLozenge(point0: CGPoint, point1: CGPoint, radius: Float, refraction: Float = 1.7) -> CIImage {
    if refraction == 1 { return self } // refraction of 1 bends no light, so the lens is a no-op
    let lozengeFilter = CIFilter.glassLozenge() // CIGlassLozenge
    lozengeFilter.inputImage = self
    lozengeFilter.point0 = point0
    lozengeFilter.point1 = point1
    lozengeFilter.radius = radius
    lozengeFilter.refraction = refraction
    guard let output = lozengeFilter.outputImage else { return CIImage.empty() }
    return output
}
/// Glide Reflected Tile
///
/// Produces a tiled image from a source image by translating and smearing the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228333-glidereflectedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlideReflectedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func glideReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
	// NOTE(review): this skips the filter whenever angle == 0 (the default), even though
	// the glide-reflected tiling itself is not an identity at angle 0 — confirm this
	// no-op heuristic against the generator's FunctionMinima data.
	guard angle != 0 else { return self }
	let tile = CIFilter.glideReflectedTile() // CIGlideReflectedTile
	tile.inputImage = self
	tile.center = center
	tile.angle = angle
	tile.width = width
	return tile.outputImage ?? CIImage.empty()
}
/// Gloom
///
/// Dulls the highlights of an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228334-gloom)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGloom)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the greater the effect. (0...)
///   - intensity: The intensity of the effect. A value of 0.0 is no effect. A value of 1.0 is the maximum effect. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func gloom(radius: Float, intensity: Float = 0.5) -> CIImage {
	// Skip only when both radius and intensity are zero (no visible effect).
	guard radius != 0 || intensity != 0 else { return self }
	let effect = CIFilter.gloom() // CIGloom
	effect.inputImage = self
	effect.radius = radius
	effect.intensity = intensity
	guard let output = effect.outputImage else { return CIImage.empty() }
	return output
}
/// Guided Filter
///
/// Upsamples a small image to the size of the guide image using the content of the guide to preserve detail.
///
/// ⚠️ No documentation available for CIGuidedFilter
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - aLargerImageToUseAsAGuide: A larger image to use as a guide.
///   - radius: The distance from the center of the effect.
///   - epsilon: Smoothness. A higher value means more smoothing.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func guided(aLargerImageToUseAsAGuide: CIImage,
			radius: Float = 1,
			epsilon: Float = 0.0001,
			active: Bool = true) -> CIImage {
	guard active else { return self }
	// Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.
	// FIX: the receiver must be supplied as the filter's input image. Previously the
	// parameters dictionary omitted kCIInputImageKey, so outputImage was always nil
	// and this method unconditionally returned CIImage.empty().
	guard let filter = CIFilter(name: "CIGuidedFilter", parameters: [
		kCIInputImageKey: self,
		"inputGuideImage": aLargerImageToUseAsAGuide,
		"inputRadius": radius,
		"inputEpsilon": epsilon,
	]) else { return self }
	return filter.outputImage ?? CIImage.empty()
}
/// Hard Light Blend Mode
///
/// Either multiplies or screens colors, depending on the source image sample color. If the source image sample color is lighter than 50% gray, the background is lightened, similar to screening. If the source image sample color is darker than 50% gray, the background is darkened, similar to multiplying. If the source image sample color is equal to 50% gray, the source image is not changed. Image samples that are equal to pure black or pure white result in pure black or white. The overall effect is similar to what you would achieve by shining a harsh spotlight on the source image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228335-hardlightblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHardLightBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func hardLightBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.hardLightBlendMode() // CIHardLightBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// Hatched Screen
///
/// Simulates the hatched pattern of a halftone screen.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228336-hatchedscreen)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHatchedScreen)
///
/// Categories: Halftone Effect, Video, Still Image, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the pattern.
///   - width: The distance between lines in the pattern. (1...)
///   - sharpness: The amount of sharpening to apply. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func hatchedScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7) -> CIImage {
	// NOTE(review): the filter is skipped whenever angle == 0 (the default), even though
	// the halftone screen would still be applied at that angle — confirm intended.
	guard angle != 0 else { return self }
	let screen = CIFilter.hatchedScreen() // CIHatchedScreen
	screen.inputImage = self
	screen.center = center
	screen.angle = angle
	screen.width = width
	screen.sharpness = sharpness
	return screen.outputImage ?? CIImage.empty()
}
/// Height Field From Mask
///
/// Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask. The white values of the mask define those pixels that are inside the height field while the black values define those pixels that are outside. The field varies smoothly and continuously inside the mask, reaching the value 0 at the edge of the mask. You can use this filter with the Shaded Material filter to produce extremely realistic shaded objects.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228337-heightfieldfrommask)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHeightFieldFromMask)
///
/// Categories: Stylize, Video, Still Image, Built-In
///
/// - Parameters:
///   - radius: The distance from the edge of the mask for the smooth transition is proportional to the input radius. Larger values make the transition smoother and more pronounced. Smaller values make the transition approximate a fillet radius. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func heightFieldFromMask(radius: Float, active: Bool = true) -> CIImage {
	guard active else { return self }
	let heightField = CIFilter.heightFieldFromMask() // CIHeightFieldFromMask
	heightField.inputImage = self
	heightField.radius = radius
	guard let output = heightField.outputImage else { return CIImage.empty() }
	return output
}
/// Hexagonal Pixelate
///
/// Displays an image as colored hexagons whose color is an average of the pixels they replace.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228338-hexagonalpixellate)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHexagonalPixellate)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - scale: The scale determines the size of the hexagons. Larger values result in larger hexagons. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func hexagonalPixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage {
	guard active else { return self }
	let pixellate = CIFilter.hexagonalPixellate() // CIHexagonalPixellate
	pixellate.inputImage = self
	pixellate.center = center
	pixellate.scale = scale
	guard let output = pixellate.outputImage else { return CIImage.empty() }
	return output
}
/// Highlight and Shadow Adjust
///
/// Adjust the tonal mapping of an image while preserving spatial detail.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228339-highlightshadowadjust)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHighlightShadowAdjust)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - radius: Shadow Highlight Radius. (0...)
///   - shadowAmount: The amount of adjustment to the shadows of the image. (-1...1)
///   - highlightAmount: The amount of adjustment to the highlights of the image. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func highlightShadowAdjust(radius: Float = 0, shadowAmount: Float = 0, highlightAmount: Float = 1) -> CIImage {
	// All-default parameters leave the image unchanged, so skip the filter.
	guard radius != 0 || shadowAmount != 0 || highlightAmount != 1 else { return self }
	let adjust = CIFilter.highlightShadowAdjust() // CIHighlightShadowAdjust
	adjust.inputImage = self
	adjust.radius = radius
	adjust.shadowAmount = shadowAmount
	adjust.highlightAmount = highlightAmount
	return adjust.outputImage ?? CIImage.empty()
}
/// Histogram Display
///
/// Generates a displayable histogram image from the output of the “Area Histogram” filter.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547122-histogramdisplay)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHistogramDisplayFilter)
///
/// Categories: Reduction, Video, Still Image, Built-In
///
/// - Parameters:
///   - height: The height of the displayable histogram image. (1...200)
///   - highLimit: The fraction of the right portion of the histogram image to make lighter. (0...1)
///   - lowLimit: The fraction of the left portion of the histogram image to make darker. (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func histogramDisplay(height: Float = 100, highLimit: Float = 1, lowLimit: Float = 0, active: Bool = true) -> CIImage {
	guard active else { return self }
	let display = CIFilter.histogramDisplay() // CIHistogramDisplayFilter
	display.inputImage = self
	display.height = height
	display.highLimit = highLimit
	display.lowLimit = lowLimit
	guard let output = display.outputImage else { return CIImage.empty() }
	return output
}
/// Hole Distortion
///
/// Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401867-holedistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHoleDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0.01...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func holeDistortion(center: CGPoint, radius: Float, active: Bool = true) -> CIImage {
	guard active else { return self }
	let distortion = CIFilter.holeDistortion() // CIHoleDistortion
	distortion.inputImage = self
	distortion.center = center
	distortion.radius = radius
	guard let output = distortion.outputImage else { return CIImage.empty() }
	return output
}
/// Hue Adjust
///
/// Changes the overall hue, or tint, of the source pixels.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228340-hueadjust)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHueAdjust)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
/// - Parameters:
///   - angle: An angle in radians to use to correct the hue of an image.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func hueAdjust(angle: Float = 0) -> CIImage {
	// A zero rotation of hue is the identity.
	guard angle != 0 else { return self }
	let adjust = CIFilter.hueAdjust() // CIHueAdjust
	adjust.inputImage = self
	adjust.angle = angle
	return adjust.outputImage ?? CIImage.empty()
}
/// Hue Blend Mode
///
/// Uses the luminance and saturation values of the background image with the hue of the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228341-hueblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHueBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func hueBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.hueBlendMode() // CIHueBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// KMeans
///
/// Create a palette of the most common colors found in the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547110-kmeans)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - inputMeans: Specifies the color seeds to use for k-means clustering, either passed as an image or an array of colors.
///   - count: Specifies how many k-means color clusters should be used. (0...128)
///   - passes: Specifies how many k-means passes should be performed. (0...20)
///   - perceptual: Specifies whether the k-means color palette should be computed in a perceptual color space.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func kMeans(extent: CGRect,
			inputMeans: CIImage,
			count: Int,
			passes: Int,
			perceptual: Bool = false,
			active: Bool = true) -> CIImage {
	guard active else { return self }
	let clustering = CIFilter.kMeans() // CIKMeans
	clustering.inputImage = self
	clustering.extent = extent
	clustering.inputMeans = inputMeans
	clustering.count = count
	// The builtin exposes `passes` as Float even though only whole passes make sense.
	clustering.passes = Float(passes)
	clustering.perceptual = perceptual
	guard let output = clustering.outputImage else { return CIImage.empty() }
	return output
}
/// Kaleidoscope
///
/// Produces a kaleidoscopic image from a source image by applying 12-way symmetry.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228343-kaleidoscope)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIKaleidoscope)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - count: The number of reflections in the pattern. (1...)
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of reflection.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func kaleidoscope(count: Int = 6, center: CGPoint, angle: Float = 0) -> CIImage {
	// NOTE(review): the filter is skipped whenever angle == 0 (the default), even though
	// kaleidoscopic reflection would still occur at that angle — confirm intended.
	guard angle != 0 else { return self }
	let kaleidoscope = CIFilter.kaleidoscope() // CIKaleidoscope
	kaleidoscope.inputImage = self
	kaleidoscope.count = count
	kaleidoscope.center = center
	kaleidoscope.angle = angle
	return kaleidoscope.outputImage ?? CIImage.empty()
}
/// Combined Keystone Correction
///
/// Apply keystone correction to an image with combined horizontal and vertical guides.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325509-keystonecorrectioncombined)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - focalLength: 35mm equivalent focal length of the input image.
///   - topLeft: The top left coordinate of the guide.
///   - topRight: The top right coordinate of the guide.
///   - bottomRight: The bottom right coordinate of the guide.
///   - bottomLeft: The bottom left coordinate of the guide.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func keystoneCorrectionCombined(focalLength: Float = 28,
								topLeft: CGPoint,
								topRight: CGPoint,
								bottomRight: CGPoint,
								bottomLeft: CGPoint,
								active: Bool = true) -> CIImage {
	guard active else { return self }
	let correction = CIFilter.keystoneCorrectionCombined() // CIKeystoneCorrectionCombined
	correction.inputImage = self
	correction.focalLength = focalLength
	correction.topLeft = topLeft
	correction.topRight = topRight
	correction.bottomRight = bottomRight
	correction.bottomLeft = bottomLeft
	guard let output = correction.outputImage else { return CIImage.empty() }
	return output
}
/// Horizontal Keystone Correction
///
/// Apply horizontal keystone correction to an image with guides.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325510-keystonecorrectionhorizontal)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - focalLength: 35mm equivalent focal length of the input image.
///   - topLeft: The top left coordinate of the guide.
///   - topRight: The top right coordinate of the guide.
///   - bottomRight: The bottom right coordinate of the guide.
///   - bottomLeft: The bottom left coordinate of the guide.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func keystoneCorrectionHorizontal(focalLength: Float = 28,
								  topLeft: CGPoint,
								  topRight: CGPoint,
								  bottomRight: CGPoint,
								  bottomLeft: CGPoint,
								  active: Bool = true) -> CIImage {
	guard active else { return self }
	let correction = CIFilter.keystoneCorrectionHorizontal() // CIKeystoneCorrectionHorizontal
	correction.inputImage = self
	correction.focalLength = focalLength
	correction.topLeft = topLeft
	correction.topRight = topRight
	correction.bottomRight = bottomRight
	correction.bottomLeft = bottomLeft
	guard let output = correction.outputImage else { return CIImage.empty() }
	return output
}
/// Vertical Keystone Correction
///
/// Apply vertical keystone correction to an image with guides.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325511-keystonecorrectionvertical)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - focalLength: 35mm equivalent focal length of the input image.
///   - topLeft: The top left coordinate of the guide.
///   - topRight: The top right coordinate of the guide.
///   - bottomRight: The bottom right coordinate of the guide.
///   - bottomLeft: The bottom left coordinate of the guide.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func keystoneCorrectionVertical(focalLength: Float = 28,
								topLeft: CGPoint,
								topRight: CGPoint,
								bottomRight: CGPoint,
								bottomLeft: CGPoint,
								active: Bool = true) -> CIImage {
	guard active else { return self }
	let correction = CIFilter.keystoneCorrectionVertical() // CIKeystoneCorrectionVertical
	correction.inputImage = self
	correction.focalLength = focalLength
	correction.topLeft = topLeft
	correction.topRight = topRight
	correction.bottomRight = bottomRight
	correction.bottomLeft = bottomLeft
	guard let output = correction.outputImage else { return CIImage.empty() }
	return output
}
/// Lab ∆E
///
/// Produces an image with the Lab ∆E difference values between two images. The result image will contain ∆E 1994 values between 0.0 and 100.0 where 2.0 is considered a just noticeable difference.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228260-labdeltae)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
/// - Parameters:
///   - image2: The second input image for comparison.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func labDeltaE(image2: CIImage, active: Bool = true) -> CIImage {
	guard active else { return self }
	let deltaE = CIFilter.labDeltaE() // CILabDeltaE
	deltaE.inputImage = self
	deltaE.image2 = image2
	guard let output = deltaE.outputImage else { return CIImage.empty() }
	return output
}
/// Lanczos Scale Transform
///
/// Produces a high-quality, scaled version of a source image. You typically use this filter to scale down an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228344-lanczosscaletransform)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILanczosScaleTransform)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)
///   - aspectRatio: The additional horizontal scaling factor to use on the image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func lanczosScaleTransform(scale: Float = 1, aspectRatio: Float = 1) -> CIImage {
	// Unit scale in both dimensions is the identity.
	guard scale != 1 || aspectRatio != 1 else { return self }
	let transform = CIFilter.lanczosScaleTransform() // CILanczosScaleTransform
	transform.inputImage = self
	transform.scale = scale
	transform.aspectRatio = aspectRatio
	return transform.outputImage ?? CIImage.empty()
}
/// Light Tunnel Distortion
///
/// Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401868-lighttunnel)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILightTunnel)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - rotation: Rotation angle in radians of the light tunnel.
///   - radius: Center radius of the light tunnel.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func lightTunnel(center: CGPoint, rotation: Float = 0, radius: Float) -> CIImage {
	// Skip only when both rotation and radius are zero.
	guard rotation != 0 || radius != 0 else { return self }
	let tunnel = CIFilter.lightTunnel() // CILightTunnel
	tunnel.inputImage = self
	tunnel.center = center
	tunnel.rotation = rotation
	tunnel.radius = radius
	guard let output = tunnel.outputImage else { return CIImage.empty() }
	return output
}
/// Lighten Blend Mode
///
/// Creates composite image samples by choosing the lighter samples (either from the source image or the background). The result is that the background image samples are replaced by any source image samples that are lighter. Otherwise, the background image samples are left unchanged.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228346-lightenblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILightenBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func lightenBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.lightenBlendMode() // CILightenBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// Line Overlay
///
/// Creates a sketch that outlines the edges of an image in black, leaving the non-outlined portions of the image transparent. The result has alpha and is rendered in black, so it won’t look like much until you render it over another image using source over compositing.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228347-lineoverlay)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILineOverlay)
///
/// Categories: Built-In, Still Image, Video, Stylize
///
/// - Parameters:
///   - nrNoiseLevel: The noise level of the image (used with camera data) that gets removed before tracing the edges of the image. Increasing the noise level helps to clean up the traced edges of the image. (0...)
///   - nrSharpness: The amount of sharpening done when removing noise in the image before tracing the edges of the image. This improves the edge acquisition. (0...)
///   - edgeIntensity: The accentuation factor of the Sobel gradient information when tracing the edges of the image. Higher values find more edges, although typically a low value (such as 1.0) is used. (0...)
///   - threshold: This value determines edge visibility. Larger values thin out the edges. (0...)
///   - contrast: The amount of anti-aliasing to use on the edges produced by this filter. Higher values produce higher contrast edges (they are less anti-aliased). (0.25...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func lineOverlay(nrNoiseLevel: Float = 0.07,
				 nrSharpness: Float = 0.71,
				 edgeIntensity: Float = 1,
				 threshold: Float = 0.1,
				 contrast: Float = 50) -> CIImage {
	// NOTE(review): the skip condition treats (0, 0, 0, 0, 1) as the no-op parameter set,
	// which does not match the declared defaults — confirm against the generator's minima.
	guard nrNoiseLevel != 0 || nrSharpness != 0 || edgeIntensity != 0 || threshold != 0 || contrast != 1 else { return self }
	let overlay = CIFilter.lineOverlay() // CILineOverlay
	overlay.inputImage = self
	overlay.nrNoiseLevel = nrNoiseLevel
	overlay.nrSharpness = nrSharpness
	overlay.edgeIntensity = edgeIntensity
	overlay.threshold = threshold
	overlay.contrast = contrast
	return overlay.outputImage ?? CIImage.empty()
}
/// Line Screen
///
/// Simulates the line pattern of a halftone screen.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228348-linescreen)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILineScreen)
///
/// Categories: Halftone Effect, Video, Still Image, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the pattern.
///   - width: The distance between lines in the pattern. (1...)
///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func lineScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7) -> CIImage {
	// NOTE(review): the filter is skipped whenever angle == 0 (the default), even though
	// the halftone line screen would still be applied at that angle — confirm intended.
	guard angle != 0 else { return self }
	let screen = CIFilter.lineScreen() // CILineScreen
	screen.inputImage = self
	screen.center = center
	screen.angle = angle
	screen.width = width
	screen.sharpness = sharpness
	return screen.outputImage ?? CIImage.empty()
}
/// Linear Burn Blend Mode
///
/// Inverts the unpremultiplied source and background image sample color, inverts the sum, and then blends the result with the background according to the PDF basic compositing formula. Source image values that are white produce no change. Source image values that are black invert the background color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228349-linearburnblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearBurnBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func linearBurnBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.linearBurnBlendMode() // CILinearBurnBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// Linear Dodge Blend Mode
///
/// Unpremultiplies the source and background image sample colors, adds them, and then blends the result with the background according to the PDF basic compositing formula. Source image values that are black produces output that is the same as the background. Source image values that are non-black brighten the background color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228350-lineardodgeblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearDodgeBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func linearDodgeBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.linearDodgeBlendMode() // CILinearDodgeBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// Linear Light Blend Mode
///
/// A blend mode that is a combination of linear burn and linear dodge blend modes.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401869-linearlightblendmode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 15, macOS 12.0, *)
func linearLightBlendMode(backgroundImage: CIImage?) -> CIImage {
	// No background to composite against — nothing to do.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.linearLightBlendMode() // CILinearLightBlendMode
	blend.inputImage = self
	blend.backgroundImage = background
	return blend.outputImage ?? CIImage.empty()
}
/// Linear to sRGB Tone Curve
///
/// Maps color intensity from a linear gamma curve to the sRGB color space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228352-lineartosrgbtonecurve)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearToSRGBToneCurve)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func linearToSRGBToneCurve(active: Bool = true) -> CIImage {
	guard active else { return self }
	let toneCurve = CIFilter.linearToSRGBToneCurve() // CILinearToSRGBToneCurve
	toneCurve.inputImage = self
	guard let output = toneCurve.outputImage else { return CIImage.empty() }
	return output
}
/// Luminosity Blend Mode
///
/// Uses the hue and saturation of the background with the luminance of the source image. This mode creates an effect that is inverse to the effect created by the “Color Blend Mode” filter.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228353-luminosityblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILuminosityBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func luminosityBlendMode(backgroundImage: CIImage?) -> CIImage {
	// Nothing to blend against: return the receiver unchanged.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.luminosityBlendMode() // CILuminosityBlendMode
	blend.backgroundImage = background
	blend.inputImage = self
	if let result = blend.outputImage { return result }
	return CIImage.empty()
}
/// Mask to Alpha
///
/// Converts a grayscale image to a white image that is masked by alpha. The white values from the source image produce the inside of the mask; the black values become completely transparent.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228354-masktoalpha)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaskToAlpha)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func maskToAlpha(active: Bool = true) -> CIImage {
	if !active { return self }
	let converter = CIFilter.maskToAlpha() // CIMaskToAlpha
	converter.inputImage = self
	if let result = converter.outputImage { return result }
	return CIImage.empty()
}
/// Masked Variable Blur
///
/// Blurs the source image according to the brightness levels in a mask image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228355-maskedvariableblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaskedVariableBlur)
///
/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - mask: The mask image that determines how much to blur the image. The mask’s green channel value from 0.0 to 1.0 determines if the image is not blurred or blurred by the full radius.
///   - radius: A value that governs the maximum blur radius to apply. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func maskedVariableBlur(mask: CIImage, radius: Float = 5) -> CIImage {
	// Radius zero blurs nothing anywhere, regardless of the mask.
	if radius == 0 { return self }
	let blur = CIFilter.maskedVariableBlur() // CIMaskedVariableBlur
	blur.inputImage = self
	blur.radius = radius
	blur.mask = mask
	if let blurred = blur.outputImage { return blurred }
	return CIImage.empty()
}
/// Maximum Component
///
/// Converts an image to grayscale using the maximum of the three color components.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228356-maximumcomponent)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaximumComponent)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: a grayscale image from max(r,g,b), or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func maximumComponent(active: Bool = true) -> CIImage {
	if !active { return self }
	let grayscale = CIFilter.maximumComponent() // CIMaximumComponent
	grayscale.inputImage = self
	if let result = grayscale.outputImage { return result }
	return CIImage.empty()
}
/// Maximum
///
/// Computes the maximum value, by color component, of two input images and creates an output image using the maximum values. This is similar to dodging.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228357-maximumcompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaximumCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func maximumCompositing(backgroundImage: CIImage?) -> CIImage {
	// Without a second image the composite is a no-op.
	guard let background = backgroundImage else { return self }
	let composite = CIFilter.maximumCompositing() // CIMaximumCompositing
	composite.backgroundImage = background
	composite.inputImage = self
	if let result = composite.outputImage { return result }
	return CIImage.empty()
}
/// Maximum Scale Transform
///
/// Produces a scaled version of a source image that uses the maximum of neighboring pixels instead of linear averaging.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401870-maximumscaletransform)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)
///   - aspectRatio: The additional horizontal scaling factor to use on the image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 18, macOS 15.0, *)
func maximumScaleTransform(scale: Float = 1, aspectRatio: Float = 1) -> CIImage {
	// Unit scale in both dimensions leaves the image untouched.
	if scale == 1 && aspectRatio == 1 { return self }
	let transform = CIFilter.maximumScaleTransform() // CIMaximumScaleTransform
	transform.inputImage = self
	transform.aspectRatio = aspectRatio
	transform.scale = scale
	if let result = transform.outputImage { return result }
	return CIImage.empty()
}
/// Median
///
/// Computes the median value for a group of neighboring pixels and replaces each pixel value with the median. The effect is to reduce noise.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228358-median)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMedianFilter)
///
/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func median(active: Bool = true) -> CIImage {
	if !active { return self }
	let medianFilter = CIFilter.median() // CIMedianFilter
	medianFilter.inputImage = self
	if let result = medianFilter.outputImage { return result }
	return CIImage.empty()
}
/// Minimum Component
///
/// Converts an image to grayscale using the minimum of the three color components.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228360-minimumcomponent)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMinimumComponent)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: a grayscale image from min(r,g,b), or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func minimumComponent(active: Bool = true) -> CIImage {
	if !active { return self }
	let grayscale = CIFilter.minimumComponent() // CIMinimumComponent
	grayscale.inputImage = self
	if let result = grayscale.outputImage { return result }
	return CIImage.empty()
}
/// Minimum
///
/// Computes the minimum value, by color component, of two input images and creates an output image using the minimum values. This is similar to burning.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228361-minimumcompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMinimumCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func minimumCompositing(backgroundImage: CIImage?) -> CIImage {
	// Without a second image the composite is a no-op.
	guard let background = backgroundImage else { return self }
	let composite = CIFilter.minimumCompositing() // CIMinimumCompositing
	composite.backgroundImage = background
	composite.inputImage = self
	if let result = composite.outputImage { return result }
	return CIImage.empty()
}
/// Mix
///
/// Uses an amount parameter to interpolate between an image and a background image. When value is 0.0 or less, the result is the background image. When the value is 1.0 or more, the result is the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228362-mix)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
///   - amount: The amount of the effect.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil or `amount` is 1
@available(iOS 13, macOS 10.15, *)
func mix(backgroundImage: CIImage?, amount: Float = 1) -> CIImage {
	// Identity shortcuts: amount == 1 means "all source image" by definition, and —
	// consistent with every other optional-background filter in this file — a nil
	// background means there is nothing to mix with. Previously a nil background
	// fell through to the filter and produced an empty image instead of identity.
	guard let backgroundImage, amount != 1 else { return self }
	let filter = CIFilter.mix() // CIMix
	filter.inputImage = self
	filter.backgroundImage = backgroundImage
	filter.amount = amount
	return filter.outputImage ?? CIImage.empty()
}
/// Mod
///
/// Transitions from one image to another by revealing the target image through irregularly shaped holes.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228363-modtransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIModTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - center: The center of the effect as x and y pixel coordinates.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - angle: The angle in radians of the mod hole pattern.
///   - radius: The radius of the undistorted holes in the pattern. (1...)
///   - compression: The amount of stretching applied to the mod hole pattern. Holes in the center are not distorted as much as those at the edge of the image. (1...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func modTransition(targetImage: CIImage,
				   center: CGPoint,
				   time: Float,
				   angle: Float = 2,
				   radius: Float,
				   compression: Float) -> CIImage {
	// At time 0 the transition has not begun, so the output is the source image.
	// (The previous guard tested `angle != 0`, which skipped the entire transition —
	// ignoring `time` — whenever the hole-pattern angle happened to be zero, even
	// though an angle of 0 is a perfectly valid pattern orientation.)
	guard time != 0 else { return self }
	let filter = CIFilter.modTransition() // CIModTransition
	filter.inputImage = self
	filter.targetImage = targetImage
	filter.center = center
	filter.time = time
	filter.angle = angle
	filter.radius = radius
	filter.compression = compression
	return filter.outputImage ?? CIImage.empty()
}
/// Morphology Gradient
///
/// Finds the edges of an image by returning the difference between the morphological minimum and maximum operations to the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228364-morphologygradient)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The desired radius of the circular morphological operation to the image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func morphologyGradient(radius: Float) -> CIImage {
	// NOTE(review): radius 0 is treated as "don't apply" and returns the receiver;
	// the filter itself at radius 0 would yield a zero (black) gradient — confirm intent.
	if radius == 0 { return self }
	let gradient = CIFilter.morphologyGradient() // CIMorphologyGradient
	gradient.radius = radius
	gradient.inputImage = self
	if let result = gradient.outputImage { return result }
	return CIImage.empty()
}
/// Morphology Maximum
///
/// Lightens areas of an image by applying a circular morphological maximum operation to the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228365-morphologymaximum)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The desired radius of the circular morphological operation to the image.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func morphologyMaximum(radius: Float = 0) -> CIImage {
	// A zero radius dilates nothing: identity.
	if radius == 0 { return self }
	let dilate = CIFilter.morphologyMaximum() // CIMorphologyMaximum
	dilate.radius = radius
	dilate.inputImage = self
	if let result = dilate.outputImage { return result }
	return CIImage.empty()
}
/// Morphology Minimum
///
/// Darkens areas of an image by applying a circular morphological minimum operation to the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228366-morphologyminimum)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The desired radius of the circular morphological operation to the image.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func morphologyMinimum(radius: Float = 0) -> CIImage {
	// A zero radius erodes nothing: identity.
	if radius == 0 { return self }
	let erode = CIFilter.morphologyMinimum() // CIMorphologyMinimum
	erode.radius = radius
	erode.inputImage = self
	if let result = erode.outputImage { return result }
	return CIImage.empty()
}
/// Morphology Rectangle Maximum
///
/// Lightens areas of an image by applying a rectangular morphological maximum operation to the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228367-morphologyrectanglemaximum)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - width: The width in pixels of the morphological operation. (1...)
///   - height: The height in pixels of the morphological operation. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func morphologyRectangleMaximum(width: Int, height: Int, active: Bool = true) -> CIImage {
	if !active { return self }
	let dilate = CIFilter.morphologyRectangleMaximum() // CIMorphologyRectangleMaximum
	dilate.inputImage = self
	// Filter properties are Float; parameters are exposed as Int for caller convenience.
	dilate.height = Float(height)
	dilate.width = Float(width)
	if let result = dilate.outputImage { return result }
	return CIImage.empty()
}
/// Morphology Rectangle Minimum
///
/// Darkens areas of an image by applying a rectangular morphological minimum operation to the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228368-morphologyrectangleminimum)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - width: The width in pixels of the morphological operation. (1...)
///   - height: The height in pixels of the morphological operation. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func morphologyRectangleMinimum(width: Int, height: Int, active: Bool = true) -> CIImage {
	if !active { return self }
	let erode = CIFilter.morphologyRectangleMinimum() // CIMorphologyRectangleMinimum
	erode.inputImage = self
	// Filter properties are Float; parameters are exposed as Int for caller convenience.
	erode.height = Float(height)
	erode.width = Float(width)
	if let result = erode.outputImage { return result }
	return CIImage.empty()
}
/// Motion Blur
///
/// Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228369-motionblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMotionBlur)
///
/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)
///   - angle: The angle in radians of the motion determines which direction the blur smears.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func motionBlur(radius: Float, angle: Float = 0) -> CIImage {
	// A zero-radius blur smears zero pixels, which is a no-op in any direction —
	// so only the radius decides whether to apply the filter. (The previous guard
	// also tested `angle`, needlessly running an identity filter when radius == 0
	// but angle != 0.)
	guard radius != 0 else { return self }
	let filter = CIFilter.motionBlur() // CIMotionBlur
	filter.inputImage = self
	filter.radius = radius
	filter.angle = angle
	return filter.outputImage ?? CIImage.empty()
}
/// Multiply Blend Mode
///
/// Multiplies the source image samples with the background image samples. This results in colors that are at least as dark as either of the two contributing sample colors.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228370-multiplyblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMultiplyBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func multiplyBlendMode(backgroundImage: CIImage?) -> CIImage {
	// Nothing to blend against: return the receiver unchanged.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.multiplyBlendMode() // CIMultiplyBlendMode
	blend.backgroundImage = background
	blend.inputImage = self
	if let result = blend.outputImage { return result }
	return CIImage.empty()
}
/// Multiply
///
/// Multiplies the color component of two input images and creates an output image using the multiplied values. This filter is typically used to add a spotlight or similar lighting effect to an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228371-multiplycompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMultiplyCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func multiplyCompositing(backgroundImage: CIImage?) -> CIImage {
	// Without a second image the composite is a no-op.
	guard let background = backgroundImage else { return self }
	let composite = CIFilter.multiplyCompositing() // CIMultiplyCompositing
	composite.backgroundImage = background
	composite.inputImage = self
	if let result = composite.outputImage { return result }
	return CIImage.empty()
}
/// Nine Part Stretched
///
/// Distorts an image by stretching an image based on two input breakpoints.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401871-ninepartstretched)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - breakpoint0: Lower left corner of image to retain before stretching begins.
///   - breakpoint1: Upper right corner of image to retain after stretching ends.
///   - growAmount: Vector indicating how much image should grow in pixels in both dimensions.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func ninePartStretched(breakpoint0: CGPoint,
					   breakpoint1: CGPoint,
					   growAmount: CGPoint = .init(x: 100, y: 100),
					   active: Bool = true) -> CIImage {
	if !active { return self }
	let stretch = CIFilter.ninePartStretched() // CINinePartStretched
	stretch.inputImage = self
	stretch.growAmount = growAmount
	stretch.breakpoint1 = breakpoint1
	stretch.breakpoint0 = breakpoint0
	if let result = stretch.outputImage { return result }
	return CIImage.empty()
}
/// Nine Part Tiled
///
/// Distorts an image by tiling an image based on two input breakpoints.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401872-nineparttiled)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - breakpoint0: Lower left corner of image to retain before tiling begins.
///   - breakpoint1: Upper right corner of image to retain after tiling ends.
///   - growAmount: Vector indicating how much image should grow in pixels in both dimensions.
///   - flipYTiles: Indicates that Y-Axis flip should occur.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func ninePartTiled(breakpoint0: CGPoint,
				   breakpoint1: CGPoint,
				   growAmount: CGPoint = .init(x: 100, y: 100),
				   flipYTiles: Bool = true,
				   active: Bool = true) -> CIImage {
	if !active { return self }
	let tiler = CIFilter.ninePartTiled() // CINinePartTiled
	tiler.inputImage = self
	tiler.flipYTiles = flipYTiles
	tiler.growAmount = growAmount
	tiler.breakpoint1 = breakpoint1
	tiler.breakpoint0 = breakpoint0
	if let result = tiler.outputImage { return result }
	return CIImage.empty()
}
/// Noise Reduction
///
/// Reduces noise using a threshold value to define what is considered noise. Small changes in luminance below that value are considered noise and get a noise reduction treatment, which is a local blur. Changes above the threshold value are considered edges, so they are sharpened.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228372-noisereduction)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CINoiseReduction)
///
/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - noiseLevel: The amount of noise reduction. The larger the value, the more noise reduction. (0...)
///   - sharpness: The sharpness of the final image. The larger the value, the sharper the result. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func noiseReduction(noiseLevel: Float = 0.02, sharpness: Float = 0.4) -> CIImage {
	// Both parameters at zero means neither smoothing nor sharpening occurs.
	if noiseLevel == 0 && sharpness == 0 { return self }
	let reducer = CIFilter.noiseReduction() // CINoiseReduction
	reducer.inputImage = self
	reducer.sharpness = sharpness
	reducer.noiseLevel = noiseLevel
	if let result = reducer.outputImage { return result }
	return CIImage.empty()
}
/// Op Tile
///
/// Segments an image, applying any specified scaling and rotation, and then assembles the image again to give an op art appearance.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228373-optile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIOpTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - scale: The scale determines the number of tiles in the effect. (0...)
///   - angle: The angle in radians of a tile.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func opTile(center: CGPoint, scale: Float = 2.8, angle: Float = 0, width: Float) -> CIImage {
	// Unit scale with no rotation is treated as "no operation".
	if scale == 1 && angle == 0 { return self }
	let tiler = CIFilter.opTile() // CIOpTile
	tiler.inputImage = self
	tiler.width = width
	tiler.angle = angle
	tiler.scale = scale
	tiler.center = center
	if let result = tiler.outputImage { return result }
	return CIImage.empty()
}
/// Overlay Blend Mode
///
/// Either multiplies or screens the source image samples with the background image samples, depending on the background color. The result is to overlay the existing image samples while preserving the highlights and shadows of the background. The background color mixes with the source image to reflect the lightness or darkness of the background.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228374-overlayblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIOverlayBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func overlayBlendMode(backgroundImage: CIImage?) -> CIImage {
	// Nothing to blend against: return the receiver unchanged.
	guard let background = backgroundImage else { return self }
	let blend = CIFilter.overlayBlendMode() // CIOverlayBlendMode
	blend.backgroundImage = background
	blend.inputImage = self
	if let result = blend.outputImage { return result }
	return CIImage.empty()
}
/// Page Curl
///
/// Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228375-pagecurltransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPageCurlTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - backsideImage: The image that appears on the back of the source image, as the page curls to reveal the target image.
///   - shadingImage: An image that looks like a shaded sphere enclosed in a square image.
///   - extent: The extent of the effect.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - angle: The angle in radians of the curling page.
///   - radius: The radius of the curl. (0.01...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func pageCurlTransition(targetImage: CIImage,
						backsideImage: CIImage,
						shadingImage: CIImage,
						extent: CGRect,
						time: Float,
						angle: Float = 0,
						radius: Float) -> CIImage {
	// At time 0 the transition has not begun, so the output is the source image.
	// (The previous guard tested `angle != 0`; since `angle` defaults to 0, the
	// default call never applied the transition at all, regardless of `time`.)
	guard time != 0 else { return self }
	let filter = CIFilter.pageCurlTransition() // CIPageCurlTransition
	filter.inputImage = self
	filter.targetImage = targetImage
	filter.backsideImage = backsideImage
	filter.shadingImage = shadingImage
	filter.extent = extent
	filter.time = time
	filter.angle = angle
	filter.radius = radius
	return filter.outputImage ?? CIImage.empty()
}
/// Page Curl With Shadow
///
/// Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228376-pagecurlwithshadowtransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPageCurlWithShadowTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - backsideImage: The image that appears on the back of the source image, as the page curls to reveal the target image.
///   - extent: The extent of the effect.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - angle: The angle in radians of the curling page.
///   - radius: The radius of the curl. (0.01...)
///   - shadowSize: The maximum size in pixels of the shadow. (0...1)
///   - shadowAmount: The strength of the shadow. (0...1)
///   - shadowExtent: The rectangular portion of input image that will cast a shadow.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func pageCurlWithShadowTransition(targetImage: CIImage,
								  backsideImage: CIImage,
								  extent: CGRect = .zero,
								  time: Float,
								  angle: Float = 0,
								  radius: Float,
								  shadowSize: Float,
								  shadowAmount: Float,
								  shadowExtent: CGRect = .zero) -> CIImage {
	// At time 0 the transition has not begun, so the output is the source image.
	// (The previous guard tested `angle != 0`; since `angle` defaults to 0, the
	// default call never applied the transition at all, regardless of `time`.)
	guard time != 0 else { return self }
	let filter = CIFilter.pageCurlWithShadowTransition() // CIPageCurlWithShadowTransition
	filter.inputImage = self
	filter.targetImage = targetImage
	filter.backsideImage = backsideImage
	filter.extent = extent
	filter.time = time
	filter.angle = angle
	filter.radius = radius
	filter.shadowSize = shadowSize
	filter.shadowAmount = shadowAmount
	filter.shadowExtent = shadowExtent
	return filter.outputImage ?? CIImage.empty()
}
/// Palette Centroid
///
/// Calculate the mean (x,y) image coordinates of a color palette.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228377-palettecentroid)
///
/// Categories: Color Effect, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - paletteImage: The input color palette, obtained using “CIKMeans“ filter.
///   - perceptual: Specifies whether the color palette should be applied in a perceptual color space.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func paletteCentroid(paletteImage: CIImage, perceptual: Bool = false, active: Bool = true) -> CIImage {
	if !active { return self }
	let centroid = CIFilter.paletteCentroid() // CIPaletteCentroid
	centroid.inputImage = self
	centroid.perceptual = perceptual
	centroid.paletteImage = paletteImage
	if let result = centroid.outputImage { return result }
	return CIImage.empty()
}
/// Palettize
///
/// Paint an image from a color palette obtained using “CIKMeans“.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228378-palettize)
///
/// Categories: Color Effect, Video, Still Image, Built-In
///
///
/// - Parameters:
///   - paletteImage: The input color palette, obtained using “CIKMeans“ filter.
///   - perceptual: Specifies whether the color palette should be applied in a perceptual color space.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func palettize(paletteImage: CIImage, perceptual: Bool = false, active: Bool = true) -> CIImage {
	if !active { return self }
	let painter = CIFilter.palettize() // CIPalettize
	painter.inputImage = self
	painter.perceptual = perceptual
	painter.paletteImage = paletteImage
	if let result = painter.outputImage { return result }
	return CIImage.empty()
}
/// Parallelogram Tile
///
/// Warps an image by reflecting it in a parallelogram, and then tiles the result.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228379-parallelogramtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIParallelogramTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - acuteAngle: The primary angle for the repeating parallelogram tile. Small values create thin diamond tiles, and higher values create fatter parallelogram tiles.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func parallelogramTile(center: CGPoint, angle: Float = 0, acuteAngle: Float = .pi/2, width: Float) -> CIImage {
	// Default angles (no rotation, right-angle tile) are treated as "no operation".
	if angle == 0 && acuteAngle == .pi/2 { return self }
	let tiler = CIFilter.parallelogramTile() // CIParallelogramTile
	tiler.inputImage = self
	tiler.width = width
	tiler.acuteAngle = acuteAngle
	tiler.angle = angle
	tiler.center = center
	if let result = tiler.outputImage { return result }
	return CIImage.empty()
}
/// Person Segmentation
///
/// Returns a segmentation mask that is red in the portions of an image that are likely to be persons. The returned image may have a different size and aspect ratio from the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401873-personsegmentation)
///
/// Categories: Video, Still Image, Built-In, Stylize
///
///
/// - Parameters:
///   - qualityLevel: Determines the size and quality of the resulting segmentation mask. The value can be a number where 0 is accurate, 1 is balanced, and 2 is fast. (0...2)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 15, macOS 12.0, *)
func personSegmentation(qualityLevel: Int, active: Bool = true) -> CIImage {
	if !active { return self }
	let segmenter = CIFilter.personSegmentation() // CIPersonSegmentation
	segmenter.qualityLevel = qualityLevel
	segmenter.inputImage = self
	if let mask = segmenter.outputImage { return mask }
	return CIImage.empty()
}
/// Perspective Correction
///
/// Transforms an arbitrary quadrilateral region of the source image into a rectangular output,
/// undoing perspective distortion.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228380-perspectivecorrection)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveCorrection)
///
/// Categories: Geometry Adjustment, Still Image, Video, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - topLeft: The top left coordinate to be perspective corrected.
///   - topRight: The top right coordinate to be perspective corrected.
///   - bottomRight: The bottom right coordinate to be perspective corrected.
///   - bottomLeft: The bottom left coordinate to be perspective corrected.
///   - crop: A rectangle that specifies the extent of the corrected image
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func perspectiveCorrection(topLeft: CGPoint,
                           topRight: CGPoint,
                           bottomRight: CGPoint,
                           bottomLeft: CGPoint,
                           crop: Bool = true,
                           active: Bool = true) -> CIImage {
    if !active { return self }
    let correction = CIFilter.perspectiveCorrection() // CIPerspectiveCorrection
    correction.inputImage = self
    correction.topLeft = topLeft
    correction.topRight = topRight
    correction.bottomLeft = bottomLeft
    correction.bottomRight = bottomRight
    correction.crop = crop
    return correction.outputImage ?? CIImage.empty()
}
/// Perspective Rotate
///
/// Applies a homogenous rotation transform to the image, as if the camera were
/// pitched, yawed, or rolled.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325512-perspectiverotate)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - focalLength: 35mm equivalent focal length of the input image.
///   - pitch: Pitch angle in radians.
///   - yaw: Yaw angle in radians.
///   - roll: Roll angle in radians.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func perspectiveRotate(focalLength: Float = 28, pitch: Float = 0, yaw: Float = 0, roll: Float = 0) -> CIImage {
    // With no rotation on any axis there is nothing to do.
    if pitch == 0 && yaw == 0 && roll == 0 { return self }
    let rotation = CIFilter.perspectiveRotate() // CIPerspectiveRotate
    rotation.inputImage = self
    rotation.focalLength = focalLength
    rotation.pitch = pitch
    rotation.yaw = yaw
    rotation.roll = roll
    return rotation.outputImage ?? CIImage.empty()
}
/// Perspective Tile
///
/// Applies a perspective transform to the image, then tiles the transformed result.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228381-perspectivetile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - topLeft: The top left coordinate of a tile.
///   - topRight: The top right coordinate of a tile.
///   - bottomRight: The bottom right coordinate of a tile.
///   - bottomLeft: The bottom left coordinate of a tile.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func perspectiveTile(topLeft: CGPoint,
                     topRight: CGPoint,
                     bottomRight: CGPoint,
                     bottomLeft: CGPoint,
                     active: Bool = true) -> CIImage {
    if !active { return self }
    let tile = CIFilter.perspectiveTile() // CIPerspectiveTile
    tile.inputImage = self
    tile.topLeft = topLeft
    tile.topRight = topRight
    tile.bottomLeft = bottomLeft
    tile.bottomRight = bottomRight
    return tile.outputImage ?? CIImage.empty()
}
/// Perspective Transform
///
/// Alters the geometry of the image to simulate the observer changing viewing position,
/// mapping the image corners to the four given points. Useful for skewing an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228382-perspectivetransform)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTransform)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - topLeft: The top left coordinate to map the image to.
///   - topRight: The top right coordinate to map the image to.
///   - bottomRight: The bottom right coordinate to map the image to.
///   - bottomLeft: The bottom left coordinate to map the image to.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func perspectiveTransform(topLeft: CGPoint,
                          topRight: CGPoint,
                          bottomRight: CGPoint,
                          bottomLeft: CGPoint,
                          active: Bool = true) -> CIImage {
    if !active { return self }
    let transform = CIFilter.perspectiveTransform() // CIPerspectiveTransform
    transform.inputImage = self
    transform.topLeft = topLeft
    transform.topRight = topRight
    transform.bottomLeft = bottomLeft
    transform.bottomRight = bottomRight
    return transform.outputImage ?? CIImage.empty()
}
/// Perspective Transform with Extent
///
/// Alters the geometry of the image to simulate the observer changing viewing position,
/// constraining the effect to the supplied extent rectangle.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228383-perspectivetransformwithextent)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTransformWithExtent)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - topLeft: The top left coordinate to map the image to.
///   - topRight: The top right coordinate to map the image to.
///   - bottomRight: The bottom right coordinate to map the image to.
///   - bottomLeft: The bottom left coordinate to map the image to.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func perspectiveTransformWithExtent(extent: CGRect,
                                    topLeft: CGPoint,
                                    topRight: CGPoint,
                                    bottomRight: CGPoint,
                                    bottomLeft: CGPoint,
                                    active: Bool = true) -> CIImage {
    if !active { return self }
    let transform = CIFilter.perspectiveTransformWithExtent() // CIPerspectiveTransformWithExtent
    transform.inputImage = self
    transform.extent = extent
    transform.topLeft = topLeft
    transform.topRight = topRight
    transform.bottomLeft = bottomLeft
    transform.bottomRight = bottomRight
    return transform.outputImage ?? CIImage.empty()
}
/// Photo Effect Chrome
///
/// Applies a preconfigured vintage-film look with exaggerated color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228384-photoeffectchrome)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectChrome)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectChrome(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectChrome() // CIPhotoEffectChrome
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Fade
///
/// Applies a preconfigured vintage-film look with diminished color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228385-photoeffectfade)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectFade)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectFade(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectFade() // CIPhotoEffectFade
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Instant
///
/// Applies a preconfigured vintage-film look with distorted colors.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228386-photoeffectinstant)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectInstant)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectInstant(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectInstant() // CIPhotoEffectInstant
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Mono
///
/// Applies a preconfigured black-and-white film look with low contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228387-photoeffectmono)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectMono)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectMono(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectMono() // CIPhotoEffectMono
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Noir
///
/// Applies a preconfigured black-and-white film look with exaggerated contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228388-photoeffectnoir)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectNoir)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectNoir(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectNoir() // CIPhotoEffectNoir
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Process
///
/// Applies a preconfigured vintage-film look with emphasized cool colors.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228389-photoeffectprocess)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectProcess)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectProcess(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectProcess() // CIPhotoEffectProcess
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Tonal
///
/// Applies a preconfigured black-and-white film look without significantly altering contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228390-photoeffecttonal)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectTonal)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectTonal(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectTonal() // CIPhotoEffectTonal
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Photo Effect Transfer
///
/// Applies a preconfigured vintage-film look with emphasized warm colors.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228391-photoeffecttransfer)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectTransfer)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func photoEffectTransfer(extrapolate: Bool = false, active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.photoEffectTransfer() // CIPhotoEffectTransfer
    effect.inputImage = self
    effect.extrapolate = extrapolate
    return effect.outputImage ?? CIImage.empty()
}
/// Pin Light Blend Mode
///
/// Unpremultiplies the source and background sample colors, combines them by relative
/// difference, and blends with the background per the PDF basic compositing formula.
/// Source values brighter than the destination lighten the output; darker values darken it.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228392-pinlightblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPinLightBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func pinLightBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.pinLightBlendMode() // CIPinLightBlendMode
    blend.inputImage = self
    blend.backgroundImage = background
    return blend.outputImage ?? CIImage.empty()
}
/// Pinch Distortion
///
/// Creates a rectangular-shaped area that pinches source pixels inward, distorting the
/// pixels closest to the rectangle the most.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401874-pinchdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPinchDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - scale: The amount of pinching. A value of 0.0 has no effect. A value of 1.0 is the maximum pinch. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func pinchDistortion(center: CGPoint, radius: Float, scale: Float = 0.5) -> CIImage {
    // NOTE(review): the shortcut fires only when radius AND scale are both 0; the
    // parameter docs suggest scale == 0 alone is already a no-op — confirm intent.
    if radius == 0 && scale == 0 { return self }
    let distortion = CIFilter.pinchDistortion() // CIPinchDistortion
    distortion.inputImage = self
    distortion.center = center
    distortion.radius = radius
    distortion.scale = scale
    return distortion.outputImage ?? CIImage.empty()
}
/// Pixelate
///
/// Makes an image blocky by mapping it to colored squares whose color comes from the
/// pixels they replace.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228393-pixellate)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPixellate)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - scale: The scale determines the size of the squares. Larger values result in larger squares. (1...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let pixelate = CIFilter.pixellate() // CIPixellate
    pixelate.inputImage = self
    pixelate.center = center
    pixelate.scale = scale
    return pixelate.outputImage ?? CIImage.empty()
}
/// Pointillize
///
/// Renders the source image in a pointillistic style.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228394-pointillize)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPointillize)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - radius: The radius of the circles in the resulting pattern. (1...)
///   - center: The center of the effect as x and y pixel coordinates.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func pointillize(radius: Float, center: CGPoint, active: Bool = true) -> CIImage {
    if !active { return self }
    let pointillist = CIFilter.pointillize() // CIPointillize
    pointillist.inputImage = self
    pointillist.radius = radius
    pointillist.center = center
    return pointillist.outputImage ?? CIImage.empty()
}
/// Ripple
///
/// Transitions from one image to another via a circular wave that expands from the center
/// point, revealing the target image in the wake of the wave.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228397-rippletransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRippleTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - shadingImage: An image that looks like a shaded sphere enclosed in a square image.
///   - center: The center of the effect as x and y pixel coordinates.
///   - extent: A rectangle that defines the extent of the effect.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - width: The width of the ripple. (1...)
///   - scale: A value that determines whether the ripple starts as a bulge (higher value) or a dimple (lower value). (-50...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func rippleTransition(targetImage: CIImage,
                      shadingImage: CIImage,
                      center: CGPoint,
                      extent: CGRect,
                      time: Float,
                      width: Float,
                      scale: Float = 50) -> CIImage {
    // NOTE(review): scale == 0 short-circuits the entire transition (including `time`),
    // not just the ripple shaping — confirm this matches the intended identity condition.
    if scale == 0 { return self }
    let transition = CIFilter.rippleTransition() // CIRippleTransition
    transition.inputImage = self
    transition.targetImage = targetImage
    transition.shadingImage = shadingImage
    transition.center = center
    transition.extent = extent
    transition.time = time
    transition.width = width
    transition.scale = scale
    return transition.outputImage ?? CIImage.empty()
}
/// Row Average
///
/// Calculates the average color of each row of the specified area of the image,
/// returning the result as a 1D image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547123-rowaverage)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRowAverage)
///
/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - extent: A rectangle that specifies the subregion of the image that you want to process.
///   - active: should this filter be applied
/// - Returns: a 1-pixel high image that contains the average color for each scan row, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func rowAverage(extent: CGRect, active: Bool = true) -> CIImage {
    if !active { return self }
    let reduction = CIFilter.rowAverage() // CIRowAverage
    reduction.inputImage = self
    reduction.extent = extent
    return reduction.outputImage ?? CIImage.empty()
}
/// sRGB Tone Curve to Linear
///
/// Maps color intensity from the sRGB color space to a linear gamma curve.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228398-srgbtonecurvetolinear)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISRGBToneCurveToLinear)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func sRGBToneCurveToLinear(active: Bool = true) -> CIImage {
    if !active { return self }
    let adjustment = CIFilter.sRGBToneCurveToLinear() // CISRGBToneCurveToLinear
    adjustment.inputImage = self
    return adjustment.outputImage ?? CIImage.empty()
}
/// Saliency Map Filter
///
/// Generates a saliency map of the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228399-saliencymap)
///
/// Categories: Video, Still Image, Built-In, Stylize
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func saliencyMap(active: Bool = true) -> CIImage {
    if !active { return self }
    let saliency = CIFilter.saliencyMap() // CISaliencyMapFilter
    saliency.inputImage = self
    return saliency.outputImage ?? CIImage.empty()
}
// ℹ️ CISampleNearest already has a CIImage method: func samplingNearest() -> CIImage
// https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest
/// Saturation Blend Mode
///
/// Uses the luminance and hue of the background with the saturation of the source image.
/// Background areas with no saturation (pure gray) are unchanged.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228400-saturationblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISaturationBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func saturationBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.saturationBlendMode() // CISaturationBlendMode
    blend.inputImage = self
    blend.backgroundImage = background
    return blend.outputImage ?? CIImage.empty()
}
/// Screen Blend Mode
///
/// Multiplies the inverse of the source samples with the inverse of the background samples,
/// yielding colors at least as light as either contributing sample.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228401-screenblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIScreenBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func screenBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.screenBlendMode() // CIScreenBlendMode
    blend.inputImage = self
    blend.backgroundImage = background
    return blend.outputImage ?? CIImage.empty()
}
/// Sepia Tone
///
/// Maps the colors of the image to various shades of brown.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228402-sepiatone)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISepiaTone)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable
///
///
/// - Parameters:
///   - intensity: The intensity of the sepia effect. A value of 1.0 creates a monochrome sepia image. A value of 0.0 has no effect on the image. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func sepiaTone(intensity: Float = 1) -> CIImage {
    // Zero intensity is documented as a no-op, so skip the filter entirely.
    if intensity == 0 { return self }
    let sepia = CIFilter.sepiaTone() // CISepiaTone
    sepia.inputImage = self
    sepia.intensity = intensity
    return sepia.outputImage ?? CIImage.empty()
}
/// Shaded Material
///
/// Produces a shaded image from a height field: greater heights render as lighter shades and
/// lower areas as darker shades. Combine with the “Height Field From Mask” filter to produce
/// quick shadings of masks, such as text.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228403-shadedmaterial)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIShadedMaterial)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - shadingImage: The image to use as the height field. The resulting image has greater heights with lighter shades, and lesser heights (lower areas) with darker shades.
///   - scale: The scale of the effect. The higher the value, the more dramatic the effect. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func shadedMaterial(shadingImage: CIImage, scale: Float, active: Bool = true) -> CIImage {
    if !active { return self }
    let shading = CIFilter.shadedMaterial() // CIShadedMaterial
    shading.inputImage = self
    shading.shadingImage = shadingImage
    shading.scale = scale
    return shading.outputImage ?? CIImage.empty()
}
/// Sharpen Luminance
///
/// Increases image detail by sharpening. Operates on the luminance of the image only;
/// the chrominance of the pixels remains unaffected.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228404-sharpenluminance)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISharpenLuminance)
///
/// Categories: Sharpen, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - sharpness: The amount of sharpening to apply. Larger values are sharper.
///   - radius: The distance from the center of the effect.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func sharpenLuminance(sharpness: Float = 0.4, radius: Float = 1.69) -> CIImage {
    if sharpness == 0 && radius == 0 { return self }
    let sharpen = CIFilter.sharpenLuminance() // CISharpenLuminance
    sharpen.inputImage = self
    sharpen.sharpness = sharpness
    sharpen.radius = radius
    return sharpen.outputImage ?? CIImage.empty()
}
/// Sixfold Reflected Tile
///
/// Produces a tiled image from the source image by applying a 6-way reflected symmetry.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228405-sixfoldreflectedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISixfoldReflectedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func sixfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    // NOTE(review): angle == 0 returns the image unchanged, yet the filter still tiles
    // at angle 0 — confirm this identity condition against the generator's intent.
    if angle == 0 { return self }
    let tile = CIFilter.sixfoldReflectedTile() // CISixfoldReflectedTile
    tile.inputImage = self
    tile.center = center
    tile.angle = angle
    tile.width = width
    return tile.outputImage ?? CIImage.empty()
}
/// Sixfold Rotated Tile
///
/// Produces a tiled image from the source image by rotating it in increments of 60 degrees.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228406-sixfoldrotatedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISixfoldRotatedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func sixfoldRotatedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    // NOTE(review): angle == 0 returns the image unchanged, yet the filter still tiles
    // at angle 0 — confirm this identity condition against the generator's intent.
    if angle == 0 { return self }
    let tile = CIFilter.sixfoldRotatedTile() // CISixfoldRotatedTile
    tile.inputImage = self
    tile.center = center
    tile.angle = angle
    tile.width = width
    return tile.outputImage ?? CIImage.empty()
}
/// Sobel Gradients
///
/// Applies a multichannel 3x3 Sobel gradient filter to an image. The output holds the maximum horizontal gradient in the red channel and the maximum vertical gradient in the green channel; gradient values may be positive or negative.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401876-sobelgradients)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 17, macOS 14.0, *)
func sobelGradients(active: Bool = true) -> CIImage {
    if !active { return self }
    let gradients = CIFilter.sobelGradients() // CISobelGradients
    gradients.inputImage = self
    guard let output = gradients.outputImage else { return CIImage.empty() }
    return output
}
/// Soft Light Blend Mode
///
/// Darkens or lightens colors depending on the source sample. Samples lighter than 50% gray lighten the background (like dodging); samples darker than 50% gray darken it (like burning); samples equal to 50% gray leave it unchanged. Pure black or white source samples produce darker or lighter areas but never pure black or white. The overall effect resembles shining a diffuse spotlight on the source image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228408-softlightblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISoftLightBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func softLightBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.softLightBlendMode() // CISoftLightBlendMode
    blend.backgroundImage = background
    blend.inputImage = self
    return blend.outputImage ?? CIImage.empty()
}
/// Source Atop
///
/// Places the source image over the background image, then uses the background's luminance to decide what to show. The composite shows the background image plus only those portions of the source image that lie over visible parts of the background.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228409-sourceatopcompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceAtopCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func sourceAtopCompositing(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let compositor = CIFilter.sourceAtopCompositing() // CISourceAtopCompositing
    compositor.inputImage = self
    compositor.backgroundImage = background
    guard let output = compositor.outputImage else { return CIImage.empty() }
    return output
}
/// Source In
///
/// Uses the background image to decide what to keep from the input image, effectively cropping the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228410-sourceincompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceInCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func sourceInCompositing(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let compositor = CIFilter.sourceInCompositing() // CISourceInCompositing
    compositor.backgroundImage = background
    compositor.inputImage = self
    return compositor.outputImage ?? CIImage.empty()
}
/// Source Out
///
/// Uses the background image to decide what to remove from the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228411-sourceoutcompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceOutCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func sourceOutCompositing(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let compositor = CIFilter.sourceOutCompositing() // CISourceOutCompositing
    compositor.inputImage = self
    compositor.backgroundImage = background
    return compositor.outputImage ?? CIImage.empty()
}
/// Source Over
///
/// Places the input image over the input background image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228412-sourceovercompositing)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceOverCompositing)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func sourceOverCompositing(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let compositor = CIFilter.sourceOverCompositing() // CISourceOverCompositing
    compositor.backgroundImage = background
    compositor.inputImage = self
    guard let output = compositor.outputImage else { return CIImage.empty() }
    return output
}
/// Spot Color
///
/// Replaces up to three color ranges with spot colors.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228413-spotcolor)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISpotColor)
///
/// Categories: Built-In, Still Image, Video, High Dynamic Range, Stylize
///
///
/// - Parameters:
///   - centerColor1: The center value of the first color range to replace.
///   - replacementColor1: A replacement color for the first color range.
///   - closeness1: A value that indicates how close the first color must match before it is replaced. (0...)
///   - contrast1: The contrast of the first replacement color. (0...)
///   - centerColor2: The center value of the second color range to replace.
///   - replacementColor2: A replacement color for the second color range.
///   - closeness2: A value that indicates how close the second color must match before it is replaced. (0...)
///   - contrast2: The contrast of the second replacement color. (0...)
///   - centerColor3: The center value of the third color range to replace.
///   - replacementColor3: A replacement color for the third color range.
///   - closeness3: A value that indicates how close the third color must match before it is replaced. (0...)
///   - contrast3: The contrast of the third replacement color. (0...)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func spotColor(centerColor1: CIColor,
               replacementColor1: CIColor,
               closeness1: Float = 0.22,
               contrast1: Float = 0.98,
               centerColor2: CIColor,
               replacementColor2: CIColor,
               closeness2: Float = 0.15,
               contrast2: Float = 0.98,
               centerColor3: CIColor,
               replacementColor3: CIColor,
               closeness3: Float = 0.5,
               contrast3: Float = 0.99,
               active: Bool = true) -> CIImage {
    if !active { return self }
    let spot = CIFilter.spotColor() // CISpotColor
    spot.inputImage = self
    // First color range.
    spot.centerColor1 = centerColor1
    spot.replacementColor1 = replacementColor1
    spot.closeness1 = closeness1
    spot.contrast1 = contrast1
    // Second color range.
    spot.centerColor2 = centerColor2
    spot.replacementColor2 = replacementColor2
    spot.closeness2 = closeness2
    spot.contrast2 = contrast2
    // Third color range.
    spot.centerColor3 = centerColor3
    spot.replacementColor3 = replacementColor3
    spot.closeness3 = closeness3
    spot.contrast3 = contrast3
    guard let output = spot.outputImage else { return CIImage.empty() }
    return output
}
/// Spot Light
///
/// Applies a directional spotlight effect to an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228414-spotlight)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISpotLight)
///
/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - lightPosition: The x and y position of the spotlight.
///   - lightPointsAt: The x and y position that the spotlight points at.
///   - brightness: The brightness of the spotlight. (0...)
///   - concentration: The spotlight size. The smaller the value, the more tightly focused the light beam. (0.001...)
///   - color: The color of the spotlight.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func spotLight(lightPosition: CIVector,
               lightPointsAt: CIVector,
               brightness: Float,
               concentration: Float = 0.1,
               color: CIColor = CIColor.white,
               active: Bool = true) -> CIImage {
    // Fix: the previous guard (`concentration != 20`) arbitrarily skipped the filter when
    // concentration == 20, contradicting the declared default (0.1). No parameter value makes
    // this filter a no-op, so the standard `active` switch controls application instead.
    guard active else { return self }
    let filter = CIFilter.spotLight() // CISpotLight
    filter.inputImage = self
    filter.lightPosition = lightPosition
    filter.lightPointsAt = lightPointsAt
    filter.brightness = brightness
    filter.concentration = concentration
    filter.color = color
    return filter.outputImage ?? CIImage.empty()
}
/// Straighten
///
/// Rotates the source image by the given angle in radians, then scales and crops the result so the rotated image fits the extent of the input image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228416-straighten)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStraightenFilter)
///
/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - angle: The angle in radians of the effect.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func straighten(angle: Float = 0) -> CIImage {
    // A zero angle is the identity rotation — nothing to do.
    if angle == 0 { return self }
    let straightener = CIFilter.straighten() // CIStraightenFilter
    straightener.inputImage = self
    straightener.angle = angle
    guard let output = straightener.outputImage else { return CIImage.empty() }
    return output
}
/// Stretch Crop
///
/// Distorts an image by stretching and/or cropping it to fit a target size.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401877-stretchcrop)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStretchCrop)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - size: The size in pixels of the output image.
///   - cropAmount: Determines if and how much cropping should be used to achieve the target size. If value is 0 then only stretching is used. If 1 then only cropping is used. (0...1)
///   - centerStretchAmount: Determine how much the center of the image is stretched if stretching is used. If value is 0 then the center of the image maintains the original aspect ratio. If 1 then the image is stretched uniformly. (0...1)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 14, macOS 11.0, *)
func stretchCrop(size: CGPoint,
                 cropAmount: Float = 0.25,
                 centerStretchAmount: Float = 0.25,
                 active: Bool = true) -> CIImage {
    if !active { return self }
    let stretcher = CIFilter.stretchCrop() // CIStretchCrop
    stretcher.inputImage = self
    stretcher.centerStretchAmount = centerStretchAmount
    stretcher.cropAmount = cropAmount
    stretcher.size = size
    return stretcher.outputImage ?? CIImage.empty()
}
/// Subtract Blend Mode
///
/// Unpremultiplies the source and background sample colors, subtracts the source from the background, and blends the result with the background using the PDF basic compositing formula. Black source values produce output equal to the background; non-black source values darken the background color values.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228418-subtractblendmode)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISubtractBlendMode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 13, macOS 10.15, *)
func subtractBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.subtractBlendMode() // CISubtractBlendMode
    blend.inputImage = self
    blend.backgroundImage = background
    guard let output = blend.outputImage else { return CIImage.empty() }
    return output
}
/// Swipe
///
/// Transitions from one image to another by simulating a swiping action.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228420-swipetransition)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISwipeTransition)
///
/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - targetImage: The target image for a transition.
///   - extent: The extent of the effect.
///   - color: The color of the swipe.
///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)
///   - angle: The angle in radians of the swipe.
///   - width: The width of the swipe. (0.1...)
///   - opacity: The opacity of the swipe. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func swipeTransition(targetImage: CIImage,
                     extent: CGRect,
                     color: CIColor = CIColor.white,
                     time: Float,
                     angle: Float = 0,
                     width: Float,
                     opacity: Float = 0) -> CIImage {
    // Fix: the previous guard (`angle != 0 || opacity != 0`) skipped the filter whenever both
    // angle and opacity were at their defaults (0), so the default call never transitioned no
    // matter what `time` was. `time` is the parameter that drives a transition; at time 0 the
    // transition has not started and the output is the source image, so that is the identity.
    guard time != 0 else { return self }
    let filter = CIFilter.swipeTransition() // CISwipeTransition
    filter.inputImage = self
    filter.targetImage = targetImage
    filter.extent = extent
    filter.color = color
    filter.time = time
    filter.angle = angle
    filter.width = width
    filter.opacity = opacity
    return filter.outputImage ?? CIImage.empty()
}
/// Temperature and Tint
///
/// Adapts the reference white point for an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228421-temperatureandtint)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITemperatureAndTint)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - neutral: A vector containing the source white point defined by color temperature and tint or chromaticity (x,y).
///   - targetNeutral: A vector containing the desired white point defined by color temperature and tint or chromaticity (x,y).
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func temperatureAndTint(neutral: CIVector, targetNeutral: CIVector, active: Bool = true) -> CIImage {
    if !active { return self }
    let whiteBalance = CIFilter.temperatureAndTint() // CITemperatureAndTint
    whiteBalance.inputImage = self
    whiteBalance.targetNeutral = targetNeutral
    whiteBalance.neutral = neutral
    guard let output = whiteBalance.outputImage else { return CIImage.empty() }
    return output
}
/// Thermal
///
/// Apply a “Thermal” style effect to an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228423-thermal)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func thermal(active: Bool = true) -> CIImage {
    if !active { return self }
    let effect = CIFilter.thermal() // CIThermal
    effect.inputImage = self
    return effect.outputImage ?? CIImage.empty()
}
/// Tone Curve
///
/// Adjusts tone response of the R, G, and B channels of an image. The input points are five x,y values that are interpolated using a spline curve. The curve is applied in a perceptual (gamma 2) version of the working space.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228424-tonecurve)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIToneCurve)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - point0: A vector containing the position of the first point of the tone curve
///   - point1: A vector containing the position of the second point of the tone curve
///   - point2: A vector containing the position of the third point of the tone curve
///   - point3: A vector containing the position of the fourth point of the tone curve
///   - point4: A vector containing the position of the fifth point of the tone curve
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func toneCurve(point0: CGPoint,
               point1: CGPoint = .init(x: 0.25, y: 0.25),
               point2: CGPoint = .init(x: 0.5, y: 0.5),
               point3: CGPoint = .init(x: 0.75, y: 0.75),
               point4: CGPoint = .init(x: 1, y: 1)) -> CIImage {
    // Fix: the identity guard previously ignored `point0`, so a call that moved only the first
    // point (e.g. point0 = (0, 0.5)) incorrectly returned the unfiltered image. The identity
    // (linear) curve starts at (0, 0), so point0 must be checked along with the other points.
    guard point0 != .init(x: 0, y: 0) || point1 != .init(x: 0.25, y: 0.25) || point2 != .init(x: 0.5, y: 0.5) || point3 != .init(x: 0.75, y: 0.75) || point4 != .init(x: 1, y: 1) else { return self }
    let filter = CIFilter.toneCurve() // CIToneCurve
    filter.inputImage = self
    filter.point0 = point0
    filter.point1 = point1
    filter.point2 = point2
    filter.point3 = point3
    filter.point4 = point4
    return filter.outputImage ?? CIImage.empty()
}
/// Tone Map Headroom
///
/// Apply a global tone curve to an image that reduces colors from a source headroom value to a target headroom value.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401878-tonemapheadroom)
///
/// Categories: Color Adjustment, Video, Interlaced, High Dynamic Range, Non-Square Pixels, Still Image, Built-In
///
///
/// - Parameters:
///   - sourceHeadroom: Specifies the headroom of the input image. (1...32)
///   - targetHeadroom: Specifies the target headroom of the output image. (1...32)
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 18, macOS 15.0, *)
func toneMapHeadroom(sourceHeadroom: Float, targetHeadroom: Float = 1, active: Bool = true) -> CIImage {
    if !active { return self }
    let toneMapper = CIFilter.toneMapHeadroom() // CIToneMapHeadroom
    toneMapper.inputImage = self
    toneMapper.targetHeadroom = targetHeadroom
    toneMapper.sourceHeadroom = sourceHeadroom
    guard let output = toneMapper.outputImage else { return CIImage.empty() }
    return output
}
/// Torus Lens Distortion
///
/// Creates a torus-shaped lens and distorts the portion of the image the lens is placed over.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401879-toruslensdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITorusLensDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The outer radius of the torus. (0...)
///   - width: The width of the ring. (0...)
///   - refraction: The refraction of the glass. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func torusLensDistortion(center: CGPoint, radius: Float, width: Float, refraction: Float = 1.7) -> CIImage {
    // All three of these at their no-op values means the lens has no visible effect.
    if radius == 0 && width == 0 && refraction == 1 { return self }
    let lens = CIFilter.torusLensDistortion() // CITorusLensDistortion
    lens.inputImage = self
    lens.refraction = refraction
    lens.width = width
    lens.radius = radius
    lens.center = center
    return lens.outputImage ?? CIImage.empty()
}
/// Triangle Kaleidoscope
///
/// Maps a triangular portion of the image to a triangular area and then generates a kaleidoscope effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228425-trianglekaleidoscope)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITriangleKaleidoscope)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - point: The x and y position to use as the center of the triangular area in the input image.
///   - size: The size in pixels of the triangle.
///   - rotation: Rotation angle in radians of the triangle.
///   - decay: The decay determines how fast the color fades from the center triangle.
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func triangleKaleidoscope(point: CGPoint,
                          size: Float = 700,
                          rotation: Float,
                          decay: Float = 0.85,
                          active: Bool = true) -> CIImage {
    if !active { return self }
    let kaleidoscope = CIFilter.triangleKaleidoscope() // CITriangleKaleidoscope
    kaleidoscope.inputImage = self
    kaleidoscope.decay = decay
    kaleidoscope.rotation = rotation
    kaleidoscope.size = size
    kaleidoscope.point = point
    guard let output = kaleidoscope.outputImage else { return CIImage.empty() }
    return output
}
/// Triangle Tile
///
/// Maps a triangular portion of the image to a triangular area and then tiles the result.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228426-triangletile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITriangleTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func triangleTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    if angle == 0 { return self }
    let tile = CIFilter.triangleTile() // CITriangleTile
    tile.inputImage = self
    tile.width = width
    tile.angle = angle
    tile.center = center
    return tile.outputImage ?? CIImage.empty()
}
/// Twelvefold Reflected Tile
///
/// Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228427-twelvefoldreflectedtile)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITwelvefoldReflectedTile)
///
/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - angle: The angle in radians of the tiled pattern.
///   - width: The width of a tile. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func twelvefoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {
    if angle == 0 { return self }
    let tile = CIFilter.twelvefoldReflectedTile() // CITwelvefoldReflectedTile
    tile.inputImage = self
    tile.center = center
    tile.width = width
    tile.angle = angle
    guard let output = tile.outputImage else { return CIImage.empty() }
    return output
}
/// Twirl Distortion
///
/// Rotates pixels around a point to give a twirling effect. You can specify the number of rotations as well as the center and radius of the effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401880-twirldistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITwirlDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - angle: The angle in radians of the twirl. Values can be positive or negative.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func twirlDistortion(center: CGPoint, radius: Float, angle: Float = .pi) -> CIImage {
    // A zero twirl angle leaves every pixel where it was.
    if angle == 0 { return self }
    let twirl = CIFilter.twirlDistortion() // CITwirlDistortion
    twirl.inputImage = self
    twirl.angle = angle
    twirl.radius = radius
    twirl.center = center
    return twirl.outputImage ?? CIImage.empty()
}
/// Unsharp Mask
///
/// Increases the contrast of the edges between pixels of different colors in an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228428-unsharpmask)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIUnsharpMask)
///
/// Categories: Sharpen, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - radius: The radius around a given pixel to apply the unsharp mask. The larger the radius, the more of the image is affected. (0...)
///   - intensity: The intensity of the effect. The larger the value, the more contrast in the affected area. (0...)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func unsharpMask(radius: Float, intensity: Float = 0.5) -> CIImage {
    // With both radius and intensity at zero the mask has no effect.
    if radius == 0 && intensity == 0 { return self }
    let sharpener = CIFilter.unsharpMask() // CIUnsharpMask
    sharpener.inputImage = self
    sharpener.intensity = intensity
    sharpener.radius = radius
    guard let output = sharpener.outputImage else { return CIImage.empty() }
    return output
}
/// Vibrance
///
/// Adjusts the saturation of an image while keeping pleasing skin tones.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228429-vibrance)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVibrance)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - amount: The amount to adjust the saturation. (-1...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func vibrance(amount: Float = 0) -> CIImage {
    // Zero adjustment is the identity.
    if amount == 0 { return self }
    let adjustment = CIFilter.vibrance() // CIVibrance
    adjustment.inputImage = self
    adjustment.amount = amount
    guard let output = adjustment.outputImage else { return CIImage.empty() }
    return output
}
/// Vignette
///
/// Applies a vignette shading to the corners of an image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228431-vignette)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVignette)
///
/// Categories: Color Effect, Video, Interlaced, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - intensity: The intensity of the effect. (-1...1)
///   - radius: The distance from the center of the effect. (0...2)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func vignette(intensity: Float = 0, radius: Float = 1) -> CIImage {
    // At zero intensity the shading is invisible regardless of radius.
    if intensity == 0 { return self }
    let shading = CIFilter.vignette() // CIVignette
    shading.inputImage = self
    shading.radius = radius
    shading.intensity = intensity
    return shading.outputImage ?? CIImage.empty()
}
/// Vignette Effect
///
/// Modifies the brightness of an image around the periphery of a specified region.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228430-vignetteeffect)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVignetteEffect)
///
/// Categories: Color Effect, Video, Interlaced, Still Image, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The distance from the center of the effect. (0...)
///   - intensity: The intensity of the effect. (-1...1)
///   - falloff: The falloff of the effect. (0...1)
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func vignetteEffect(center: CGPoint, radius: Float, intensity: Float = 1, falloff: Float = 0.5) -> CIImage {
    // At zero intensity the effect is invisible.
    if intensity == 0 { return self }
    let vignette = CIFilter.vignetteEffect() // CIVignetteEffect
    vignette.inputImage = self
    vignette.falloff = falloff
    vignette.intensity = intensity
    vignette.radius = radius
    vignette.center = center
    guard let output = vignette.outputImage else { return CIImage.empty() }
    return output
}
/// Vivid Light Blend Mode
///
/// A blend mode that is a combination of color burn and color dodge blend modes.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401881-vividlightblendmode)
///
/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In
///
///
/// - Parameters:
///   - backgroundImage: The image to use as a background image.
/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil
@available(iOS 15, macOS 12.0, *)
func vividLightBlendMode(backgroundImage: CIImage?) -> CIImage {
    guard let background = backgroundImage else { return self }
    let blend = CIFilter.vividLightBlendMode() // CIVividLightBlendMode
    blend.backgroundImage = background
    blend.inputImage = self
    return blend.outputImage ?? CIImage.empty()
}
/// Vortex Distortion
///
/// Rotates pixels around a point to simulate a vortex. You can specify the number of rotations as well the center and radius of the effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401882-vortexdistortion)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVortexDistortion)
///
/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range
///
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)
///   - angle: The angle in radians of the effect.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 14, macOS 11.0, *)
func vortexDistortion(center: CGPoint, radius: Float, angle: Float) -> CIImage {
    // With both radius and angle at zero no pixel moves.
    if radius == 0 && angle == 0 { return self }
    let vortex = CIFilter.vortexDistortion() // CIVortexDistortion
    vortex.inputImage = self
    vortex.angle = angle
    vortex.radius = radius
    vortex.center = center
    guard let output = vortex.outputImage else { return CIImage.empty() }
    return output
}
/// White Point Adjust
///
/// Adjusts the reference white point for an image and maps all colors in the source using the new reference.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228432-whitepointadjust)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIWhitePointAdjust)
///
/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In
///
///
/// - Parameters:
///   - color: A color to use as the white point.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func whitePointAdjust(color: CIColor = CIColor.white) -> CIImage {
    // Mapping white to white changes nothing.
    if color == CIColor.white { return self }
    let adjustment = CIFilter.whitePointAdjust() // CIWhitePointAdjust
    adjustment.inputImage = self
    adjustment.color = color
    guard let output = adjustment.outputImage else { return CIImage.empty() }
    return output
}
/// Applies the “X-Ray” filter (`CIXRay`), giving the image an “XRay”-style effect.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228433-xray)
///
/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In
///
/// - Parameters:
///   - active: should this filter be applied
/// - Returns: processed new `CIImage`, or identity if `active` is false
@available(iOS 13, macOS 10.15, *)
func xRay(active: Bool = true) -> CIImage {
	if !active { return self }
	let ciFilter = CIFilter.xRay() // CIXRay
	ciFilter.inputImage = self
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Applies the “Zoom Blur” filter (`CIZoomBlur`), simulating the effect of zooming
/// the camera while capturing the image.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228434-zoomblur)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIZoomBlur)
///
/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - amount: The zoom-in amount; larger values result in more zooming in.
/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied
@available(iOS 13, macOS 10.15, *)
func zoomBlur(center: CGPoint, amount: Float) -> CIImage {
	if amount == 0 { return self } // zero zoom is a no-op
	let ciFilter = CIFilter.zoomBlur() // CIZoomBlur
	ciFilter.inputImage = self
	ciFilter.amount = amount
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
//
// MARK: GENERATORS
//
/// Attributed Text Image Generator (`CIAttributedTextImageGenerator`) —
/// generates an image by rendering an attributed string.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228267-attributedtextimagegenerator)
///
/// Categories: Generator, Video, Still Image, Built-In
///
/// - Parameters:
///   - text: The attributed text to render.
///   - scaleFactor: The scale of the font to use for the generated text. (0...)
///   - padding: An additional number of pixels to pad around the text’s bounding box. (0...200)
/// - Returns: a new `CIImage` containing the rendered attributed string, or an empty image if generation fails
@available(iOS 16, macOS 13.0, *)
static func attributedTextImageGenerator(text: NSAttributedString, scaleFactor: Float = 1, padding: Int) -> CIImage {
	let ciFilter = CIFilter.attributedTextImageGenerator() // CIAttributedTextImageGenerator
	ciFilter.padding = Float(padding) // filter expects a Float even though callers supply whole pixels
	ciFilter.scaleFactor = scaleFactor
	ciFilter.text = text
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Aztec Code Generator (`CIAztecCodeGenerator`) — generates an Aztec code
/// (two-dimensional barcode) from input data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228268-azteccodegenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAztecCodeGenerator)
///
/// Categories: Generator, Still Image, Built-In
///
/// - Parameters:
///   - message: The message to encode in the Aztec barcode.
///   - correctionLevel: Aztec error correction value between 5 and 95. (5...95)
///   - layers: Aztec layers value between 1 and 32. (1...32)
///   - compactStyle: A Boolean that specifies whether to force a compact style Aztec code.
/// - Returns: a new `CIImage` containing the Aztec code, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func aztecCodeGenerator(message: Data, correctionLevel: Int, layers: Int, compactStyle: Bool) -> CIImage {
	let ciFilter = CIFilter.aztecCodeGenerator() // CIAztecCodeGenerator
	ciFilter.message = message
	// The filter API takes Float values for these numeric/boolean inputs.
	ciFilter.correctionLevel = Float(correctionLevel)
	ciFilter.layers = Float(layers)
	ciFilter.compactStyle = compactStyle ? 1 : 0
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Barcode Generator (`CIBarcodeGenerator`) — generates a barcode image from a
/// `CIBarcodeDescriptor`.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228269-barcodegenerator)
///
/// Categories: Generator, Video, Still Image, Built-In
///
/// - Parameters:
///   - barcodeDescriptor: The `CIBarcodeDescriptor` object to generate an image for.
/// - Returns: a new `CIImage` containing the barcode, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func barcodeGenerator(barcodeDescriptor: CIBarcodeDescriptor) -> CIImage {
	let ciFilter = CIFilter.barcodeGenerator() // CIBarcodeGenerator
	ciFilter.barcodeDescriptor = barcodeDescriptor
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Blurred Rectangle Generator (`CIBlurredRectangleGenerator`) — generates a
/// blurred rectangle image with the specified extent, blur sigma, and color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401849-blurredrectanglegenerator)
///
/// Categories: Generator, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - sigma: The sigma for a gaussian blur. (0...)
///   - color: A color.
/// - Returns: a new `CIImage` containing the blurred rectangle, or an empty image if generation fails
@available(iOS 17, macOS 14.0, *)
static func blurredRectangleGenerator(extent: CGRect, sigma: Float, color: CIColor = CIColor.white) -> CIImage {
	let ciFilter = CIFilter.blurredRectangleGenerator() // CIBlurredRectangleGenerator
	ciFilter.color = color
	ciFilter.sigma = sigma
	ciFilter.extent = extent
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Checkerboard (`CICheckerboardGenerator`) — generates a pattern of squares of
/// alternating colors with a configurable size, color pair, and edge sharpness.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228279-checkerboardgenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICheckerboardGenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color0: A color to use for the first set of squares.
///   - color1: A color to use for the second set of squares.
///   - width: The width of the squares in the pattern.
///   - sharpness: The sharpness of the edges in the pattern; smaller values are blurrier. (0...1)
/// - Returns: a new `CIImage` containing the checkerboard pattern, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func checkerboardGenerator(center: CGPoint,
								  color0: CIColor = CIColor.white,
								  color1: CIColor = CIColor.black,
								  width: Float,
								  sharpness: Float = 1) -> CIImage {
	let ciFilter = CIFilter.checkerboardGenerator() // CICheckerboardGenerator
	ciFilter.sharpness = sharpness
	ciFilter.width = width
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Code 128 Barcode Generator (`CICode128BarcodeGenerator`) — generates a Code 128
/// one-dimensional barcode from input data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228281-code128barcodegenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICode128BarcodeGenerator)
///
/// Categories: Generator, Still Image, Built-In
///
/// - Parameters:
///   - message: The message to encode in the Code 128 barcode.
///   - quietSpace: The number of empty white pixels that should surround the barcode. (0...100)
///   - barcodeHeight: The height of the generated barcode in pixels. (1...500)
/// - Returns: a new `CIImage` containing the barcode, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func code128BarcodeGenerator(message: Data, quietSpace: Int, barcodeHeight: Int) -> CIImage {
	let ciFilter = CIFilter.code128BarcodeGenerator() // CICode128BarcodeGenerator
	ciFilter.message = message
	// The filter API takes Float values for these pixel counts.
	ciFilter.barcodeHeight = Float(barcodeHeight)
	ciFilter.quietSpace = Float(quietSpace)
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
// ℹ️ CIConstantColorGenerator already has a CIImage initializer: init(color: CIColor)
/// Gaussian Gradient (`CIGaussianGradient`) — generates a gradient that varies from
/// one color to another using a Gaussian distribution.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228332-gaussiangradient)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGaussianGradient)
///
/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color0: The first color to use in the gradient.
///   - color1: The second color to use in the gradient.
///   - radius: The radius of the Gaussian distribution. (0...)
/// - Returns: a new `CIImage` containing the gradient, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func gaussianGradient(center: CGPoint,
							 color0: CIColor = CIColor.white,
							 color1: CIColor = CIColor.clear,
							 radius: Float) -> CIImage {
	let ciFilter = CIFilter.gaussianGradient() // CIGaussianGradient
	ciFilter.radius = radius
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Hue/Saturation/Value Gradient (`CIHueSaturationValueGradient`) — generates a
/// color wheel that shows hues and saturations for a specified value.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228342-huesaturationvaluegradient)
///
/// Categories: Gradient, Video, Still Image, Built-In
///
/// - Parameters:
///   - value: The color value used to generate the color wheel. (0...)
///   - radius: The distance from the center of the effect. (0...)
///   - softness: The softness of the generated color wheel. (0...)
///   - dither: A Boolean-style value (0 or 1) specifying whether to dither the generated output. (0...)
///   - colorSpace: The `CGColorSpace` that the color wheel should be generated in.
/// - Returns: a new `CIImage` containing the color wheel, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func hueSaturationValueGradient(value: Float = 1,
									   radius: Float,
									   softness: Float = 1,
									   dither: Float = 1,
									   colorSpace: CGColorSpace = CGColorSpace(name: CGColorSpace.sRGB)!) -> CIImage {
	let ciFilter = CIFilter.hueSaturationValueGradient() // CIHueSaturationValueGradient
	ciFilter.colorSpace = colorSpace
	ciFilter.dither = dither
	ciFilter.softness = softness
	ciFilter.radius = radius
	ciFilter.value = value
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Lenticular Halo (`CILenticularHaloGenerator`) — simulates a halo generated by the
/// diffraction associated with the spread of a lens. Typically composited over another
/// image to simulate lens flares and similar effects.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228345-lenticularhalogenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILenticularHaloGenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color: A color.
///   - haloRadius: The radius of the halo. (0...)
///   - haloWidth: The width of the halo, from its inner radius to its outer radius. (0...)
///   - haloOverlap: The overlap of red, green, and blue halos; 1 results in a full overlap. (0...)
///   - striationStrength: The intensity of the halo colors; larger values are more intense. (0...)
///   - striationContrast: The contrast of the halo colors; larger values are higher contrast. (0...)
///   - time: The duration of the effect. (0...1)
/// - Returns: new `CIImage` containing the halo, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func lenticularHaloGenerator(center: CGPoint,
									color: CIColor,
									haloRadius: Float,
									haloWidth: Float,
									haloOverlap: Float = 0.77,
									striationStrength: Float = 0.5,
									striationContrast: Float = 1,
									time: Float = 0) -> CIImage {
	let ciFilter = CIFilter.lenticularHaloGenerator() // CILenticularHaloGenerator
	ciFilter.time = time
	ciFilter.striationContrast = striationContrast
	ciFilter.striationStrength = striationStrength
	ciFilter.haloOverlap = haloOverlap
	ciFilter.haloWidth = haloWidth
	ciFilter.haloRadius = haloRadius
	ciFilter.color = color
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Linear Gradient (`CILinearGradient`) — generates a gradient that varies along a
/// linear axis between two defined endpoints.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228351-lineargradient)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearGradient)
///
/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - point0: The starting position of the gradient — where the first color begins.
///   - point1: The ending position of the gradient — where the second color begins.
///   - color0: The first color to use in the gradient.
///   - color1: The second color to use in the gradient.
/// - Returns: a new `CIImage` containing the gradient, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func linearGradient(point0: CGPoint = .zero,
						   point1: CGPoint,
						   color0: CIColor = CIColor.white,
						   color1: CIColor = CIColor.black) -> CIImage {
	let ciFilter = CIFilter.linearGradient() // CILinearGradient
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.point1 = point1
	ciFilter.point0 = point0
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Mesh Generator (`CIMeshGenerator`) — generates a mesh image from an array of
/// line segments.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228359-meshgenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - width: The width in pixels of the effect. (0...)
///   - color: A color.
///   - mesh: An array of line segments, each stored as a `CIVector` containing a start point and end point.
///     (Typed `[Any]` because that is how the underlying filter property is declared.)
/// - Returns: a new `CIImage` containing the mesh, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func meshGenerator(width: Float, color: CIColor = CIColor.white, mesh: [Any]) -> CIImage {
	let ciFilter = CIFilter.meshGenerator() // CIMeshGenerator
	ciFilter.mesh = mesh
	ciFilter.color = color
	ciFilter.width = width
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// PDF417 Barcode Generator (`CIPDF417BarcodeGenerator`) — generates a PDF417 code
/// (two-dimensional barcode) from input data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228261-pdf417barcodegenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPDF417BarcodeGenerator)
///
/// Categories: Generator, Video, Still Image, Built-In
///
/// - Parameters:
///   - message: The message to encode in the PDF417 barcode.
///   - minWidth: The minimum width of the generated barcode in pixels. (56...583)
///   - maxWidth: The maximum width of the generated barcode in pixels. (56...583)
///   - minHeight: The minimum height of the generated barcode in pixels. (13...283)
///   - maxHeight: The maximum height of the generated barcode in pixels. (13...283)
///   - dataColumns: The number of data columns in the generated barcode. (1...30)
///   - rows: The number of rows in the generated barcode. (3...90)
///   - preferredAspectRatio: The preferred aspect ratio of the generated barcode. (0...)
///   - compactionMode: The compaction mode of the generated barcode. (0...3)
///   - compactStyle: A Boolean that specifies whether to force a compact style PDF417 code.
///   - correctionLevel: The correction level ratio of the generated barcode. (0...8)
///   - alwaysSpecifyCompaction: A Boolean value specifying whether to force compaction style.
/// - Returns: a new `CIImage` containing the PDF417 code, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func pdf417BarcodeGenerator(message: Data,
								   minWidth: Int,
								   maxWidth: Int,
								   minHeight: Int,
								   maxHeight: Int,
								   dataColumns: Int,
								   rows: Int,
								   preferredAspectRatio: Float,
								   compactionMode: Int,
								   compactStyle: Bool,
								   correctionLevel: Int,
								   alwaysSpecifyCompaction: Bool) -> CIImage {
	let ciFilter = CIFilter.pdf417BarcodeGenerator() // CIPDF417BarcodeGenerator
	ciFilter.message = message
	// The filter API declares all of these numeric/boolean inputs as Float.
	ciFilter.alwaysSpecifyCompaction = alwaysSpecifyCompaction ? 1 : 0
	ciFilter.correctionLevel = Float(correctionLevel)
	ciFilter.compactStyle = compactStyle ? 1 : 0
	ciFilter.compactionMode = Float(compactionMode)
	ciFilter.preferredAspectRatio = preferredAspectRatio
	ciFilter.rows = Float(rows)
	ciFilter.dataColumns = Float(dataColumns)
	ciFilter.maxHeight = Float(maxHeight)
	ciFilter.minHeight = Float(minHeight)
	ciFilter.maxWidth = Float(maxWidth)
	ciFilter.minWidth = Float(minWidth)
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// QR Code Generator (`CIQRCodeGenerator`) — generates a Quick Response code
/// (two-dimensional barcode) from input data.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228262-qrcodegenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIQRCodeGenerator)
///
/// Categories: Generator, Still Image, Built-In
///
/// - Parameters:
///   - message: The message to encode in the QR code.
///   - correctionLevel: QR code correction level: "L", "M", "Q", or "H".
/// - Returns: a new `CIImage` containing the QR code, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func qrCodeGenerator(message: Data, correctionLevel: String = "M") -> CIImage {
	let ciFilter = CIFilter.qrCodeGenerator() // CIQRCodeGenerator
	ciFilter.correctionLevel = correctionLevel
	ciFilter.message = message
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Radial Gradient (`CIRadialGradient`) — generates a gradient that varies radially
/// between two circles having the same center. It is valid for one of the two circles
/// to have a radius of 0.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228395-radialgradient)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRadialGradient)
///
/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - radius0: The radius of the starting circle to use in the gradient. (0...)
///   - radius1: The radius of the ending circle to use in the gradient. (0...)
///   - color0: The first color to use in the gradient.
///   - color1: The second color to use in the gradient.
/// - Returns: a new `CIImage` containing the gradient, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func radialGradient(center: CGPoint,
						   radius0: Float,
						   radius1: Float,
						   color0: CIColor = CIColor.white,
						   color1: CIColor = CIColor.black) -> CIImage {
	let ciFilter = CIFilter.radialGradient() // CIRadialGradient
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.radius1 = radius1
	ciFilter.radius0 = radius0
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Random Generator (`CIRandomGenerator`) — generates an image of infinite extent
/// whose pixel values are made up of four independent, uniformly-distributed random
/// numbers in the 0 to 1 range.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228396-randomgenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRandomGenerator)
///
/// Categories: Generator, Video, Still Image, Built-In
///
/// - Returns: a new `CIImage` of random noise, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func randomGenerator() -> CIImage {
	// No inputs to configure — just instantiate the filter and read its output.
	guard let result = CIFilter.randomGenerator().outputImage else { return CIImage.empty() } // CIRandomGenerator
	return result
}
/// Rounded Rectangle Generator (`CIRoundedRectangleGenerator`) — generates a rounded
/// rectangle image with the specified extent, corner radius, and color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3335007-roundedrectanglegenerator)
///
/// Categories: Generator, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - radius: The distance from the center of the effect. (0...)
///   - color: A color.
/// - Returns: a new `CIImage` containing the rounded rectangle, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func roundedRectangleGenerator(extent: CGRect, radius: Float, color: CIColor = CIColor.white) -> CIImage {
	let ciFilter = CIFilter.roundedRectangleGenerator() // CIRoundedRectangleGenerator
	ciFilter.color = color
	ciFilter.radius = radius
	ciFilter.extent = extent
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Rounded Rectangle Stroke Generator (`CIRoundedRectangleStrokeGenerator`) —
/// generates a rounded rectangle stroke image with the specified extent, corner
/// radius, stroke width, and color.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401875-roundedrectanglestrokegenerator)
///
/// Categories: Generator, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - extent: A rectangle that defines the extent of the effect.
///   - radius: The distance from the center of the effect. (0...)
///   - color: A color.
///   - width: The width in pixels of the effect. (0...)
/// - Returns: a new `CIImage` containing the stroked rounded rectangle, or an empty image if generation fails
@available(iOS 17, macOS 14.0, *)
static func roundedRectangleStrokeGenerator(extent: CGRect,
											radius: Float,
											color: CIColor = CIColor.white,
											width: Float) -> CIImage {
	let ciFilter = CIFilter.roundedRectangleStrokeGenerator() // CIRoundedRectangleStrokeGenerator
	ciFilter.width = width
	ciFilter.color = color
	ciFilter.radius = radius
	ciFilter.extent = extent
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Smooth Linear Gradient (`CISmoothLinearGradient`) — generates a gradient that uses
/// an S-curve function to blend colors along a linear axis between two defined endpoints.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228407-smoothlineargradient)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISmoothLinearGradient)
///
/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - point0: The starting position of the gradient — where the first color begins.
///   - point1: The ending position of the gradient — where the second color begins.
///   - color0: The first color to use in the gradient.
///   - color1: The second color to use in the gradient.
/// - Returns: a new `CIImage` containing the gradient, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func smoothLinearGradient(point0: CGPoint = .zero,
								 point1: CGPoint,
								 color0: CIColor = CIColor.white,
								 color1: CIColor = CIColor.black) -> CIImage {
	let ciFilter = CIFilter.smoothLinearGradient() // CISmoothLinearGradient
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.point1 = point1
	ciFilter.point0 = point0
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Star Shine (`CIStarShineGenerator`) — generates a starburst pattern similar to a
/// supernova; can be used to simulate a lens flare.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228415-starshinegenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStarShineGenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color: The color to use for the outer shell of the circular star.
///   - radius: The radius of the star. (0...)
///   - crossScale: The size of the cross pattern. (0...)
///   - crossAngle: The angle in radians of the cross pattern.
///   - crossOpacity: The opacity of the cross pattern. (-8...)
///   - crossWidth: The width of the cross pattern. (0...)
///   - epsilon: The length of the cross spikes. (-8...)
/// - Returns: a new `CIImage` containing the starburst, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func starShineGenerator(center: CGPoint,
							   color: CIColor,
							   radius: Float,
							   crossScale: Float = 15,
							   crossAngle: Float = 0.6,
							   crossOpacity: Float = -2,
							   crossWidth: Float,
							   epsilon: Float = -2) -> CIImage {
	let ciFilter = CIFilter.starShineGenerator() // CIStarShineGenerator
	ciFilter.epsilon = epsilon
	ciFilter.crossWidth = crossWidth
	ciFilter.crossOpacity = crossOpacity
	ciFilter.crossAngle = crossAngle
	ciFilter.crossScale = crossScale
	ciFilter.radius = radius
	ciFilter.color = color
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Stripes (`CIStripesGenerator`) — generates a stripe pattern with configurable
/// colors, spacing, and contrast.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228417-stripesgenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStripesGenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color0: A color to use for the odd stripes.
///   - color1: A color to use for the even stripes.
///   - width: The width of a stripe.
///   - sharpness: The sharpness of the stripe pattern; smaller values are blurrier. (0...1)
/// - Returns: a new `CIImage` containing the stripe pattern, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func stripesGenerator(center: CGPoint,
							 color0: CIColor = CIColor.white,
							 color1: CIColor = CIColor.black,
							 width: Float,
							 sharpness: Float = 1) -> CIImage {
	let ciFilter = CIFilter.stripesGenerator() // CIStripesGenerator
	ciFilter.sharpness = sharpness
	ciFilter.width = width
	ciFilter.color1 = color1
	ciFilter.color0 = color0
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Sunbeams (`CISunbeamsGenerator`) — generates a sun effect. The output is typically
/// used as input to a composite filter.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228419-sunbeamsgenerator)
/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISunbeamsGenerator)
///
/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In
///
/// - Parameters:
///   - center: The center of the effect as x and y pixel coordinates.
///   - color: The color of the sun.
///   - sunRadius: The radius of the sun. (0...)
///   - maxStriationRadius: The radius of the sunbeams. (0...)
///   - striationStrength: The intensity of the sunbeams; higher values are more intense. (0...)
///   - striationContrast: The contrast of the sunbeams; higher values have more contrast. (0...)
///   - time: The duration of the effect. (0...1)
/// - Returns: a new `CIImage` containing the sun effect, or an empty image if generation fails
@available(iOS 13, macOS 10.15, *)
static func sunbeamsGenerator(center: CGPoint,
							  color: CIColor,
							  sunRadius: Float,
							  maxStriationRadius: Float = 2.58,
							  striationStrength: Float = 0.5,
							  striationContrast: Float = 1.375,
							  time: Float = 0) -> CIImage {
	let ciFilter = CIFilter.sunbeamsGenerator() // CISunbeamsGenerator
	ciFilter.time = time
	ciFilter.striationContrast = striationContrast
	ciFilter.striationStrength = striationStrength
	ciFilter.maxStriationRadius = maxStriationRadius
	ciFilter.sunRadius = sunRadius
	ciFilter.color = color
	ciFilter.center = center
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
/// Text Image Generator (`CITextImageGenerator`) — generates an image from a string
/// and font information.
///
/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228422-textimagegenerator)
///
/// Categories: Generator, Video, Still Image, Built-In
///
/// - Parameters:
///   - text: The text to render.
///   - fontName: The name of the font to use for the generated text.
///   - fontSize: The size of the font to use for the generated text. (0...)
///   - scaleFactor: The scale of the font to use for the generated text. (0...)
///   - padding: The number of additional pixels to pad around the text’s bounding box. (0...200)
/// - Returns: a new `CIImage` containing the rendered text, or an empty image if generation fails
@available(iOS 16, macOS 13.0, *)
static func textImageGenerator(text: String,
							   fontName: String = "HelveticaNeue",
							   fontSize: Float = 12,
							   scaleFactor: Float = 1,
							   padding: Int) -> CIImage {
	let ciFilter = CIFilter.textImageGenerator() // CITextImageGenerator
	ciFilter.padding = Float(padding) // filter expects a Float even though callers supply whole pixels
	ciFilter.scaleFactor = scaleFactor
	ciFilter.fontSize = fontSize
	ciFilter.fontName = fontName
	ciFilter.text = text
	guard let result = ciFilter.outputImage else { return CIImage.empty() }
	return result
}
}
================================================
FILE: Sources/Image-Extensions.swift
================================================
//
// Image-Extensions.swift
// SwiftUI Core Image
//
// Created by Dan Wood on 5/9/23.
//
import Foundation
import CoreGraphics
import CoreImage
import SwiftUI
public extension Image {
	// Shared rendering context — reused across initializations because creating a CIContext is expensive.
	private static let renderingContext = CIContext(options: nil)

	/// Creates a SwiftUI `Image` by rendering the given Core Image `CIImage`.
	///
	/// On UIKit platforms the image is rendered to a `CGImage` via the shared context
	/// (initializing through `UIImage` doesn't seem to work, but `CGImage` is fine);
	/// if rendering fails, a "questionmark" system image is substituted. On AppKit,
	/// an `NSCIImageRep` is wrapped in an `NSImage` — slightly better optimized for
	/// repeated runs, presumably because it defers rendering the bitmap until needed.
	/// Extents wider or taller than 10000 pixels (a simple overflow-extent check)
	/// yield an empty `NSImage`.
	init(ciImage: CIImage) {
		#if canImport(UIKit)
		// Note that making a UIImage and then using that to initialize the Image doesn't seem to work, but CGImage is fine.
		guard let rendered = Self.renderingContext.createCGImage(ciImage, from: ciImage.extent) else {
			self.init(systemName: "questionmark")
			return
		}
		self.init(rendered, scale: 1.0, orientation: .up, label: Text(""))
		#elseif canImport(AppKit)
		// Looks like the NSCIImageRep is slightly better optimized for repeated runs,
		// I'm guessing that it doesn't actually render the bitmap unless it needs to.
		let imageRep = NSCIImageRep(ciImage: ciImage)
		if imageRep.size.width > 10000 || imageRep.size.height > 10000 { // simple test to make sure we don't have overflow extent
			self.init(nsImage: NSImage())
			return
		}
		let wrapper = NSImage(size: imageRep.size) // size affects aspect ratio but not resolution
		wrapper.addRepresentation(imageRep)
		self.init(nsImage: wrapper)
		#endif
	}
}