Repository: GodzzZZZ/MFPictureBrowser Branch: master Commit: 965b0ce7ea2e Files: 161 Total size: 896.3 KB Directory structure: gitextract_yxwgostq/ ├── .gitignore ├── LICENSE ├── MFPictureBrowser.podspec ├── MFPictureBrowserDemo/ │ ├── MFPictureBrowserDemo/ │ │ ├── AppDelegate.h │ │ ├── AppDelegate.m │ │ ├── Base.lproj/ │ │ │ ├── LaunchScreen.storyboard │ │ │ └── Main.storyboard │ │ ├── Image.xcassets/ │ │ │ ├── 10.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 11.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 3.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 4.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 5.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 6.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 7.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 8.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── 9.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── Contents.json │ │ │ ├── ic_messages_pictype_gif_30x30_.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── ic_messages_pictype_long_pic_30x30_.imageset/ │ │ │ │ └── Contents.json │ │ │ └── placeholder.imageset/ │ │ │ └── Contents.json │ │ ├── Info.plist │ │ ├── ListViewController.h │ │ ├── ListViewController.m │ │ ├── LocalImageViewController.h │ │ ├── LocalImageViewController.m │ │ ├── MFDisplayPhotoCollectionViewCell.h │ │ ├── MFDisplayPhotoCollectionViewCell.m │ │ ├── MFPictureBrowser/ │ │ │ ├── MFPictureBrowser.h │ │ │ ├── MFPictureBrowser.m │ │ │ ├── MFPictureModelProtocol.h │ │ │ ├── MFPictureView.h │ │ │ ├── MFPictureView.m │ │ │ ├── MFRunLoopDistribution.h │ │ │ ├── MFRunLoopDistribution.m │ │ │ ├── UIImage+ForceDecoded.h │ │ │ ├── UIImage+ForceDecoded.m │ │ │ ├── UIImageView+TransitionImage.h │ │ │ └── UIImageView+TransitionImage.m │ │ ├── MFPictureModel.h │ │ ├── MFPictureModel.m │ │ ├── RemoteImageViewController.h │ │ ├── RemoteImageViewController.m │ │ └── main.m │ ├── MFPictureBrowserDemo.xcodeproj/ │ │ ├── project.pbxproj │ │ └── project.xcworkspace/ │ │ └── contents.xcworkspacedata │ ├── MFPictureBrowserDemo.xcworkspace/ │ 
│ └── contents.xcworkspacedata │ ├── Podfile │ └── Pods/ │ ├── MFCategory/ │ │ ├── Classes/ │ │ │ ├── Manager/ │ │ │ │ ├── MFCookiesManager.h │ │ │ │ ├── MFCookiesManager.m │ │ │ │ ├── MFDeviceManager.h │ │ │ │ ├── MFDeviceManager.m │ │ │ │ ├── MFManager.h │ │ │ │ ├── MFManager.m │ │ │ │ ├── MFREManager.h │ │ │ │ ├── MFREManager.m │ │ │ │ ├── MFSavePhotoManager.h │ │ │ │ └── MFSavePhotoManager.m │ │ │ ├── NSArrayCategory/ │ │ │ │ ├── NSArray+Distinct.h │ │ │ │ └── NSArray+Distinct.m │ │ │ ├── NSDataCategory/ │ │ │ │ ├── NSData+MFHexString.h │ │ │ │ └── NSData+MFHexString.m │ │ │ ├── NSDateFormatterCategory/ │ │ │ │ ├── NSDateFormatter+Cache.h │ │ │ │ └── NSDateFormatter+Cache.m │ │ │ ├── NSStringCategory/ │ │ │ │ ├── NSString+File.h │ │ │ │ ├── NSString+File.m │ │ │ │ ├── NSString+MFBase64Encoder.h │ │ │ │ ├── NSString+MFBase64Encoder.m │ │ │ │ ├── NSString+MFMD5Encoder.h │ │ │ │ └── NSString+MFMD5Encoder.m │ │ │ ├── NSTimerCategory/ │ │ │ │ ├── NSTimer+MFWeakTimer.h │ │ │ │ └── NSTimer+MFWeakTimer.m │ │ │ ├── UIColorCategory/ │ │ │ │ ├── UIColor+MFHexColor.h │ │ │ │ ├── UIColor+MFHexColor.m │ │ │ │ ├── UIColor+MFLDColor.h │ │ │ │ └── UIColor+MFLDColor.m │ │ │ ├── UIImageCategory/ │ │ │ │ ├── UIImage+Current.h │ │ │ │ └── UIImage+Current.m │ │ │ └── UIViewCategory/ │ │ │ ├── UIView+MFExpandTouchSize.h │ │ │ ├── UIView+MFExpandTouchSize.m │ │ │ ├── UIView+MFFrame.h │ │ │ ├── UIView+MFFrame.m │ │ │ ├── UIView+MFSnapshot.h │ │ │ ├── UIView+MFSnapshot.m │ │ │ ├── UIView+Shadow.h │ │ │ └── UIView+Shadow.m │ │ ├── LICENSE │ │ └── README.md │ ├── Pods.xcodeproj/ │ │ └── project.pbxproj │ ├── Target Support Files/ │ │ ├── MFCategory/ │ │ │ ├── MFCategory-dummy.m │ │ │ ├── MFCategory-prefix.pch │ │ │ └── MFCategory.xcconfig │ │ ├── Pods-MFPictureBrowserDemo/ │ │ │ ├── Pods-MFPictureBrowserDemo-acknowledgements.markdown │ │ │ ├── Pods-MFPictureBrowserDemo-acknowledgements.plist │ │ │ ├── Pods-MFPictureBrowserDemo-dummy.m │ │ │ ├── Pods-MFPictureBrowserDemo-frameworks.sh │ │ 
│ ├── Pods-MFPictureBrowserDemo-resources.sh │ │ │ ├── Pods-MFPictureBrowserDemo.debug.xcconfig │ │ │ └── Pods-MFPictureBrowserDemo.release.xcconfig │ │ ├── YYCache/ │ │ │ ├── YYCache-dummy.m │ │ │ ├── YYCache-prefix.pch │ │ │ └── YYCache.xcconfig │ │ ├── YYImage/ │ │ │ ├── YYImage-dummy.m │ │ │ ├── YYImage-prefix.pch │ │ │ └── YYImage.xcconfig │ │ └── YYWebImage/ │ │ ├── YYWebImage-dummy.m │ │ ├── YYWebImage-prefix.pch │ │ └── YYWebImage.xcconfig │ ├── YYCache/ │ │ ├── LICENSE │ │ ├── README.md │ │ └── YYCache/ │ │ ├── YYCache.h │ │ ├── YYCache.m │ │ ├── YYDiskCache.h │ │ ├── YYDiskCache.m │ │ ├── YYKVStorage.h │ │ ├── YYKVStorage.m │ │ ├── YYMemoryCache.h │ │ └── YYMemoryCache.m │ ├── YYImage/ │ │ ├── LICENSE │ │ ├── README.md │ │ ├── Vendor/ │ │ │ └── WebP.framework/ │ │ │ ├── Headers/ │ │ │ │ ├── config.h │ │ │ │ ├── decode.h │ │ │ │ ├── demux.h │ │ │ │ ├── encode.h │ │ │ │ ├── extras.h │ │ │ │ ├── format_constants.h │ │ │ │ ├── mux.h │ │ │ │ ├── mux_types.h │ │ │ │ └── types.h │ │ │ └── WebP │ │ └── YYImage/ │ │ ├── YYAnimatedImageView.h │ │ ├── YYAnimatedImageView.m │ │ ├── YYFrameImage.h │ │ ├── YYFrameImage.m │ │ ├── YYImage.h │ │ ├── YYImage.m │ │ ├── YYImageCoder.h │ │ ├── YYImageCoder.m │ │ ├── YYSpriteSheetImage.h │ │ └── YYSpriteSheetImage.m │ └── YYWebImage/ │ ├── LICENSE │ ├── README.md │ └── YYWebImage/ │ ├── Categories/ │ │ ├── CALayer+YYWebImage.h │ │ ├── CALayer+YYWebImage.m │ │ ├── MKAnnotationView+YYWebImage.h │ │ ├── MKAnnotationView+YYWebImage.m │ │ ├── UIButton+YYWebImage.h │ │ ├── UIButton+YYWebImage.m │ │ ├── UIImage+YYWebImage.h │ │ ├── UIImage+YYWebImage.m │ │ ├── UIImageView+YYWebImage.h │ │ ├── UIImageView+YYWebImage.m │ │ ├── _YYWebImageSetter.h │ │ └── _YYWebImageSetter.m │ ├── YYImageCache.h │ ├── YYImageCache.m │ ├── YYWebImage.h │ ├── YYWebImageManager.h │ ├── YYWebImageManager.m │ ├── YYWebImageOperation.h │ └── YYWebImageOperation.m └── README.md ================================================ FILE CONTENTS 
================================================ ================================================ FILE: .gitignore ================================================ # Created by https://www.gitignore.io/api/macos,objective-c ### macOS ### *.DS_Store .AppleDouble .LSOverride # Icon must end with two \r Icon # Thumbnails ._* # Files that might appear in the root of a volume .DocumentRevisions-V100 .fseventsd .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns .com.apple.timemachine.donotpresent # Directories potentially created on remote AFP share .AppleDB .AppleDesktop Network Trash Folder Temporary Items .apdisk ### Objective-C ### # Xcode # # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore ## Build generated build/ DerivedData/ ## Various settings *.pbxuser !default.pbxuser *.mode1v3 !default.mode1v3 *.mode2v3 !default.mode2v3 *.perspectivev3 !default.perspectivev3 xcuserdata/ ## Other *.moved-aside *.xccheckout *.xcscmblueprint ## Obj-C/Swift specific *.hmap *.ipa *.dSYM.zip *.dSYM # CocoaPods - Refactored to standalone file # Carthage - Refactored to standalone file # fastlane # # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the # screenshots whenever they are needed. 
# For more information about the recommended setup visit: # https://docs.fastlane.tools/best-practices/source-control/#source-control fastlane/report.xml fastlane/Preview.html fastlane/screenshots fastlane/test_output # Code Injection # # After new code Injection tools there's a generated folder /iOSInjectionProject # https://github.com/johnno1962/injectionforxcode iOSInjectionProject/ ### Objective-C Patch ### # End of https://www.gitignore.io/api/macos,objective-c ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2018 清风 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MFPictureBrowser.podspec ================================================ # # Be sure to run `pod spec lint MFPictureBrowser.podspec' to ensure this is a # valid spec and to remove all comments including this before submitting the spec. 
# # To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html # To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ # Pod::Spec.new do |s| # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # s.name = "MFPictureBrowser" s.version = "0.6.2" s.summary = "Elegant Picture Browser" # This description is used to generate tags and improve search results. # * Think: What does it do? Why did you write it? What is the focus? # * Try to keep it short, snappy and to the point. # * Write the description between the DESC delimiters below. # * Finally, don't worry about the indent, CocoaPods strips it! s.homepage = "https://github.com/GodzzZZZ/MFPictureBrowser" # s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif" # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # s.license = "MIT" # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the authors of the library, with email addresses. Email addresses # of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also # accepts just a name if you'd rather not provide an email address. # # Specify a social_media_url where others can refer to, for example a twitter # profile URL. # s.author = { 'GodzzZZZ' => 'GodzzZZZ@qq.com' } # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If this Pod runs only on iOS or OS X, then specify the platform and # the deployment target. You can optionally include the target after the platform. 
# # s.platform = :ios s.platform = :ios, "9.0" # When using multiple platforms # s.ios.deployment_target = "5.0" # s.osx.deployment_target = "10.7" # s.watchos.deployment_target = "2.0" # s.tvos.deployment_target = "9.0" # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # s.source = { :git => "https://github.com/GodzzZZZ/MFPictureBrowser.git", :tag => "#{s.version}" } # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # CocoaPods is smart about how it includes source code. For source files # giving a folder will include any swift, h, m, mm, c & cpp files. # For header files it will include any header in the folder. # Not including the public_header_files will make all headers public. # s.source_files = "Classes", "MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/*.{h,m}" # s.public_header_files = "Classes/**/*.h" # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # A list of resources included with the Pod. These are copied into the # target bundle with a build phase script. Anything else will be cleaned. # You can preserve files from being cleaned, please don't preserve # non-essential files like tests, examples and documentation. # # s.resource = "icon.png" # s.resources = "Image/*.png" # s.preserve_paths = "FilesToSave", "MoreFilesToSave" # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Link your library with frameworks, or libraries. Libraries do not include # the lib prefix of their name. 
# # s.framework = "SomeFramework" s.frameworks = "UIKit", "Foundation" # s.library = "iconv" # s.libraries = "iconv", "xml2" # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # s.requires_arc = true s.dependency 'YYWebImage' s.dependency 'YYImage/WebP' s.dependency 'MFCategory' # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" } end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/AppDelegate.h ================================================ #import @interface AppDelegate : UIResponder @property (strong, nonatomic) UIWindow *window; @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/AppDelegate.m ================================================ #import "AppDelegate.h" #import @interface AppDelegate () @end @implementation AppDelegate - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { [YYImageCache sharedCache].memoryCache.costLimit = 50*1024*1024; [YYImageCache sharedCache].memoryCache.shouldRemoveAllObjectsOnMemoryWarning = YES; return YES; } - (void)applicationWillResignActive:(UIApplication *)application { // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. } - (void)applicationDidEnterBackground:(UIApplication *)application { // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. } - (void)applicationWillEnterForeground:(UIApplication *)application { // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. } - (void)applicationDidBecomeActive:(UIApplication *)application { // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. } - (void)applicationWillTerminate:(UIApplication *)application { // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. } @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Base.lproj/LaunchScreen.storyboard ================================================ ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Base.lproj/Main.storyboard ================================================ ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/10.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "10.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/11.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "11.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } 
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/3.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "3.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/4.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "4.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/5.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "5.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/6.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "6.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/7.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "7.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, 
"author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/8.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "8.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/9.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "filename" : "9.jpg", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/Contents.json ================================================ { "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/ic_messages_pictype_gif_30x30_.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "filename" : "ic_messages_pictype_gif_30x30_@3x.png", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/ic_messages_pictype_long_pic_30x30_.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "scale" : "1x" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "filename" : "ic_messages_pictype_long_pic_30x30_@3x.png", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } 
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Image.xcassets/placeholder.imageset/Contents.json ================================================ { "images" : [ { "idiom" : "universal", "scale" : "1x" }, { "idiom" : "universal", "filename" : "placeholder@2x.png", "scale" : "2x" }, { "idiom" : "universal", "filename" : "placeholder@3x.png", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } } ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/Info.plist ================================================ CFBundleDevelopmentRegion $(DEVELOPMENT_LANGUAGE) CFBundleExecutable $(EXECUTABLE_NAME) CFBundleIdentifier $(PRODUCT_BUNDLE_IDENTIFIER) CFBundleInfoDictionaryVersion 6.0 CFBundleName $(PRODUCT_NAME) CFBundlePackageType APPL CFBundleShortVersionString 1.0 CFBundleVersion 1 LSRequiresIPhoneOS NSAppTransportSecurity NSAllowsArbitraryLoads UILaunchStoryboardName LaunchScreen UIMainStoryboardFile Main UIRequiredDeviceCapabilities armv7 UIRequiresFullScreen UISupportedInterfaceOrientations UIInterfaceOrientationPortrait UISupportedInterfaceOrientations~ipad UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/ListViewController.h ================================================ #import @interface ListViewController : UIViewController @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/ListViewController.m ================================================ #import "ListViewController.h" #import "RemoteImageViewController.h" #import "LocalImageViewController.h" #import "MFPictureModel.h" @interface ListViewController () < UITableViewDelegate, UITableViewDataSource > @property (nonatomic, strong) NSArray *list; @property (nonatomic, 
strong) UITableView *tableView; @end @implementation ListViewController - (NSArray *)list { if (!_list) { _list = @[ @"网络图片", @"本地图片", @"网络福利", @"本地福利", ]; } return _list; } - (UITableView *)tableView { if (!_tableView) { _tableView = [[UITableView alloc] initWithFrame:self.view.bounds style:(UITableViewStyleGrouped)]; _tableView.delegate = self; _tableView.dataSource = self; } return _tableView; } - (void)viewDidLoad { [super viewDidLoad]; [self.view addSubview:self.tableView]; } - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section { return self.list.count; } - (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath { return 44; } - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath { UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"cell"]; if (!cell) { cell = [[UITableViewCell alloc] initWithStyle:(UITableViewCellStyleDefault) reuseIdentifier:@"cell"]; } cell.textLabel.text = self.list[indexPath.row]; return cell; } - (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath { [tableView deselectRowAtIndexPath:indexPath animated:true]; if (!indexPath.row) { RemoteImageViewController *remote = [[RemoteImageViewController alloc] init]; remote.picList = @[ [[MFPictureModel alloc] initWithURL:@"https://pic2.zhimg.com/80/v2-9d0d69e867ed790715fa11d1c55f3151_hd.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"https://ww3.sinaimg.cn/mw690/79ba7be1jw1e5jdfqobcdg20bh06gwwz.gif" imageName:nil imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:@"https://b-ssl.duitang.com/uploads/item/201609/03/20160903092531_ZTaFm.gif" imageName:nil imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:@"https://b-ssl.duitang.com/uploads/item/201609/03/20160903092605_3KdcV.gif" imageName:nil imageType:MFImageTypeGIF], [[MFPictureModel alloc] 
initWithURL:@"https://pic2.zhimg.com/e336f051665a796be2d86ab37aa1ffb9_r.jpg" imageName:nil imageType:MFImageTypeLongImage], [[MFPictureModel alloc] initWithURL:@"https://b-ssl.duitang.com/uploads/item/201609/03/20160903085932_PTrKh.gif" imageName:nil imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:@"https://b-ssl.duitang.com/uploads/item/201609/03/20160903085850_ZHaP5.gif" imageName:nil imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:@"https://p.upyun.com/demo/webp/webp/png-3.webp" imageName:nil imageType:MFImageTypeStaticWebP], [[MFPictureModel alloc] initWithURL:@"https://p.upyun.com/demo/webp/webp/animated-gif-3.webp" imageName:nil imageType:MFImageTypeAnimatedWebP], ].mutableCopy; [self.navigationController pushViewController:remote animated:true]; }else if (indexPath.row == 1) { LocalImageViewController *local = [[LocalImageViewController alloc] init]; local.picList = @[ [[MFPictureModel alloc] initWithURL:nil imageName:@"1.gif" imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:nil imageName:@"2.gif" imageType:MFImageTypeGIF], [[MFPictureModel alloc] initWithURL:nil imageName:@"1.webp" imageType:MFImageTypeStaticWebP], [[MFPictureModel alloc] initWithURL:nil imageName:@"2.webp" imageType:MFImageTypeAnimatedWebP], [[MFPictureModel alloc] initWithURL:nil imageName:@"4.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"5.jpg" imageType:MFImageTypeLongImage], ].mutableCopy; [self.navigationController pushViewController:local animated:true]; }else if (indexPath.row == 2) { RemoteImageViewController *remoteWelfare = [[RemoteImageViewController alloc] init]; remoteWelfare.picList = @[ [[MFPictureModel alloc] initWithURL:@"http://7xi8d6.com1.z0.glb.clouddn.com/20180122090204_A4hNiG_Screenshot.jpeg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://7xi8d6.com1.z0.glb.clouddn.com/20171114101305_NIAzCK_rakukoo_14_11_2017_10_12_58_703.jpeg" imageName:nil 
imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"https://ws1.sinaimg.cn/large/610dc034ly1fjndz4dh39j20u00u0ada.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"https://ws1.sinaimg.cn/large/610dc034ly1fibksd2mbmj20u011iacx.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://7xi8d6.com1.z0.glb.clouddn.com/2017-05-12-18380140_455327614813449_854681840315793408_n.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://ww1.sinaimg.cn/large/61e74233ly1feuogwvg27j20p00zkqe7.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://7xi8d6.com1.z0.glb.clouddn.com/2017-03-13-17265708_396005157434387_3099040288153272320_n.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://7xi8d6.com1.z0.glb.clouddn.com/2017-03-02-16906481_1495916493759925_5770648570629718016_n.jpg" imageName:nil imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:@"http://ww2.sinaimg.cn/large/610dc034gw1f9lmfwy2nij20u00u076w.jpg" imageName:nil imageType:MFImageTypeOther], ].mutableCopy; [self.navigationController pushViewController:remoteWelfare animated:YES]; }else { LocalImageViewController *localWelfare = [[LocalImageViewController alloc] init]; localWelfare.picList = @[ [[MFPictureModel alloc] initWithURL:nil imageName:@"6.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"7.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"8.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"9.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"10.jpg" imageType:MFImageTypeOther], [[MFPictureModel alloc] initWithURL:nil imageName:@"11.jpg" imageType:MFImageTypeOther], ].mutableCopy; [self.navigationController pushViewController:localWelfare animated:YES]; } } @end 
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/LocalImageViewController.h ================================================ #import @interface LocalImageViewController : UIViewController @property (nonatomic, strong) NSMutableArray *picList; @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/LocalImageViewController.m ================================================ #import "LocalImageViewController.h" #import "MFPictureBrowser.h" #import "MFDisplayPhotoCollectionViewCell.h" #import #import "MFPictureBrowser/UIImageView+TransitionImage.h" #import "MFPictureBrowser/UIImage+ForceDecoded.h" #import "MFPictureModel.h" @interface LocalImageViewController () < UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout, MFPictureBrowserDelegate > @property (nonatomic, strong) UICollectionView *collectionView; @property (nonatomic, assign) NSInteger currentIndex; @end @implementation LocalImageViewController - (UICollectionView *)collectionView { if (!_collectionView) { UICollectionViewFlowLayout *flow = [[UICollectionViewFlowLayout alloc] init]; _collectionView = [[UICollectionView alloc] initWithFrame:CGRectMake(10, 10, [UIScreen mainScreen].bounds.size.width - 20, [UIScreen mainScreen].bounds.size.height - 20) collectionViewLayout:flow]; _collectionView.delegate = self; _collectionView.dataSource = self; _collectionView.backgroundColor = [UIColor whiteColor]; _collectionView.bounces = NO; } return _collectionView; } - (NSMutableArray *)picList { if (!_picList) { _picList = @[].mutableCopy; } return _picList; } - (void)viewDidLoad { [super viewDidLoad]; self.view.backgroundColor = [UIColor whiteColor]; [self.view addSubview:self.collectionView]; [self.collectionView registerClass:[MFDisplayPhotoCollectionViewCell class] forCellWithReuseIdentifier:@"reuseCell"]; } - (void)viewDidDisappear:(BOOL)animated { [super viewDidDisappear:animated]; 
YYImageCache *cache = [YYWebImageManager sharedManager].cache; [cache.memoryCache removeAllObjects]; [cache.diskCache removeAllObjects]; } - (NSInteger)numberOfSectionsInCollectionView:(UICollectionView *)collectionView { return 1; } - (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section { return self.picList.count; } - (UICollectionViewCell *)collectionView: (UICollectionView *)collectionView cellForItemAtIndexPath: (NSIndexPath *)indexPath { MFDisplayPhotoCollectionViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:@"reuseCell" forIndexPath:indexPath]; [cell.button addTarget:self action:@selector(buttonClick:) forControlEvents:UIControlEventTouchUpInside]; MFPictureModel *pictureModel = self.picList[indexPath.row]; __weak MFDisplayPhotoCollectionViewCell *weakCell = cell; if (pictureModel.hidden) { weakCell.displayImageView.alpha = 0; }else { weakCell.displayImageView.alpha = 1; } if (pictureModel.imageType == MFImageTypeGIF) { if (pictureModel.posterImage) { weakCell.displayImageView.image = pictureModel.posterImage; [self configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType]; }else { dispatch_async(dispatch_get_global_queue(0, 0), ^{ NSURL *imageURL = [[NSBundle mainBundle] URLForResource:pictureModel.imageName withExtension:nil]; NSData *animatedData = [NSData dataWithContentsOfURL:imageURL]; UIImage *animatedImage = [UIImage forceDecodedImageWithData:animatedData]; pictureModel.animatedImage = animatedImage; if (animatedImage) { pictureModel.posterImage = animatedImage.images.firstObject; dispatch_async(dispatch_get_main_queue(), ^{ [weakCell.displayImageView animatedTransitionImage:pictureModel.posterImage]; [self configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType]; }); } }); } }else if (pictureModel.imageType == MFImageTypeStaticWebP || pictureModel.imageType == MFImageTypeAnimatedWebP) { if (pictureModel.posterImage) { 
weakCell.displayImageView.image = pictureModel.posterImage; [self configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType]; }else { dispatch_async(dispatch_get_global_queue(0, 0), ^{ NSURL *imageURL = [[NSBundle mainBundle] URLForResource:pictureModel.imageName withExtension:nil]; NSData *webpData = [NSData dataWithContentsOfURL:imageURL]; YYImage *webpImage = [YYImage imageWithData:webpData]; if (webpImage) { pictureModel.posterImage = webpImage; pictureModel.webpAnimatedImage = webpImage; dispatch_async(dispatch_get_main_queue(), ^{ [weakCell.displayImageView animatedTransitionImage:webpImage]; [self configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType]; }); } }); } }else { UIImage *image = [UIImage imageNamed:pictureModel.imageName]; weakCell.displayImageView.image = image; pictureModel.posterImage = image; [self configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType]; } return cell; } - (void)configTagImageView:(UIImageView *)tagImageView imageType:(MFImageType)imageType { if (imageType == MFImageTypeLongImage) { tagImageView.image = [UIImage imageNamed:@"ic_messages_pictype_long_pic_30x30_"]; }else if (imageType == MFImageTypeGIF || imageType == MFImageTypeAnimatedWebP) { tagImageView.image = [UIImage imageNamed:@"ic_messages_pictype_gif_30x30_"]; }else { tagImageView.image = nil; } tagImageView.alpha = 0; if (tagImageView.image) { tagImageView.alpha = 1; } } - (void)buttonClick:(UIButton *)sender { MFDisplayPhotoCollectionViewCell *cell = (MFDisplayPhotoCollectionViewCell *)sender.superview.superview; NSIndexPath *indexPath = [self.collectionView indexPathForCell:cell]; MFPictureBrowser *browser = [[MFPictureBrowser alloc] init]; browser.delegate = self; self.currentIndex = indexPath.row; MFPictureModel *pictureModel = self.picList[indexPath.row]; pictureModel.hidden = true; [self.collectionView reloadItemsAtIndexPaths:@[indexPath]]; [browser showImageFromView:cell.displayImageView 
picturesCount:self.picList.count currentPictureIndex:indexPath.row]; } - (CGSize)collectionView: (UICollectionView *)collectionView layout: (UICollectionViewLayout*)collectionViewLayout sizeForItemAtIndexPath: (NSIndexPath *)indexPath{ return CGSizeMake(([UIScreen mainScreen].bounds.size.width - 20 - 20)/3, ([UIScreen mainScreen].bounds.size.width - 20 - 20)/3); } - (CGFloat)collectionView: (UICollectionView *)collectionView layout: (UICollectionViewLayout*)collectionViewLayout minimumLineSpacingForSectionAtIndex: (NSInteger)section{ return 5.0f; } - (CGFloat)collectionView: (UICollectionView *)collectionView layout: (UICollectionViewLayout*)collectionViewLayout minimumInteritemSpacingForSectionAtIndex: (NSInteger)section{ return 5.0f; } - (UIEdgeInsets)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout *)collectionViewLayout insetForSectionAtIndex:(NSInteger)section { return UIEdgeInsetsMake(5, 5, 5, 5); } - (void)collectionView:(UICollectionView *)collectionView didSelectItemAtIndexPath:(NSIndexPath *)indexPath { [collectionView deselectItemAtIndexPath:indexPath animated:YES]; } - (UIImageView *)pictureBrowser:(MFPictureBrowser *)pictureBrowser imageViewAtIndex:(NSInteger)index { NSIndexPath *indexPath = [NSIndexPath indexPathForRow:index inSection:0]; MFDisplayPhotoCollectionViewCell *cell = (MFDisplayPhotoCollectionViewCell *)[self.collectionView cellForItemAtIndexPath:indexPath]; return cell.displayImageView; } - (id)pictureBrowser:(MFPictureBrowser *)pictureBrowser pictureModelAtIndex:(NSInteger)index { MFPictureModel *pictureModel = self.picList[index]; return pictureModel; } - (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser image:(UIImage *)image animatedImage:(UIImage *)animatedImage didLoadAtIndex:(NSInteger)index { NSIndexPath *indexPath = [NSIndexPath indexPathForRow:index inSection:0]; MFPictureModel *pictureModel = self.picList[index]; if (animatedImage) { pictureModel.posterImage = 
animatedImage.images.firstObject; }else if (image) { pictureModel.posterImage = image; } [self.collectionView reloadItemsAtIndexPaths:@[indexPath]]; } - (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser scrollToIndex:(NSInteger)index { MFPictureModel *pictureModel = self.picList[self.currentIndex]; pictureModel.hidden = false; [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:self.currentIndex inSection:0]]]; self.currentIndex = index; MFPictureModel *currentPictureModel = self.picList[self.currentIndex]; currentPictureModel.hidden = true; [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:self.currentIndex inSection:0]]]; } - (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser dimissAtIndex:(NSInteger)index { MFPictureModel *pictureModel = self.picList[index]; pictureModel.hidden = false; [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:index inSection:0]]]; } @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFDisplayPhotoCollectionViewCell.h ================================================ #import #import @interface MFDisplayPhotoCollectionViewCell : UICollectionViewCell @property (nonatomic ,strong) YYAnimatedImageView *displayImageView; @property (nonatomic, strong) UIImageView *tagImageView; @property (nonatomic, strong) UIButton *button; @end ================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFDisplayPhotoCollectionViewCell.m ================================================ #import "MFDisplayPhotoCollectionViewCell.h" @implementation MFDisplayPhotoCollectionViewCell - (instancetype)initWithFrame:(CGRect)frame { self = [super initWithFrame:frame]; if (self) { self.contentView.backgroundColor = [UIColor clearColor]; self.displayImageView = [[YYAnimatedImageView alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, frame.size.height)]; self.displayImageView.layer.cornerRadius 
= 2;
        self.displayImageView.layer.masksToBounds = YES;
        self.displayImageView.contentMode = UIViewContentModeScaleAspectFill;
        [self.contentView addSubview:self.displayImageView];
        // Thumbnails stay on their poster frame; animation only plays in the browser.
        self.displayImageView.autoPlayAnimatedImage = false;
        // 30x30 type badge pinned to the bottom-right corner of the image.
        self.tagImageView = [[UIImageView alloc] initWithFrame:CGRectMake(frame.size.width - 32, frame.size.height - 30, 30, 30)];
        self.tagImageView.alpha = 0;
        [self.displayImageView addSubview:self.tagImageView];
        // Transparent button covering the whole cell; the controller attaches its action.
        self.button = [UIButton buttonWithType:UIButtonTypeCustom];
        self.button.frame = self.contentView.bounds;
        [self.contentView addSubview:self.button];
    }
    return self;
}

@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFPictureBrowser.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): stripped framework import (likely <UIKit/UIKit.h>).
#import
#import "MFPictureModelProtocol.h"

@class MFPictureBrowser;

@protocol MFPictureBrowserDelegate

/**
 Returns the source image view for the given index (used for the
 show/dismiss transition animation).

 @param pictureBrowser The picture browser.
 @param index The picture index.
 @return The source image view.
 */
- (UIImageView *)pictureBrowser:(MFPictureBrowser *)pictureBrowser imageViewAtIndex:(NSInteger)index;

/**
 Returns the model object (conforming to MFPictureModelProtocol) for the given index.

 @param pictureBrowser The picture browser.
 @param index The picture index.
 @return The picture model object.
 */
- (id)pictureBrowser:(MFPictureBrowser *)pictureBrowser pictureModelAtIndex:(NSInteger)index;

@optional

/**
 Called when the browser scrolls to a new page.

 @param pictureBrowser The picture browser.
 @param index The new page index.
 */
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser scrollToIndex:(NSInteger)index;

/**
 Called when a (network) image finished loading.

 @param pictureBrowser The picture browser.
 @param image The loaded static image, if any.
 @param animatedImage The loaded animated image, if any.
 @param index The picture index.
 */
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser image:(UIImage *)image animatedImage:(UIImage *)animatedImage didLoadAtIndex:(NSInteger)index;

/**
 Called after the browser has been dismissed.
 (Selector name "dimiss" is a long-standing typo in the public API;
 renaming would break existing delegate implementations.)

 @param pictureBrowser The picture browser.
 @param index The picture index that was showing.
 */
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser dimissAtIndex:(NSInteger)index;

/**
 Called on a long press inside the browser.

 @param pictureBrowser The picture browser.
 @param index The picture index that was showing.
 */
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser longPressAtIndex:(NSInteger)index;

@end

// Full-screen picture browser view. Added directly to the key window by
// -showImageFromView:picturesCount:currentPictureIndex:.
@interface MFPictureBrowser : UIView

// NOTE(review): protocol qualifier appears stripped by extraction —
// presumably id<MFPictureBrowserDelegate>; confirm against the repo.
@property (nonatomic, weak) id delegate;

/** Spacing between pages, default: 20. */
@property (nonatomic, assign) CGFloat imagesSpacing;

/** Center of the page-number label. Default: horizontally centered, center y 20pt above the bottom. */
@property (nonatomic, assign) CGPoint pageTextCenter;

/** Page-number label font. Default: system font, size 16. */
@property (nonatomic, strong) UIFont *pageTextFont;

/** Page-number label color. Default: white. */
@property (nonatomic, strong) UIColor *pageTextColor;

/**
 Presents the browser.

 @param fromView The image view the user tapped (animation origin).
 @param picturesCount Total number of pictures (1...9).
 @param currentPictureIndex Index of the tapped picture.
 */
- (void)showImageFromView:(UIImageView *)fromView picturesCount:(NSInteger)picturesCount currentPictureIndex:(NSInteger)currentPictureIndex;

/** Dismisses the browser. */
- (void)dismiss;

@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFPictureBrowser.m ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
#import "MFPictureBrowser.h"
#import "MFPictureView.h"
// NOTE(review): two stripped framework imports.
#import
#import

@interface MFPictureBrowser() <
UIScrollViewDelegate,
MFPictureViewDelegate
>
/// Array of MFPictureView; holds at most 9 picture views.
@property (nonatomic, strong) NSMutableArray *pictureViews;
@property (nonatomic, assign) NSInteger picturesCount;
@property (nonatomic, assign) NSInteger currentIndex;
@property (nonatomic, weak) UIScrollView *scrollView;
@property (nonatomic, weak) UILabel *pageTextLabel;
// Single tap dismisses; made to wait for each picture view's own tap gesture.
@property (nonatomic, weak) UITapGestureRecognizer *dismissTapGesture;
// Destination view for the dismiss animation.
@property (nonatomic, strong) UIImageView *endView;
@property (nonatomic, assign) BOOL animationInProgress;
@end

@implementation MFPictureBrowser

- (instancetype)init {
    self = [super init];
    if (self) {
        [self configuration];
    }
    return self;
}

// One-time setup: defaults, paging scroll view, page label, gestures.
- (void)configuration {
    self.frame = [UIScreen mainScreen].bounds;
    self.backgroundColor = [UIColor clearColor];
    // Default appearance values.
    self.imagesSpacing = 20;
    self.pageTextFont = [UIFont systemFontOfSize:16];
    self.pageTextCenter = CGPointMake(self.width * 0.5,
self.height - 20);
    self.pageTextColor = [UIColor whiteColor];
    // Backing store for the picture views.
    self.pictureViews = @[].mutableCopy;
    // Paging scroll view, widened by imagesSpacing so pages have a gap between them.
    UIScrollView *scrollView = [[UIScrollView alloc] initWithFrame:CGRectMake(- self.imagesSpacing * 0.5, 0, self.width + self.imagesSpacing, self.height)];
    scrollView.showsVerticalScrollIndicator = false;
    scrollView.showsHorizontalScrollIndicator = false;
    scrollView.pagingEnabled = true;
    scrollView.delegate = self;
    [self addSubview:scrollView];
    self.scrollView = scrollView;
    // Page-number label ("n / total").
    UILabel *label = [[UILabel alloc] init];
    label.alpha = 0;
    label.textColor = self.pageTextColor;
    label.center = self.pageTextCenter;
    label.font = self.pageTextFont;
    [self addSubview:label];
    self.pageTextLabel = label;
    // Gestures: long press notifies the delegate, single tap dismisses.
    UILongPressGestureRecognizer *longPressGesture = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longPressGesture:)];
    [self addGestureRecognizer:longPressGesture];
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapGesture:)];
    [self addGestureRecognizer:tapGesture];
    self.dismissTapGesture = tapGesture;
    self.animationInProgress = false;
}

#pragma mark - public

// Presents the browser: validates input, attaches to the key window, builds
// one MFPictureView per picture, then animates the tapped one in from `fromView`.
- (void)showImageFromView:(UIImageView *)fromView picturesCount:(NSInteger)picturesCount currentPictureIndex:(NSInteger)currentPictureIndex {
    [self _showFromView:fromView picturesCount:picturesCount currentPictureIndex:currentPictureIndex];
    for (NSInteger i = 0; i < picturesCount; i++) {
        MFPictureView *pictureView = [self _createImagePictureViewAtIndex:i];
        [self.pictureViews addObject:pictureView];
    }
    MFPictureView *pictureView = self.pictureViews[currentPictureIndex];
    [self _showPictureView:pictureView fromView:fromView];
}

#pragma mark - private

// Validates parameters/delegate, records state, attaches to the key window and
// positions the scroll view on the tapped page.
- (void)_showFromView:(UIImageView *)fromView picturesCount:(NSInteger)picturesCount currentPictureIndex:(NSInteger)currentPictureIndex {
    NSAssert(picturesCount > 0 && currentPictureIndex < picturesCount && picturesCount <= 9, @"Parameter is not correct");
    NSAssert(self.delegate != nil, @"Please set up delegate for pictureBrowser");
    NSAssert([_delegate respondsToSelector:@selector(pictureBrowser:imageViewAtIndex:)], @"Please implement delegate method of pictureBrowser:imageViewAtIndex:");
    NSAssert([_delegate respondsToSelector:@selector(pictureBrowser:pictureModelAtIndex:)], @"Please implement delegate method of pictureBrowser:pictureModelAtIndex:");
    // Record state and lay out the pages.
    self.picturesCount = picturesCount;
    self.currentIndex = currentPictureIndex;
    // Attach to the key window.
    [[UIApplication sharedApplication].keyWindow addSubview:self];
    // Content size spans all pages.
    self.scrollView.contentSize = CGSizeMake(picturesCount * _scrollView.width, _scrollView.height);
    // Jump (no animation) to the tapped page.
    [self.scrollView setContentOffset:CGPointMake(currentPictureIndex * _scrollView.width, 0) animated:false];
}

// Builds the picture view for one index from the delegate-supplied model/source view.
- (MFPictureView *)_createImagePictureViewAtIndex:(NSInteger)index {
    id pictureModel = [_delegate pictureBrowser:self pictureModelAtIndex:index];
    UIImageView *imageView = [_delegate pictureBrowser:self imageViewAtIndex:index];
    MFPictureView *pictureView = [[MFPictureView alloc] initWithPictureModel:pictureModel];
    [self _configPictureView:pictureView index:index imageView:imageView];
    return pictureView;
}

// Places a picture view on its page and wires the dismiss tap to defer to the
// picture view's own (double-tap) gesture.
- (void)_configPictureView:(MFPictureView *)pictureView index:(NSInteger)index imageView:(UIImageView *)imageView {
    [self.dismissTapGesture requireGestureRecognizerToFail:pictureView.imageView.gestureRecognizers.firstObject];
    pictureView.pictureDelegate = self;
    [self.scrollView addSubview:pictureView];
    pictureView.index = index;
    pictureView.size = self.size;
    pictureView.pictureSize = imageView.image.size;
    CGPoint center = pictureView.center;
    center.x = index * _scrollView.width + _scrollView.width * 0.5;
    pictureView.center = center;
}

// Runs the zoom-in presentation from the source view's on-screen rect.
- (void)_showPictureView:(MFPictureView *)pictureView fromView:(UIImageView *)fromView{
    [self _hideStatusBar];
    CGRect rect = [fromView convertRect:fromView.bounds toView:nil];
    [pictureView animationShowWithFromRect:rect animationBlock:^{
        self.backgroundColor = [UIColor blackColor];
        // Page counter is pointless for a single picture.
        if (self.picturesCount != 1) {
            self.pageTextLabel.alpha = 1;
        }else {
            self.pageTextLabel.alpha = 0;
        }
    } completionBlock:^{}];
}

// Dismisses the browser, animating the current picture back to its source view
// (or collapsing to screen center / the stale frame if the source is gone).
- (void)dismiss {
    CGFloat x = [UIScreen mainScreen].bounds.size.width * 0.5;
    CGFloat y = [UIScreen mainScreen].bounds.size.height * 0.5;
    CGRect rect = CGRectMake(x, y, 0, 0);
    self.endView = [_delegate pictureBrowser:self imageViewAtIndex:self.currentIndex];
    if (self.endView.superview != nil) {
        rect = [_endView convertRect:_endView.bounds toView:nil];
    }else {
        rect = _endView.frame;
    }
    // Cancel any in-flight downloads.
    for (MFPictureView *pictureView in self.pictureViews) {
        if (pictureView.operation) {
            [pictureView.operation cancel];
        }
    }
    // Locate the currently-visible picture view.
    // FIX: `index` is NSInteger (long on 64-bit); the format specifier must be
    // %ld with an explicit (long) cast — %d reads only 32 bits from varargs.
    MFPictureView *pictureView = [[_pictureViews filteredArrayUsingPredicate:[NSPredicate predicateWithFormat:@"index == %ld", (long)self.currentIndex]] firstObject];
    // Run the close animation.
    __weak __typeof(self)weakSelf = self;
    [pictureView animationDismissWithToRect:rect animationBlock:^{
        self.backgroundColor = [UIColor clearColor];
        self.pageTextLabel.alpha = 0;
        [self _showStatusBar];
    } completionBlock:^{
        // Use strongSelf consistently so the completion block does not also
        // capture `self` (the original mixed `self`/`_delegate` with weakSelf,
        // defeating the weak/strong dance).
        __strong __typeof(weakSelf)strongSelf = weakSelf;
        if (!strongSelf) {
            return;
        }
        [strongSelf removeFromSuperview];
        [strongSelf.pictureViews removeAllObjects];
        // Notify the delegate exactly once (guard against repeated dismiss).
        if ([strongSelf.delegate respondsToSelector:@selector(pictureBrowser:dimissAtIndex:)] && !strongSelf.animationInProgress) {
            [strongSelf.delegate pictureBrowser:strongSelf dimissAtIndex:strongSelf.currentIndex];
        }
        strongSelf.animationInProgress = true;
    }];
}

#pragma mark - gesture

- (void)tapGesture:(UITapGestureRecognizer *)gesture {
    [self dismiss];
}

- (void)longPressGesture:(UILongPressGestureRecognizer *)gesture {
    if (gesture.state == UIGestureRecognizerStateEnded) {
        if ([_delegate respondsToSelector:@selector(pictureBrowser:longPressAtIndex:)]) {
            [_delegate pictureBrowser:self longPressAtIndex:self.currentIndex];
        }
    }
}

#pragma mark - Status bar

// Slides the status bar off-screen. Uses the private "statusBar" KVC key —
// NOTE(review): this is fragile and rejected/broken on newer iOS versions.
- (void)_hideStatusBar {
    UIView *statusBar = [[UIApplication sharedApplication] valueForKey:@"statusBar"];
    [UIView animateWithDuration:0.15 animations:^{
        statusBar.transform = CGAffineTransformMakeTranslation(0, -statusBar.height);
    }];
}

- (void)_showStatusBar {
    UIView *statusBar = [[UIApplication sharedApplication] valueForKey:@"statusBar"];
    [UIView animateWithDuration:0.15 animations:^{
        statusBar.transform = CGAffineTransformIdentity;
    }];
}

#pragma mark - setters

- (void)setPageTextFont:(UIFont *)pageTextFont {
    _pageTextFont = pageTextFont;
    self.pageTextLabel.font = pageTextFont;
}

- (void)setPageTextColor:(UIColor *)pageTextColor {
    _pageTextColor = pageTextColor;
    self.pageTextLabel.textColor = pageTextColor;
}

- (void)setPageTextCenter:(CGPoint)pageTextCenter {
    _pageTextCenter = pageTextCenter;
    [self.pageTextLabel sizeToFit];
    self.pageTextLabel.center = pageTextCenter;
}

// Changing the spacing re-derives the scroll view frame (pages overhang by half
// the spacing on each side).
- (void)setImagesSpacing:(CGFloat)imagesSpacing {
    _imagesSpacing = imagesSpacing;
    self.scrollView.frame = CGRectMake(- _imagesSpacing * 0.5, 0, self.width + _imagesSpacing, self.height);
}

- (void)setCurrentIndex:(NSInteger)currentIndex {
    _currentIndex = currentIndex;
    [self _setPageText:currentIndex];
}

// Updates the "n / total" label (1-based).
- (void)_setPageText:(NSUInteger)index {
    _pageTextLabel.text = [NSString stringWithFormat:@"%zd / %zd", index + 1, self.picturesCount];
    [_pageTextLabel sizeToFit];
    _pageTextLabel.center = self.pageTextCenter;
}

#pragma mark - UIScrollViewDelegate

// Derives the current page from the offset (+0.5 rounds to nearest page) and
// notifies the delegate on page changes.
- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
    NSUInteger index = (scrollView.contentOffset.x / scrollView.width + 0.5);
    if (self.currentIndex != index) {
        if ([_delegate respondsToSelector:@selector(pictureBrowser:scrollToIndex:)]) {
            [_delegate pictureBrowser:self scrollToIndex:index];
        }
        self.currentIndex = index;
    }
}

#pragma mark - MFPictureViewDelegate

- (void)pictureView:(MFPictureView *)pictureView didClickAtIndex:(NSInteger)index{
    [self dismiss];
}

// Pull-to-dismiss progress: fade the black backdrop with the drag scale.
- (void)pictureView:(MFPictureView *)pictureView scale:(CGFloat)scale {
    self.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:1 - scale];
}

// Forward load-completion events from individual picture views to our delegate.
- (void)pictureView:(MFPictureView *)pictureView image:(UIImage *)image animatedImage:(UIImage *)animatedImage didLoadAtIndex:(NSInteger)index {
    if ([_delegate respondsToSelector:@selector(pictureBrowser:image:animatedImage:didLoadAtIndex:)]) {
        [_delegate pictureBrowser:self image:image animatedImage:animatedImage didLoadAtIndex:index];
    }
}

@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFPictureModelProtocol.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): three stripped framework imports (likely Foundation/UIKit/YYImage).
#import
#import
#import

// Classification of a picture, used to pick decode/display strategy and badge.
typedef NS_ENUM(NSInteger, MFImageType) {
    MFImageTypeUnknown,
    MFImageTypeOther,
    MFImageTypeGIF,
    MFImageTypeAnimatedWebP,
    MFImageTypeStaticWebP,
    MFImageTypeLongImage
};

// Contract a picture model must satisfy: identity (local name or remote URL),
// type, and mutable caches for the decoded poster/animated representations.
@protocol MFPictureModelProtocol
@property (nonatomic, strong) NSString *imageName;
@property (nonatomic, strong) NSString *imageURL;
@property (nonatomic, assign) MFImageType imageType;
@property (nonatomic, strong) UIImage *placeholderImage;
@property (nonatomic, strong) UIImage *posterImage;
@property (nonatomic, strong) UIImage *animatedImage;
@property (nonatomic, strong) YYImage *webpAnimatedImage;
@property (nonatomic, assign) BOOL compressed;
@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFPictureView.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): stripped framework imports around the project header (likely
// UIKit plus YYImage/YYWebImage).
#import
#import "MFPictureModelProtocol.h"
#import
#import

@class MFPictureView;

// Events a single picture page reports back to the browser.
@protocol MFPictureViewDelegate
// Single tap on the picture.
- (void)pictureView:(MFPictureView *)pictureView didClickAtIndex:(NSInteger)index;
// Pull-to-dismiss drag progress (0 = normal, 1 = fully dragged away).
- (void)pictureView:(MFPictureView *)pictureView scale:(CGFloat)scale;
// A picture finished loading/decoding (static or animated).
- (void)pictureView:(MFPictureView *)pictureView image:(UIImage *)image animatedImage:(UIImage *)animatedImage didLoadAtIndex:(NSInteger)index;
@end

// One zoomable page of the browser: a scroll view hosting a YYAnimatedImageView.
@interface MFPictureView : UIScrollView
// Index of this page within the browser.
@property (nonatomic, assign) NSInteger index;
// Natural size of the picture.
@property (nonatomic, assign) CGSize pictureSize;
// Model object (conforms to MFPictureModelProtocol; qualifier stripped by extraction).
@property (nonatomic, strong) id pictureModel;
// The image view currently displaying the picture.
@property (nonatomic, strong, readonly) YYAnimatedImageView *imageView;
// Delegate (the browser).
@property (nonatomic, weak) id pictureDelegate;
// In-flight download operation, cancellable on dismiss.
@property (nonatomic, strong) YYWebImageOperation *operation;

- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)initWithPictureModel:(id)pictureModel;

/**
 Animated presentation.

 @param rect The rect the animation starts from.
 @param animationBlock Extra animations to run alongside.
 @param completionBlock Called when the animation finishes.
 */
- (void)animationShowWithFromRect:(CGRect)rect animationBlock:(void(^)(void))animationBlock completionBlock:(void(^)(void))completionBlock;

/**
 Animated dismissal.

 @param rect The rect to animate back to.
 @param animationBlock Extra animations to run alongside.
 @param completionBlock Called when the animation finishes.
 */
- (void)animationDismissWithToRect:(CGRect)rect animationBlock:(void(^)(void))animationBlock completionBlock:(void(^)(void))completionBlock;

@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFPictureView.m ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
#import "MFPictureView.h"
#import "UIImageView+TransitionImage.h"
#import "UIImage+ForceDecoded.h"
#import "MFRunLoopDistribution.h"
// NOTE(review): stripped framework import.
#import

// Tiny GCD conveniences used throughout the decode paths.
static inline void _mf_dispatch_async_on_main_queue(void (^block)(void)) {
    dispatch_async(dispatch_get_main_queue(), block);
}
static inline void _mf_dispatch_async_on_global_queue(void (^block)(void)) {
    dispatch_async(dispatch_get_global_queue(0, 0), block);
}

@interface MFPictureView() <
UIScrollViewDelegate
>
// Size the picture is rendered at on screen (aspect-fitted).
@property (nonatomic, assign) CGSize showPictureSize;
@property (nonatomic, assign) BOOL doubleClicks;
@property (nonatomic, assign) CGPoint lastContentOffset;
// Pull-to-dismiss drag progress.
@property (nonatomic, assign) CGFloat scale;
// Vertical drag offset applied to the dismiss target rect.
@property (nonatomic, assign) CGFloat offsetY;
@property (nonatomic, assign, getter = isAnimationInProgress) BOOL animationInProgress;
@property (nonatomic, assign, getter = isLoadingFinished) BOOL loadingFinished;
// True when the model has an imageName (bundle image) rather than a URL.
@property (nonatomic, assign, getter = isLocalImage) BOOL localImage;
// True for GIF / animated WebP.
@property (nonatomic, assign, getter = isAnimatedImage) BOOL animatedImage;
// Thin loading bar pinned to the bottom edge of the screen.
@property (nonatomic, strong) UIProgressView *progressView;
@end

@implementation MFPictureView

// Designated initializer: classifies the model (local? animated?) before
// building the UI, because setupUI decides whether to create the progress bar.
- (instancetype)initWithPictureModel:(id)pictureModel {
    self = [super init];
    if (self) {
        self.localImage = false;
        if (pictureModel.imageName) {
            self.localImage = true;
        }
        self.animatedImage = false;
        if (pictureModel.imageType == MFImageTypeGIF || pictureModel.imageType == MFImageTypeAnimatedWebP) {
            self.animatedImage = true;
        }
        [self setupUI];
        self.pictureModel = pictureModel;
    }
    return self;
}

// Builds the zooming image view, the (conditional) progress bar, and the
// double-tap-to-zoom gesture.
- (void)setupUI {
    self.delegate = self;
    self.alwaysBounceVertical = true;
    self.backgroundColor = [UIColor clearColor];
    self.showsHorizontalScrollIndicator = false;
    self.showsVerticalScrollIndicator = false;
    self.maximumZoomScale = 2;
    YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] init];
    imageView.clipsToBounds = true;
    imageView.layer.cornerRadius = 3;
    imageView.contentMode = UIViewContentModeScaleAspectFill;
    imageView.frame = self.bounds;
    imageView.userInteractionEnabled = true;
    _imageView = imageView;
    [self addSubview:imageView];
    // Progress bar only when loading can actually take time: remote images,
    // or local animated images that need off-main decoding.
    if (!self.isLocalImage || (self.isLocalImage && self.isAnimatedImage)) {
        UIProgressView *progressView = [[UIProgressView alloc] initWithFrame:CGRectMake(0, [UIScreen mainScreen].bounds.size.height - 3, [UIScreen mainScreen].bounds.size.width, 3)];
        progressView.progressViewStyle = UIProgressViewStyleDefault;
        progressView.progressTintColor = [UIColor colorWithWhite:1 alpha:0.2];
        progressView.trackTintColor = [UIColor blackColor];
        [self addSubview:progressView];
        _progressView = progressView;
    }
    UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(doubleClick:)];
    doubleTapGesture.numberOfTapsRequired = 2;
    [imageView addGestureRecognizer:doubleTapGesture];
}

#pragma mark - public

// Zoom-in presentation: image view animates from `rect` (the grid cell's
// on-screen frame) to its natural aspect-fitted frame.
// NOTE(review): `7 << 16` is an undocumented UIViewAnimationOptions curve value.
- (void)animationShowWithFromRect:(CGRect)rect animationBlock:(void (^)(void))animationBlock completionBlock:(void (^)(void))completionBlock {
    self.imageView.frame = rect;
    self.animationInProgress = true;
    [UIView animateWithDuration:0.25 delay:0 options:7 << 16 animations:^{
        if (animationBlock) {
            animationBlock();
        }
        self.imageView.frame = [self getImageActualFrame:self.showPictureSize];
    } completion:^(BOOL finished) {
        if (finished) {
            if (completionBlock) {
                completionBlock();
            }
            self.animationInProgress = false;
        }
    }];
}

// Zoom-out dismissal back to `rect`, compensating for the current drag offset
// and horizontal zoom scroll.
- (void)animationDismissWithToRect:(CGRect)rect animationBlock:(void (^)(void))animationBlock completionBlock:(void (^)(void))completionBlock {
    self.progressView.alpha = 0;
    [UIView animateWithDuration:0.25 delay:0 options:7 << 16 animations:^{
        if (animationBlock) {
            animationBlock();
        }
        CGRect toRect = rect;
        toRect.origin.y += self.offsetY;
        // Needed so closing while zoomed-in targets the visible region.
        toRect.origin.x += self.contentOffset.x;
        self.imageView.frame = toRect;
    } completion:^(BOOL finished) {
        if (finished) {
            if (completionBlock) {
                completionBlock();
            }
        }
    }];
}

#pragma mark - private IMPORTANT METHOD!!!
// Configures the page from a bundle-local model. Per image type:
//  - GIF: reuse model.animatedImage if already decoded; otherwise show the
//    poster/placeholder, force-decode off-main, then swap in on the main queue.
//  - static WebP: same pattern via YYImage, caching into model.posterImage.
//  - animated WebP: same pattern, caching into model.webpAnimatedImage.
//  - default (static): load synchronously via imageNamed.
// In every async branch the delegate's didLoad callback fires after the decoded
// image is installed. Progress bar is animated to 0.8 as a fake "in progress"
// indicator while decoding.
// NOTE(review): the inner blocks capture `self` strongly — presumably intended
// to keep the page alive until decoding finishes (dispatch_async cannot cycle).
- (void)_configWithLocalPictureModel:(id)pictureModel {
    if (pictureModel.imageType == MFImageTypeGIF) {
        UIImage *animatedImage = pictureModel.animatedImage;
        if (animatedImage) {
            // Already decoded on a previous visit — display immediately.
            [self _hideProgressView];
            [self setPictureSize:animatedImage.size];
            self.imageView.image = animatedImage;
        }else {
            [self _showProgressView];
            [UIView animateWithDuration:1 animations:^{
                [self.progressView setProgress:0.8 animated:true];
            }];
            // Placeholder while decoding.
            UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage;
            [self setPictureSize:image.size];
            self.imageView.image = image;
            _mf_dispatch_async_on_global_queue(^{
                NSURL *imageURL = [[NSBundle mainBundle] URLForResource:pictureModel.imageName withExtension:nil];
                NSData *animatedData = [NSData dataWithContentsOfURL:imageURL];
                UIImage *animatedImage = [UIImage forceDecodedImageWithData:animatedData compressed:pictureModel.compressed];
                _mf_dispatch_async_on_main_queue(^{
                    if (animatedImage) {
                        // Cache the decode results on the model for reuse.
                        pictureModel.animatedImage = animatedImage;
                        pictureModel.posterImage = animatedImage.images.firstObject;
                        [self setPictureSize:animatedImage.size];
                        self.imageView.image = animatedImage;
                        __weak __typeof(self)weakSelf = self;
                        if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) {
                            [_pictureDelegate pictureView:weakSelf image:nil animatedImage:animatedImage didLoadAtIndex:weakSelf.index];
                        }
                        [self _hideProgressView];
                    }else {
                        [self _hideProgressView];
                    }
                });
            });
        }
    }else if (pictureModel.imageType == MFImageTypeStaticWebP) {
        UIImage *webpImage = pictureModel.posterImage;
        if (webpImage) {
            [self _hideProgressView];
            [self setPictureSize:webpImage.size];
            self.imageView.image = webpImage;
        }else {
            [self _showProgressView];
            [UIView animateWithDuration:1 animations:^{
                [self.progressView setProgress:0.8 animated:true];
            }];
            UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage;
            [self setPictureSize:image.size];
            self.imageView.image = image;
            _mf_dispatch_async_on_global_queue(^{
                NSURL *imageURL = [[NSBundle mainBundle] URLForResource:pictureModel.imageName withExtension:nil];
                NSData *webpData = [NSData dataWithContentsOfURL:imageURL];
                UIImage *webpImage = [YYImage imageWithData:webpData];
                // NOTE(review): cached before the nil check — a failed decode
                // writes nil into posterImage; harmless but re-decodes next time.
                pictureModel.posterImage = webpImage;
                _mf_dispatch_async_on_main_queue(^{
                    if (webpImage) {
                        [self setPictureSize:webpImage.size];
                        self.imageView.image = webpImage;
                        __weak __typeof(self)weakSelf = self;
                        if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) {
                            [_pictureDelegate pictureView:weakSelf image:webpImage animatedImage:nil didLoadAtIndex:weakSelf.index];
                        }
                        [self _hideProgressView];
                    }else {
                        [self _hideProgressView];
                    }
                });
            });
        }
    }else if (pictureModel.imageType == MFImageTypeAnimatedWebP) {
        YYImage *webpAnimatedImage = pictureModel.webpAnimatedImage;
        if (webpAnimatedImage) {
            [self _hideProgressView];
            [self setPictureSize:webpAnimatedImage.size];
            self.imageView.image = webpAnimatedImage;
        }else {
            [self _showProgressView];
            [UIView animateWithDuration:1 animations:^{
                [self.progressView setProgress:0.8 animated:true];
            }];
            UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage;
            [self setPictureSize:image.size];
            self.imageView.image = image;
            _mf_dispatch_async_on_global_queue(^{
                NSURL *imageURL = [[NSBundle mainBundle] URLForResource:pictureModel.imageName withExtension:nil];
                NSData *webpData = [NSData dataWithContentsOfURL:imageURL];
                YYImage *webpImage = [YYImage imageWithData:webpData];
                pictureModel.webpAnimatedImage = webpImage;
                _mf_dispatch_async_on_main_queue(^{
                    if (webpImage) {
                        [self setPictureSize:webpImage.size];
                        self.imageView.image = webpImage;
                        __weak __typeof(self)weakSelf = self;
                        if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) {
                            [_pictureDelegate pictureView:weakSelf image:webpImage animatedImage:nil didLoadAtIndex:weakSelf.index];
                        }
                        [self _hideProgressView];
                    }else {
                        [self _hideProgressView];
                    }
                });
            });
        }
    }else {
        // Plain static image: synchronous load, no progress UI.
        [self _hideProgressView];
        UIImage
*posterImage = pictureModel.posterImage; if (posterImage) { [self setPictureSize:posterImage.size]; self.imageView.image = posterImage; }else { UIImage *image = [UIImage imageNamed:pictureModel.imageName]; if (image) { pictureModel.posterImage = image; [self setPictureSize:image.size]; self.imageView.image = image; __weak __typeof(self)weakSelf = self; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:weakSelf image:image animatedImage:nil didLoadAtIndex:weakSelf.index]; } } } } } - (void)_configWithRemotePictureModel:(id)pictureModel { if (pictureModel.imageType == MFImageTypeGIF) { UIImage *animatedImage = pictureModel.animatedImage; if (animatedImage) { [self _hideProgressView]; [self setPictureSize:animatedImage.size]; self.imageView.image = animatedImage; }else { NSURL *url = [NSURL URLWithString:pictureModel.imageURL]; NSString *key = [[YYWebImageManager sharedManager] cacheKeyForURL:url]; BOOL imageAvailable = [[YYImageCache sharedCache] containsImageForKey:key]; if (!imageAvailable) { [self _showProgressView]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; __weak __typeof(self)weakSelf = self; self.operation = [[YYWebImageManager sharedManager] requestImageWithURL:url options:YYWebImageOptionProgressiveBlur progress:^(NSInteger receivedSize, NSInteger expectedSize) { __strong __typeof(weakSelf)strongSelf = weakSelf; _mf_dispatch_async_on_main_queue(^{ [strongSelf.progressView setProgress:(1.0 * receivedSize / expectedSize) <= 0.95 ? 
(1.0 * receivedSize / expectedSize) : 0.95 animated:true]; }); } transform:nil completion:^(UIImage * _Nullable image, NSURL * _Nonnull url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error) { __strong __typeof(weakSelf)strongSelf = weakSelf; if (!error && stage == YYWebImageStageFinished) { strongSelf.operation = nil; YYImage *yyImage = (YYImage *)image; NSData *animatedData = yyImage.animatedImageData; UIImage *animatedImage = [UIImage forceDecodedImageWithData:animatedData compressed:pictureModel.compressed]; _mf_dispatch_async_on_main_queue(^{ [self _hideProgressView]; if (animatedImage) { pictureModel.animatedImage = animatedImage; pictureModel.posterImage = animatedImage.images.firstObject; CGSize size = animatedImage.size; [strongSelf setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [strongSelf.imageView animatedTransitionImage:animatedImage]; return true; } withKey:pictureModel.imageURL]; } if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:strongSelf image:nil animatedImage:animatedImage didLoadAtIndex:strongSelf.index]; } }); }else if (error) { strongSelf.operation = nil; _mf_dispatch_async_on_main_queue(^{ [strongSelf _hideProgressView]; }); } }]; }else { [self _showProgressView]; [UIView animateWithDuration:1 animations:^{ [self.progressView setProgress:0.8 animated:true]; }]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; [[YYImageCache sharedCache] getImageDataForKey:key withBlock:^(NSData * _Nullable imageData) { UIImage *animatedImage = [UIImage forceDecodedImageWithData:imageData compressed:pictureModel.compressed]; _mf_dispatch_async_on_main_queue(^{ [self _hideProgressView]; if (animatedImage) { pictureModel.animatedImage = animatedImage; pictureModel.posterImage = animatedImage.images.firstObject; __weak 
__typeof(self)weakSelf = self; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:weakSelf image:nil animatedImage:animatedImage didLoadAtIndex:weakSelf.index]; } CGSize size = animatedImage.size; [self setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [self.imageView animatedTransitionImage:animatedImage]; return true; } withKey:pictureModel.imageURL]; } }); }]; } } }else if (pictureModel.imageType == MFImageTypeStaticWebP) { UIImage *webpImage = pictureModel.posterImage; if (webpImage) { [self _hideProgressView]; [self setPictureSize:webpImage.size]; self.imageView.image = webpImage; }else { NSURL *url = [NSURL URLWithString:pictureModel.imageURL]; NSString *key = [[YYWebImageManager sharedManager] cacheKeyForURL:url]; BOOL imageAvailable = [[YYImageCache sharedCache] containsImageForKey:key]; if (!imageAvailable) { [self _showProgressView]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; __weak __typeof(self)weakSelf = self; self.operation = [[YYWebImageManager sharedManager] requestImageWithURL:url options:YYWebImageOptionProgressiveBlur progress:^(NSInteger receivedSize, NSInteger expectedSize) { __strong __typeof(weakSelf)strongSelf = weakSelf; _mf_dispatch_async_on_main_queue(^{ [strongSelf.progressView setProgress:(1.0 * receivedSize / expectedSize) <= 0.95 ? 
(1.0 * receivedSize / expectedSize) : 0.95 animated:true]; }); } transform:nil completion:^(UIImage * _Nullable image, NSURL * _Nonnull url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error) { __strong __typeof(weakSelf)strongSelf = weakSelf; if (!error && stage == YYWebImageStageFinished) { strongSelf.operation = nil; _mf_dispatch_async_on_main_queue(^{ [strongSelf _hideProgressView]; UIImage *webpImage = (YYImage *)image; if (webpImage) { pictureModel.posterImage = webpImage; CGSize size = webpImage.size; [strongSelf setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [strongSelf.imageView animatedTransitionImage:webpImage]; return true; } withKey:pictureModel.imageURL]; } if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:strongSelf image:webpImage animatedImage:nil didLoadAtIndex:strongSelf.index]; } }); }else if (error) { strongSelf.operation = nil; [strongSelf _hideProgressView]; } }]; }else { [self _showProgressView]; [UIView animateWithDuration:1 animations:^{ [self.progressView setProgress:0.8 animated:true]; }]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; [[YYImageCache sharedCache] getImageDataForKey:key withBlock:^(NSData * _Nullable imageData) { UIImage *webpImage = [YYImage imageWithData:imageData]; _mf_dispatch_async_on_main_queue(^{ [self _hideProgressView]; if (webpImage) { pictureModel.posterImage = webpImage; __weak __typeof(self)weakSelf = self; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:weakSelf image:webpImage animatedImage:nil didLoadAtIndex:weakSelf.index]; } CGSize size = webpImage.size; [self setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [self.imageView 
animatedTransitionImage:webpImage]; return true; } withKey:pictureModel.imageURL]; } }); }]; } } }else if (pictureModel.imageType == MFImageTypeAnimatedWebP) { UIImage *webpAnimatedImage = pictureModel.webpAnimatedImage; if (webpAnimatedImage) { [self _hideProgressView]; [self setPictureSize:webpAnimatedImage.size]; self.imageView.image = webpAnimatedImage; }else { NSURL *url = [NSURL URLWithString:pictureModel.imageURL]; NSString *key = [[YYWebImageManager sharedManager] cacheKeyForURL:url]; BOOL imageAvailable = [[YYImageCache sharedCache] containsImageForKey:key]; if (!imageAvailable) { [self _showProgressView]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; __weak __typeof(self)weakSelf = self; self.operation = [[YYWebImageManager sharedManager] requestImageWithURL:url options:YYWebImageOptionProgressiveBlur progress:^(NSInteger receivedSize, NSInteger expectedSize) { __strong __typeof(weakSelf)strongSelf = weakSelf; _mf_dispatch_async_on_main_queue(^{ [strongSelf.progressView setProgress:(1.0 * receivedSize / expectedSize) <= 0.95 ? 
(1.0 * receivedSize / expectedSize) : 0.95 animated:true]; }); } transform:nil completion:^(UIImage * _Nullable image, NSURL * _Nonnull url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error) { __strong __typeof(weakSelf)strongSelf = weakSelf; if (!error && stage == YYWebImageStageFinished) { strongSelf.operation = nil; _mf_dispatch_async_on_main_queue(^{ [strongSelf _hideProgressView]; YYImage *webpImage = (YYImage *)image; if (webpImage) { pictureModel.posterImage = webpImage; pictureModel.webpAnimatedImage = webpImage; CGSize size = webpImage.size; [strongSelf setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [strongSelf.imageView animatedTransitionImage:webpImage]; return true; } withKey:pictureModel.imageURL]; } if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:strongSelf image:webpImage animatedImage:nil didLoadAtIndex:strongSelf.index]; } }); }else if (error) { strongSelf.operation = nil; [strongSelf _hideProgressView]; } }]; }else { [self _showProgressView]; [UIView animateWithDuration:1 animations:^{ [self.progressView setProgress:0.8 animated:true]; }]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; [[YYImageCache sharedCache] getImageDataForKey:key withBlock:^(NSData * _Nullable imageData) { UIImage *webpImage = [YYImage imageWithData:imageData]; _mf_dispatch_async_on_main_queue(^{ [self _hideProgressView]; if (webpImage) { pictureModel.posterImage = webpImage; __weak __typeof(self)weakSelf = self; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:weakSelf image:webpImage animatedImage:nil didLoadAtIndex:weakSelf.index]; } CGSize size = webpImage.size; [self setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] 
addTask:^BOOL{ [self.imageView animatedTransitionImage:webpImage]; return true; } withKey:pictureModel.imageURL]; } }); }]; } } }else { if (pictureModel.posterImage) { [self _hideProgressView]; [self setPictureSize:pictureModel.posterImage.size]; self.imageView.image = pictureModel.posterImage; }else { NSURL *url = [NSURL URLWithString:pictureModel.imageURL]; NSString *key = [[YYWebImageManager sharedManager] cacheKeyForURL:url]; BOOL imageAvailable = [[YYImageCache sharedCache] containsImageForKey:key]; if (!imageAvailable) { [self _showProgressView]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; __weak __typeof(self)weakSelf = self; self.operation = [[YYWebImageManager sharedManager] requestImageWithURL:url options:YYWebImageOptionProgressiveBlur progress:^(NSInteger receivedSize, NSInteger expectedSize) { __strong __typeof(weakSelf)strongSelf = weakSelf; _mf_dispatch_async_on_main_queue(^{ [strongSelf.progressView setProgress:(1.0 * receivedSize / expectedSize) <= 0.95 ? 
(1.0 * receivedSize / expectedSize) : 0.95 animated:true]; }); } transform:nil completion:^(UIImage * _Nullable image, NSURL * _Nonnull url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error) { __strong __typeof(weakSelf)strongSelf = weakSelf; _mf_dispatch_async_on_main_queue(^{ if (!error && stage == YYWebImageStageFinished) { strongSelf.operation = nil; [strongSelf _hideProgressView]; if (image) { pictureModel.posterImage = image; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:strongSelf image:image animatedImage:nil didLoadAtIndex:strongSelf.index]; } CGSize size = image.size; [strongSelf setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [strongSelf.imageView animatedTransitionImage:image]; return true; } withKey:pictureModel.imageURL]; } }else if (error){ strongSelf.operation = nil; [strongSelf _hideProgressView]; } }); }]; }else { [self _showProgressView]; [UIView animateWithDuration:1 animations:^{ [self.progressView setProgress:0.8 animated:true]; }]; UIImage *image = pictureModel.posterImage ?: pictureModel.placeholderImage; [self setPictureSize:image.size]; self.imageView.image = image; [[YYImageCache sharedCache] getImageDataForKey:key withBlock:^(NSData * _Nullable imageData) { UIImage *image = [UIImage imageWithData:imageData]; _mf_dispatch_async_on_main_queue(^{ [self _hideProgressView]; if (image) { pictureModel.posterImage = image; __weak __typeof(self)weakSelf = self; if ([_pictureDelegate respondsToSelector:@selector(pictureView:image:animatedImage:didLoadAtIndex:)]) { [_pictureDelegate pictureView:weakSelf image:image animatedImage:nil didLoadAtIndex:weakSelf.index]; } CGSize size = image.size; [self setPictureSize:size]; [[MFRunLoopDistribution sharedRunLoopDistribution] addTask:^BOOL{ [self.imageView animatedTransitionImage:image]; return true; } withKey:pictureModel.imageURL]; } }); }]; } } } } 
#pragma mark - -------------------

/// Shows the download progress HUD and marks loading as in-flight.
/// Always hops to the main queue before touching UIKit state.
- (void)_showProgressView {
    _mf_dispatch_async_on_main_queue(^{
        self.loadingFinished = false;
        self.progressView.alpha = 1;
    });
}

/// Hides the progress HUD and marks loading as finished.
/// Fast-paths the already-on-main case (pthread_main_np) so callers running
/// on the main thread see the change synchronously.
- (void)_hideProgressView {
    if (pthread_main_np()) {
        self.loadingFinished = true;
        self.progressView.alpha = 0;
    } else {
        _mf_dispatch_async_on_main_queue(^{
            self.loadingFinished = true;
            self.progressView.alpha = 0;
        });
    }
}

/// Stores the model and routes to local- or remote-image configuration
/// depending on whether an asset name is present.
/// NOTE(review): the parameter type reads bare `id` here; the angle-bracketed
/// protocol qualification (likely id<MFPictureModelProtocol>) appears to have
/// been lost in extraction — verify against the header.
- (void)setPictureModel:(id)pictureModel {
    if (!pictureModel) {
        return;
    }
    _pictureModel = pictureModel;
    if (pictureModel.imageName) {
        [self _configWithLocalPictureModel:pictureModel];
    } else {
        [self _configWithRemotePictureModel:pictureModel];
    }
}

/// Re-centers the image horizontally whenever the content size changes while
/// not zoomed in.
/// NOTE(review): `7 << 16` is not a documented UIViewAnimationOptions curve
/// (documented curves stop at 3 << 16); this looks like a private spring
/// curve hack — confirm intent before changing.
- (void)setContentSize:(CGSize)contentSize {
    [super setContentSize:contentSize];
    if (self.zoomScale == 1) {
        [UIView animateWithDuration:0.25 delay:0 options:7 << 16 animations:^{
            CGPoint center = self.imageView.center;
            center.x = self.contentSize.width * 0.5;
            self.imageView.center = center;
        } completion:nil];
    }
}

- (void)setLastContentOffset:(CGPoint)lastContentOffset {
    // Only skip recording when the user has let go AND the pull-away (zoom)
    // ratio is already > 0.15 — i.e. a dismiss gesture is in progress and the
    // recorded offset must stay frozen.
    if (!(self.dragging == false && _scale > 0.15)) {
        _lastContentOffset = lastContentOffset;
    }
}

/// Stores the intrinsic picture size and derives the on-screen size from it.
- (void)setPictureSize:(CGSize)pictureSize {
    _pictureSize = pictureSize;
    if (CGSizeEqualToSize(pictureSize, CGSizeZero)) {
        return;
    }
    // Compute the displayed size: full screen width, height scaled
    // proportionally (long images end up taller than the screen).
    CGFloat screenW = [UIScreen mainScreen].bounds.size.width;
    CGFloat scale = screenW / pictureSize.width;
    CGFloat height = scale * pictureSize.height;
    self.showPictureSize = CGSizeMake(screenW, height);
}

/// Applies the computed display size to the image view and content size.
- (void)setShowPictureSize:(CGSize)showPictureSize {
    _showPictureSize = showPictureSize;
    self.imageView.frame = [self getImageActualFrame:showPictureSize];
    self.contentSize = self.imageView.frame.size;
}

/// Vertically centers images shorter than the screen; taller images start at
/// the top (y = 0) and scroll.
- (CGRect)getImageActualFrame:(CGSize)imageSize {
    CGFloat x = 0;
    CGFloat y = 0;
    if (imageSize.height < [UIScreen mainScreen].bounds.size.height) {
        y = ([UIScreen mainScreen].bounds.size.height - imageSize.height) / 2;
    }
    return CGRectMake(x, y, imageSize.width, imageSize.height);
}

/// Returns the rect (in self's coordinates) to zoom to so that `center` stays
/// under the tap at the requested zoom scale.
- (CGRect)zoomRectForScale:(float)scale withCenter:(CGPoint)center {
    CGRect zoomRect;
    zoomRect.size.height = self.frame.size.height / scale;
    zoomRect.size.width = self.frame.size.width / scale;
    zoomRect.origin.x = center.x - (zoomRect.size.width / 2.0);
    zoomRect.origin.y = center.y - (zoomRect.size.height / 2.0);
    return zoomRect;
}

#pragma mark - 监听方法

/// Double-tap toggles between 2x zoom (centered on the tap) and 1x.
/// Ignored while the image is still loading.
- (void)doubleClick:(UITapGestureRecognizer *)gesture {
    if (!self.isLoadingFinished) {
        return;
    }
    CGFloat newScale = 2;
    if (_doubleClicks) {
        newScale = 1;
    }
    CGRect zoomRect = [self zoomRectForScale:newScale withCenter:[gesture locationInView:gesture.view]];
    [self zoomToRect:zoomRect animated:YES];
    _doubleClicks = !_doubleClicks;
}

#pragma mark - UIScrollViewDelegate

/// Drives the pull-to-dismiss interaction: converts over-scroll past the top
/// or bottom into a 0..1 `_scale` that the delegate uses to fade the browser,
/// and triggers dismissal when the user releases beyond the threshold.
- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
    self.lastContentOffset = scrollView.contentOffset;
    // Save offsetY.
    _offsetY = scrollView.contentOffset.y;
    self.progressView.alpha = 0;
    // A transform animation is in flight — ignore scroll callbacks.
    if ([self.imageView.layer animationForKey:@"transform"] != nil) {
        return;
    }
    // The user is pinch-zooming.
    if (self.zoomBouncing || self.zooming) {
        return;
    }
    CGFloat screenH = [UIScreen mainScreen].bounds.size.height;
    // Scrolling somewhere in the middle of a long image:
    if (scrollView.contentSize.height > screenH) {
        // Neither over-scrolled past the top nor past the bottom — bail out.
        if (_lastContentOffset.y > 0 && _lastContentOffset.y <= scrollView.contentSize.height - screenH) {
            return;
        }
    }
    _scale = fabs(_lastContentOffset.y) / screenH;
    // If content height > screen height AND the offset exceeds
    // (content height - screen height), the user has over-scrolled past the
    // bottom; measure the over-scroll distance from the bottom edge instead.
    if (scrollView.contentSize.height > screenH && _lastContentOffset.y > scrollView.contentSize.height - screenH) {
        _scale = (_lastContentOffset.y - (scrollView.contentSize.height - screenH)) / screenH;
    }
    // For long images, forward the scale only while over-scrolled past the
    // top (condition 1) or past the bottom (condition 2); short images always
    // forward it.
    // NOTE(review): these delegate sends have no respondsToSelector: guard,
    // unlike the image-loading callbacks — presumably the scale/click methods
    // are required in the protocol; verify.
    if (scrollView.contentSize.height > screenH) {
        // Long image.
        if (scrollView.contentOffset.y < 0 || _lastContentOffset.y > scrollView.contentSize.height - screenH) {
            [_pictureDelegate pictureView:self scale:_scale];
        }
    } else {
        [_pictureDelegate pictureView:self scale:_scale];
    }
    // The user has released the touch.
    if (scrollView.dragging == false) {
        if (_scale > 0.08 && _scale <= 1) {
            // Dismiss the browser.
            [_pictureDelegate pictureView:self didClickAtIndex:self.index];
            // Pin the contentOffset so the dismissal animates from here.
            [scrollView setContentOffset:_lastContentOffset animated:false];
        } else {
            if (!_scale && !self.isLoadingFinished) {
                [UIView animateWithDuration:0.1 animations:^{
                    self.progressView.alpha = 1;
                }];
            }
        }
    }
}

- (UIView *)viewForZoomingInScrollView:(UIScrollView *)scrollView {
    return _imageView;
}

/// Keeps the image vertically (and, when zooming out, horizontally) centered
/// during pinch zoom, and restores the progress HUD at 1x while still loading.
- (void)scrollViewDidZoom:(UIScrollView *)scrollView {
    CGPoint center = _imageView.center;
    CGFloat offsetY = (scrollView.bounds.size.height > scrollView.contentSize.height) ?
        (scrollView.bounds.size.height - scrollView.contentSize.height) * 0.5 : 0.0;
    center.y = scrollView.contentSize.height * 0.5 + offsetY;
    _imageView.center = center;
    if (scrollView.zoomScale == 1 && !self.isLoadingFinished) {
        [UIView animateWithDuration:0.1 animations:^{
            self.progressView.alpha = 1;
        }];
    }
    // When zoomed out below the minimum scale, keep the image centered on
    // screen horizontally as well.
    if (scrollView.zoomScale < scrollView.minimumZoomScale) {
        CGFloat offsetX = (scrollView.bounds.size.width > scrollView.contentSize.width) ?
            (scrollView.bounds.size.width - scrollView.contentSize.width) * 0.5 : 0.0;
        center.x = scrollView.contentSize.width * 0.5 + offsetX;
        _imageView.center = center;
    }
}

@end

================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFRunLoopDistribution.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): the angle-bracketed header name after this #import was lost
// during extraction (likely <Foundation/Foundation.h>) — restore from VCS.
#import
/// One unit of deferred work; return YES when the unit completed its pass.
typedef BOOL(^MFRunLoopDistributionUnit)(void);
/// Queues blocks to run one-at-a-time on main run-loop passes, spreading
/// expensive image transitions across frames.
@interface MFRunLoopDistribution : NSObject
+ (instancetype)sharedRunLoopDistribution;
- (void)addTask:(MFRunLoopDistributionUnit)unit withKey:(id)key;
- (void)removeAllTasks;
@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/MFRunLoopDistribution.m ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
#import "MFRunLoopDistribution.h"
// NOTE(review): the framework import target was lost in extraction (the line
// read `#import <...>`); the CFRunLoop APIs below come from CoreFoundation,
// which Foundation re-exports.
#import <Foundation/Foundation.h>

// Forward declarations: these static C functions are referenced before their
// definitions below (the original relied on implicit declarations).
static void _defaultModeRunLoopWorkDistributionCallback(CFRunLoopObserverRef observer,
                                                        CFRunLoopActivity activity,
                                                        void *info);
static void _registerObserver(CFOptionFlags activities,
                              CFIndex order,
                              CFStringRef mode,
                              void *info,
                              CFRunLoopObserverCallBack callback);

@interface MFRunLoopDistribution ()
/// FIFO of pending work units, drained by the run-loop observer.
@property (nonatomic, strong) NSMutableArray *tasks;
/// Keys parallel to `tasks` (same indices, removed in lockstep).
@property (nonatomic, strong) NSMutableArray *tasksKeys;
/// Repeating no-op timer whose only purpose is to keep the main run loop
/// waking up so the kCFRunLoopBeforeWaiting observer keeps firing while
/// tasks are queued.
@property (nonatomic, strong) NSTimer *timer;
@end

@implementation MFRunLoopDistribution

/// Drops every queued unit together with its key.
- (void)removeAllTasks {
    [self.tasks removeAllObjects];
    [self.tasksKeys removeAllObjects];
}

/// Enqueues one unit of work for a future main-run-loop pass.
/// @param unit Block to run; its YES/NO return controls draining (see the
///             observer callback below).
/// @param key  Opaque identifier stored alongside the unit.
- (void)addTask:(MFRunLoopDistributionUnit)unit withKey:(id)key {
    // Fix: NSMutableArray raises on nil insertion — fail soft instead of
    // crashing the whole browser on a nil unit or key.
    if (unit == nil || key == nil) {
        return;
    }
    [self.tasks addObject:unit];
    [self.tasksKeys addObject:key];
}

/// Intentionally empty: the timer exists only to wake the run loop.
- (void)_timerFiredMethod:(NSTimer *)timer {}

- (instancetype)init {
    if ((self = [super init])) {
        _tasks = [NSMutableArray array];
        _tasksKeys = [NSMutableArray array];
        // Fix: attach the wake-up timer to the MAIN run loop explicitly.
        // The original used +scheduledTimerWithTimeInterval:, which schedules
        // on the *current* thread's run loop — a loop that may never spin if
        // the singleton is first touched off the main thread.
        // (The timer retains self; acceptable for a process-lifetime singleton.)
        _timer = [NSTimer timerWithTimeInterval:0.1
                                         target:self
                                       selector:@selector(_timerFiredMethod:)
                                       userInfo:nil
                                        repeats:YES];
        [[NSRunLoop mainRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode];
    }
    return self;
}

+ (instancetype)sharedRunLoopDistribution {
    static MFRunLoopDistribution *singleton;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        singleton = [[self alloc] init];
        [self _registerRunLoopDistributionAsMainRunloopObserver:singleton];
    });
    return singleton;
}

/// Installs the kCFRunLoopBeforeWaiting observer that drains the task queue.
/// (The original also carried a by-value `static CFRunLoopObserverRef`
/// parameter that was written but never read — removed as dead code.)
+ (void)_registerRunLoopDistributionAsMainRunloopObserver:(MFRunLoopDistribution *)runLoopDistribution {
    _registerObserver(kCFRunLoopBeforeWaiting,
                      NSIntegerMax - 999,
                      kCFRunLoopDefaultMode,
                      (__bridge void *)runLoopDistribution,
                      &_defaultModeRunLoopWorkDistributionCallback);
}

static void _registerObserver(CFOptionFlags activities,
                              CFIndex order,
                              CFStringRef mode,
                              void *info,
                              CFRunLoopObserverCallBack callback) {
    // Fix: observe the MAIN run loop unconditionally. The original used
    // CFRunLoopGetCurrent(), which silently watches the wrong loop when the
    // singleton is first created on a background thread.
    CFRunLoopRef runLoop = CFRunLoopGetMain();
    // The context retains `info` (the singleton) for the observer's lifetime.
    CFRunLoopObserverContext context = {0, info, &CFRetain, &CFRelease, NULL};
    CFRunLoopObserverRef observer =
        CFRunLoopObserverCreate(NULL, activities, YES, order, callback, &context);
    CFRunLoopAddObserver(runLoop, observer, mode);
    // The run loop retains the observer; balance the Create above.
    CFRelease(observer);
}

/// Runs queued units front-first, removing each (with its key) after it runs,
/// and stops for this pass as soon as a unit returns YES or the queue empties.
static void _runLoopWorkDistributionCallback(CFRunLoopObserverRef observer,
                                             CFRunLoopActivity activity,
                                             void *info) {
    MFRunLoopDistribution *runLoopWorkDistribution = (__bridge MFRunLoopDistribution *)info;
    if (runLoopWorkDistribution.tasks.count == 0) {
        return;
    }
    BOOL result = NO;
    while (result == NO && runLoopWorkDistribution.tasks.count) {
        MFRunLoopDistributionUnit unit = runLoopWorkDistribution.tasks.firstObject;
        result = unit();
        [runLoopWorkDistribution.tasks removeObjectAtIndex:0];
        [runLoopWorkDistribution.tasksKeys removeObjectAtIndex:0];
    }
}

static void _defaultModeRunLoopWorkDistributionCallback(CFRunLoopObserverRef observer,
                                                        CFRunLoopActivity activity,
                                                        void *info) {
    _runLoopWorkDistributionCallback(observer, activity, info);
}

@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/UIImage+ForceDecoded.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): the angle-bracketed import target was lost in extraction;
// UIImage requires UIKit.
#import <UIKit/UIKit.h>
/// Pre-decodes (and optionally downsamples) image data off the main thread so
/// display does not stall on lazy decompression.
@interface UIImage (ForceDecoded)
+ (UIImage *)forceDecodedImageWithData:(NSData *)data;
+ (UIImage *)forceDecodedImageWithData:(NSData *)data compressed:(BOOL)compressed;
@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/UIImage+ForceDecoded.m ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
#import "UIImage+ForceDecoded.h"

@implementation UIImage (ForceDecoded)

//https://github.com/ibireme/YYKit/blob/master/YYKit/Image/YYImageCoder.m
/// Cached device-RGB color space (creating one per decode is wasteful).
/// NOTE(review): this un-prefixed global name is inherited from YYKit (which
/// prefixes it `YYCGColorSpaceGetDeviceRGB`); it risks duplicate-symbol
/// clashes with other YYKit-derived code — consider an MF prefix.
CGColorSpaceRef CGColorSpaceGetDeviceRGB() {
    static CGColorSpaceRef space;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        space = CGColorSpaceCreateDeviceRGB();
    });
    return space;
}

/// Returns a +1 copy of `imageRef` whose pixels are already decompressed.
/// @param decodeForDisplay YES: redraw into a BGRA8888/BGRX8888 bitmap
///        context (same format UIKit renders into; may lose some precision).
///        NO: copy the provider's bytes, forcing the codec to decode.
/// @return A new CGImage the caller must CGImageRelease, or NULL on failure.
CGImageRef CGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) {
    if (!imageRef) return NULL;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || height == 0) return NULL;
    if (decodeForDisplay) { //decode with redraw (may lose some precision)
        CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
        BOOL hasAlpha = NO;
        if (alphaInfo == kCGImageAlphaPremultipliedLast ||
            alphaInfo == kCGImageAlphaPremultipliedFirst ||
            alphaInfo == kCGImageAlphaLast ||
            alphaInfo == kCGImageAlphaFirst) {
            hasAlpha = YES;
        }
        // BGRA8888 (premultiplied) or BGRX8888
        // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0,
                                                     CGColorSpaceGetDeviceRGB(), bitmapInfo);
        if (!context) return NULL;
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
        CGImageRef newImage = CGBitmapContextCreateImage(context);
        CFRelease(context);
        return newImage;
    } else {
        CGColorSpaceRef space = CGImageGetColorSpace(imageRef);
        size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
        size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
        size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
        CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
        if (bytesPerRow == 0 || width == 0 || height == 0) return NULL;
        CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
        if (!dataProvider) return NULL;
        CFDataRef data = CGDataProviderCopyData(dataProvider); // decode
        if (!data) return NULL;
        CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data);
        CFRelease(data);
        if (!newProvider) return NULL;
        CGImageRef newImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel,
                                            bytesPerRow, space, bitmapInfo, newProvider,
                                            NULL, false, kCGRenderingIntentDefault);
        CFRelease(newProvider);
        return newImage;
    }
}

/// Convenience: decode with downsampling enabled.
+ (UIImage *)forceDecodedImageWithData:(NSData *)data {
    return [self forceDecodedImageWithData:data compressed:true];
}

/// Decodes `data` into a ready-to-display UIImage; multi-frame sources become
/// an animated UIImage. Returns nil for nil/undecodable data.
/// @param compressed YES downsamples via a thumbnail whose max pixel size is
///        derived from the data's *byte length* (crude heuristic, preserved
///        from the original for compatibility).
+ (UIImage *)forceDecodedImageWithData:(NSData *)data compressed:(BOOL)compressed {
    if (!data) {
        return nil;
    }
    CGFloat maxPixelSize = compressed ? data.length * 0.01 : data.length;
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
    // Fix: the original called CGImageSourceGetCount/CFRelease on a NULL
    // source (crash) when the data was not a decodable image.
    if (!source) {
        return nil;
    }
    CFDictionaryRef options = (__bridge CFDictionaryRef) @{
        (id)kCGImageSourceCreateThumbnailWithTransform : @YES,
        (id)kCGImageSourceCreateThumbnailFromImageAlways : @YES,
        (id)kCGImageSourceThumbnailMaxPixelSize : @(maxPixelSize)
    };
    size_t count = CGImageSourceGetCount(source);
    UIImage *resultImage = nil;
    if (count <= 1) {
        CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, 0, options);
        CGImageRef decodedImageRef = CGImageCreateDecodedCopy(imageRef, true);
        CGImageRelease(imageRef);
        if (decodedImageRef) {
            resultImage = [UIImage imageWithCGImage:decodedImageRef
                                              scale:[UIScreen mainScreen].scale
                                        orientation:UIImageOrientationUp];
            // Fix: the original leaked this +1 CGImage on every decode.
            CGImageRelease(decodedImageRef);
        }
    } else {
        NSMutableArray *images = [NSMutableArray array];
        NSTimeInterval duration = 0.0f;
        for (size_t i = 0; i < count; i++) {
            CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, i, options);
            CGImageRef decodedImageRef = CGImageCreateDecodedCopy(imageRef, true);
            CGImageRelease(imageRef);
            if (!decodedImageRef) {
                continue; // skip frames that fail to decode instead of crashing
            }
            duration += [self frameDurationAtIndex:i source:source];
            UIImage *image = [UIImage imageWithCGImage:decodedImageRef
                                                 scale:[UIScreen mainScreen].scale
                                           orientation:UIImageOrientationUp];
            [images addObject:image];
            // Fix: the original leaked every frame's decoded +1 CGImage.
            CGImageRelease(decodedImageRef);
        }
        if (!duration) {
            duration = (1.0f / 10.0f) * count;
        }
        resultImage = [UIImage animatedImageWithImages:images duration:duration];
    }
    CFRelease(source);
    return resultImage;
}

/// Reads a GIF frame's delay from its properties (preferring the unclamped
/// value); defaults to 0.1s when the source carries no timing info.
+ (float)frameDurationAtIndex:(NSUInteger)index source:(CGImageSourceRef)source {
    float frameDuration = 0.1f;
    CFDictionaryRef cfFrameProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil);
    // Fix: the original CFRelease'd a NULL dictionary (crash) when a frame
    // carried no properties.
    if (!cfFrameProperties) {
        return frameDuration;
    }
    NSDictionary *frameProperties = (__bridge NSDictionary *)cfFrameProperties;
    NSDictionary *gifProperties = frameProperties[(NSString *)kCGImagePropertyGIFDictionary];
    NSNumber *delayTimeUnclampedProp = gifProperties[(NSString *)kCGImagePropertyGIFUnclampedDelayTime];
    if (delayTimeUnclampedProp) {
        frameDuration = [delayTimeUnclampedProp floatValue];
    } else {
        NSNumber *delayTimeProp = gifProperties[(NSString *)kCGImagePropertyGIFDelayTime];
        if (delayTimeProp) {
            frameDuration = [delayTimeProp floatValue];
        }
    }
    CFRelease(cfFrameProperties);
    return frameDuration;
}

@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/UIImageView+TransitionImage.h ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
// NOTE(review): the angle-bracketed import target was lost in extraction;
// UIImageView requires UIKit.
#import <UIKit/UIKit.h>
/// Swaps the image view's image with a short cross-dissolve.
@interface UIImageView (TransitionImage)
- (void)animatedTransitionImage:(UIImage *)image;
@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureBrowser/UIImageView+TransitionImage.m ================================================
// Copyright © 2018年 GodzzZZZ. All rights reserved.
#import "UIImageView+TransitionImage.h"

@implementation UIImageView (TransitionImage)

/// Cross-dissolves to `image` over 0.15s.
- (void)animatedTransitionImage:(UIImage *)image {
    [UIView transitionWithView:self
                      duration:0.15f
                       options:UIViewAnimationOptionTransitionCrossDissolve
                    animations:^{
                        self.image = image;
                    }
                    completion:NULL];
}

@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureModel.h ================================================
// NOTE(review): a framework import was lost in extraction here — by the types
// below it must provide UIKit and the YYImage class (YYWebImage umbrella).
#import <UIKit/UIKit.h>
#import "MFPictureModelProtocol.h"

@interface MFPictureModel : NSObject <MFPictureModelProtocol>

// `copy` (not `strong`) for NSString-typed properties so mutable strings
// cannot be mutated behind the model's back.
@property (nonatomic, copy) NSString *imageURL;
@property (nonatomic, copy) NSString *imageName;
@property (nonatomic, assign) MFImageType imageType;
@property (nonatomic, strong) UIImage *placeholderImage;
@property (nonatomic, strong) UIImage *posterImage;
@property (nonatomic, strong) UIImage *animatedImage;
@property (nonatomic, strong) YYImage *webpAnimatedImage;
@property (nonatomic, assign) BOOL compressed;
@property (nonatomic, assign) BOOL hidden;

- (instancetype)initWithURL:(NSString *)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType;

- (instancetype)initWithURL:(NSString
*)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType
                 compressed:(BOOL)compressed
                     hidden:(BOOL)hidden;

- (instancetype)initWithURL:(NSString *)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType
           placeholderImage:(UIImage *)placeholderImage
                posterImage:(UIImage *)posterImage
              animatedImage:(UIImage *)animatedImage
          webpAnimatedImage:(YYImage *)webpAnimatedImage
                 compressed:(BOOL)compressed
                     hidden:(BOOL)hidden;

@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/MFPictureModel.m ================================================
#import "MFPictureModel.h"

@implementation MFPictureModel

/// Convenience: bundled "placeholder" image, compressed decoding, visible.
- (instancetype)initWithURL:(NSString *)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType {
    return [self initWithURL:imageURL
                   imageName:imageName
                   imageType:imageType
            placeholderImage:[UIImage imageNamed:@"placeholder"]
                 posterImage:nil
               animatedImage:nil
           webpAnimatedImage:nil
                  compressed:true
                      hidden:false];
}

/// Convenience: bundled "placeholder" image with explicit flags.
- (instancetype)initWithURL:(NSString *)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType
                 compressed:(BOOL)compressed
                     hidden:(BOOL)hidden {
    return [self initWithURL:imageURL
                   imageName:imageName
                   imageType:imageType
            placeholderImage:[UIImage imageNamed:@"placeholder"]
                 posterImage:nil
               animatedImage:nil
           webpAnimatedImage:nil
                  compressed:compressed
                      // Fix: the original hard-coded `hidden:false`, silently
                      // discarding the caller's `hidden` argument.
                      hidden:hidden];
}

/// Designated initializer. (Parameter typo `palceholderImage` corrected;
/// Objective-C parameter names are not part of the call syntax, so callers
/// are unaffected.)
- (instancetype)initWithURL:(NSString *)imageURL
                  imageName:(NSString *)imageName
                  imageType:(MFImageType)imageType
           placeholderImage:(UIImage *)placeholderImage
                posterImage:(UIImage *)posterImage
              animatedImage:(UIImage *)animatedImage
          webpAnimatedImage:(YYImage *)webpAnimatedImage
                 compressed:(BOOL)compressed
                     hidden:(BOOL)hidden {
    self = [super init];
    if (self) {
        _imageURL = imageURL;
        _imageName = imageName;
        _imageType = imageType;
        _placeholderImage = placeholderImage;
        _posterImage = posterImage;
        _animatedImage = animatedImage;
        _webpAnimatedImage = webpAnimatedImage;
        _compressed = compressed;
        _hidden = hidden;
    }
    return self;
}

@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/RemoteImageViewController.h ================================================
// NOTE(review): the angle-bracketed import target was lost in extraction;
// UIViewController requires UIKit.
#import <UIKit/UIKit.h>
@interface RemoteImageViewController : UIViewController
@property (nonatomic, strong) NSMutableArray *picList;
@end
================================================ FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/RemoteImageViewController.m ================================================
#import "RemoteImageViewController.h"
#import "MFPictureBrowser.h"
#import "MFDisplayPhotoCollectionViewCell.h"
#import "MFPictureModel.h"
// NOTE(review): a framework import was lost in extraction here — the
// yy_setImageWithURL:/YYImageCache APIs below come from YYWebImage.
#import <YYWebImage/YYWebImage.h>
#import "MFPictureBrowser/UIImageView+TransitionImage.h"
#import "MFPictureBrowser/UIImage+ForceDecoded.h"

@interface RemoteImageViewController () < UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout, MFPictureBrowserDelegate >
/// Grid of remote thumbnails.
@property (nonatomic, strong) UICollectionView *collectionView;
/// Index of the cell currently shown full-screen in the browser.
@property (nonatomic, assign) NSInteger currentIndex;
@end

@implementation RemoteImageViewController

/// Lazily builds the full-screen (inset by 10pt) collection view.
- (UICollectionView *)collectionView {
    if (!_collectionView) {
        UICollectionViewFlowLayout *flow = [[UICollectionViewFlowLayout alloc] init];
        _collectionView = [[UICollectionView alloc] initWithFrame:CGRectMake(10, 10,
                                                                             [UIScreen mainScreen].bounds.size.width - 20,
                                                                             [UIScreen mainScreen].bounds.size.height - 20)
                                             collectionViewLayout:flow];
        _collectionView.delegate = self;
        _collectionView.dataSource = self;
        _collectionView.backgroundColor = [UIColor whiteColor];
        _collectionView.bounces = NO;
    }
    return _collectionView;
}

/// Lazily creates the backing model array.
- (NSMutableArray *)picList {
    if (!_picList) {
        _picList = @[].mutableCopy;
    }
    return _picList;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];
    [self.view addSubview:self.collectionView];
    [self.collectionView registerClass:[MFDisplayPhotoCollectionViewCell class]
            forCellWithReuseIdentifier:@"reuseCell"];
}

-
(void)viewDidDisappear:(BOOL)animated {
    // NOTE(review): the leading "-" of this method lives on the previous line, outside this chunk.
    [super viewDidDisappear:animated];
    // Demo-only behavior: wipe both YYWebImage caches (RAM + disk) so the next
    // visit to this screen re-downloads every remote image.
    YYImageCache *cache = [YYImageCache sharedCache];
    [cache.memoryCache removeAllObjects];
    [cache.diskCache removeAllObjects];
}

/// The demo grid always has exactly one section.
- (NSInteger)numberOfSectionsInCollectionView:(UICollectionView *)collectionView {
    return 1;
}

/// One cell per entry in picList.
- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
    return self.picList.count;
}

/// Dequeues a photo cell and starts the asynchronous image download for it.
/// On success the decoded poster/animated images are cached on the model so
/// the full-screen browser can reuse them without re-decoding.
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    MFDisplayPhotoCollectionViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:@"reuseCell" forIndexPath:indexPath];
    // NOTE(review): the same target/action is re-added on every reuse —
    // confirm UIControl does not fire the action more than once per tap.
    [cell.button addTarget:self action:@selector(buttonClick:) forControlEvents:UIControlEventTouchUpInside];
    MFPictureModel *pictureModel = self.picList[indexPath.row];
    NSURL *url = [NSURL URLWithString:pictureModel.imageURL];
    __weak MFDisplayPhotoCollectionViewCell *weakCell = cell;
    // A model is "hidden" while the full-screen browser displays the same
    // image; the thumbnail is made transparent so only the browser copy shows.
    if (!pictureModel.hidden) {
        weakCell.displayImageView.alpha = 1.0f;
    }else {
        weakCell.displayImageView.alpha = 0.0f;
    }
    __weak __typeof(self)weakSelf = self;
    [weakCell.displayImageView yy_setImageWithURL:url placeholder:pictureModel.placeholderImage options:YYWebImageOptionProgressiveBlur | YYWebImageOptionSetImageWithFadeAnimation completion:^(UIImage * _Nullable image, NSURL * _Nonnull url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error) {
        __strong __typeof(weakSelf)strongSelf = weakSelf;
        if (!error && stage == YYWebImageStageFinished) {
            // UI mutation hops to the main queue; the decode/caching work
            // below stays on YYWebImage's completion queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                pictureModel.posterImage = image;
                // NOTE(review): weakCell may already be reused for a different
                // index path by the time this runs — confirm the tag badge
                // still matches the cell's current model.
                [strongSelf configTagImageView:weakCell.tagImageView imageType:pictureModel.imageType];
            });
            switch (pictureModel.imageType) {
                case MFImageTypeGIF: {
                    if (!pictureModel.animatedImage) {
                        // Force-decode the GIF frames once and cache them on the model.
                        YYImage *yyImage = (YYImage *)image;
                        NSData *animatedData = yyImage.animatedImageData;
                        UIImage *animatedImage = [UIImage forceDecodedImageWithData:animatedData];
                        pictureModel.animatedImage = animatedImage;
                    }
                }
                    break;
                case MFImageTypeAnimatedWebP: {
                    if (!pictureModel.webpAnimatedImage) {
                        pictureModel.webpAnimatedImage = (YYImage *)image;
                    }
                }
                    break;
                case MFImageTypeStaticWebP: {
                    if (!pictureModel.posterImage) {
                        pictureModel.posterImage = (YYImage *)image;
                    }
                }
                    break;
                default:
                    break;
            }
        }
    }];
    return cell;
}

/// Shows the "long picture" / "GIF" corner badge for the given image type,
/// or hides the badge entirely for plain images.
- (void)configTagImageView:(UIImageView *)tagImageView imageType:(MFImageType)imageType {
    if (imageType == MFImageTypeLongImage) {
        tagImageView.image = [UIImage imageNamed:@"ic_messages_pictype_long_pic_30x30_"];
    }else if (imageType == MFImageTypeGIF || imageType == MFImageTypeAnimatedWebP) {
        tagImageView.image = [UIImage imageNamed:@"ic_messages_pictype_gif_30x30_"];
    }else {
        tagImageView.image = nil;
    }
    tagImageView.alpha = 0;
    if (tagImageView.image) {
        tagImageView.alpha = 1;
    }
}

/// Opens the full-screen picture browser for the tapped cell.
- (void)buttonClick:(UIButton *)sender {
    // NOTE(review): walking superview.superview to reach the cell is fragile —
    // it breaks if the button is ever nested differently (e.g. under
    // contentView). Converting the button's frame and using
    // -indexPathForItemAtPoint: would be sturdier.
    MFDisplayPhotoCollectionViewCell *cell = (MFDisplayPhotoCollectionViewCell *)sender.superview.superview;
    NSIndexPath *indexPath = [self.collectionView indexPathForCell:cell];
    MFPictureBrowser *browser = [[MFPictureBrowser alloc] init];
    browser.delegate = self;
    self.currentIndex = indexPath.row;
    // Hide the thumbnail while the browser presents the same image.
    MFPictureModel *pictureModel = self.picList[indexPath.row];
    pictureModel.hidden = true;
    [self.collectionView reloadItemsAtIndexPaths:@[indexPath]];
    [browser showImageFromView:cell.displayImageView picturesCount:self.picList.count currentPictureIndex:indexPath.row];
}

/// Three equal columns sized from the screen width.
/// NOTE(review): the 20pt margins baked in here do not match the 5pt section
/// insets/spacing below — confirm the intended grid metrics.
- (CGSize)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout*)collectionViewLayout sizeForItemAtIndexPath:(NSIndexPath *)indexPath{
    return CGSizeMake(([UIScreen mainScreen].bounds.size.width - 20 - 20)/3, ([UIScreen mainScreen].bounds.size.width - 20 - 20)/3);
}

- (CGFloat)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout*)collectionViewLayout minimumLineSpacingForSectionAtIndex:(NSInteger)section{
    return 5.0f;
}

- (CGFloat)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout*)collectionViewLayout minimumInteritemSpacingForSectionAtIndex:(NSInteger)section{
    return 5.0f;
}

- (UIEdgeInsets)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout *)collectionViewLayout insetForSectionAtIndex:(NSInteger)section {
    return UIEdgeInsetsMake(5, 5, 5, 5);
}

- (void)collectionView:(UICollectionView *)collectionView didSelectItemAtIndexPath:(NSIndexPath *)indexPath {
    [collectionView deselectItemAtIndexPath:indexPath animated:YES];
}

/// MFPictureBrowser delegate: the thumbnail view used for the zoom transition.
- (UIImageView *)pictureBrowser:(MFPictureBrowser *)pictureBrowser imageViewAtIndex:(NSInteger)index {
    NSIndexPath *indexPath = [NSIndexPath indexPathForRow:index inSection:0];
    MFDisplayPhotoCollectionViewCell *cell = (MFDisplayPhotoCollectionViewCell *)[self.collectionView cellForItemAtIndexPath:indexPath];
    return cell.displayImageView;
}

/// MFPictureBrowser delegate: the model backing the picture at index.
- (id)pictureBrowser:(MFPictureBrowser *)pictureBrowser pictureModelAtIndex:(NSInteger)index {
    MFPictureModel *pictureModel = self.picList[index];
    return pictureModel;
}

/// MFPictureBrowser delegate: the browser finished loading an image — cache a
/// poster frame on the model and refresh the matching thumbnail.
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser image:(UIImage *)image animatedImage:(UIImage *)animatedImage didLoadAtIndex:(NSInteger)index {
    NSIndexPath *indexPath = [NSIndexPath indexPathForRow:index inSection:0];
    MFPictureModel *pictureModel = self.picList[index];
    if (animatedImage) {
        pictureModel.posterImage = animatedImage.images.firstObject;
    }else if (image) {
        pictureModel.posterImage = image;
    }
    [self.collectionView reloadItemsAtIndexPaths:@[indexPath]];
}

/// MFPictureBrowser delegate: the browser paged to a new index — un-hide the
/// previous thumbnail and hide the newly displayed one.
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser scrollToIndex:(NSInteger)index {
    MFPictureModel *pictureModel = self.picList[self.currentIndex];
    pictureModel.hidden = false;
    [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:self.currentIndex inSection:0]]];
    self.currentIndex = index;
    MFPictureModel *currentPictureModel = self.picList[self.currentIndex];
    currentPictureModel.hidden = true;
    [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath
indexPathForRow:self.currentIndex inSection:0]]]; }

/// MFPictureBrowser delegate: the browser was dismissed — restore the
/// thumbnail for the index that was showing.
/// NOTE(review): "dimiss" is a typo in the delegate method name; renaming it
/// would require changing the protocol, so it is left as-is.
- (void)pictureBrowser:(MFPictureBrowser *)pictureBrowser dimissAtIndex:(NSInteger)index {
    MFPictureModel *pictureModel = self.picList[index];
    pictureModel.hidden = false;
    [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:index inSection:0]]];
}
@end
================================================
FILE: MFPictureBrowserDemo/MFPictureBrowserDemo/main.m
================================================
//
// main.m
// MFPictureBrowserDemo
//
// Created by 张冬冬 on 2018/4/17.
// Copyright © 2018年 张冬冬. All rights reserved.
//
// NOTE(review): the extraction dropped the bracketed framework name from the
// first #import (likely <UIKit/UIKit.h>) — restore it before compiling.
#import #import "AppDelegate.h"

/// Standard UIKit entry point: hands control to the run loop with AppDelegate.
int main(int argc, char * argv[]) { @autoreleasepool { return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); } }
================================================
FILE: MFPictureBrowserDemo/MFPictureBrowserDemo.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$! { archiveVersion = 1; classes = { }; objectVersion = 48; objects = { /* Begin PBXBuildFile section */ 4A24835D209C5BF7004C69EC /* 2.webp in Resources */ = {isa = PBXBuildFile; fileRef = 4A24835C209C5BF7004C69EC /* 2.webp */; }; 4A29A8FD209C37020029A378 /* RemoteImageViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A29A8FC209C37020029A378 /* RemoteImageViewController.m */; }; 4A5AEE862085987300EBFF88 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A5AEE852085987300EBFF88 /* AppDelegate.m */; }; 4A5AEE8C2085987300EBFF88 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 4A5AEE8A2085987300EBFF88 /* Main.storyboard */; }; 4A5AEE912085987300EBFF88 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 4A5AEE8F2085987300EBFF88 /* LaunchScreen.storyboard */; }; 4A5AEE942085987300EBFF88 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A5AEE932085987300EBFF88 /* main.m */; }; 4A5AEE9D2085A87600EBFF88 /* MFPictureBrowser.m in Sources */ = {isa =
PBXBuildFile; fileRef = 4A5AEE9C2085A87600EBFF88 /* MFPictureBrowser.m */; }; 4A5AEEA02085BC7500EBFF88 /* MFPictureView.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A5AEE9F2085BC7500EBFF88 /* MFPictureView.m */; }; 4A5AEEA32085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A5AEEA22085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.m */; }; 4A60D12C2091733C00612995 /* UIImageView+TransitionImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A60D12B2091733C00612995 /* UIImageView+TransitionImage.m */; }; 4A60D135209184B600612995 /* UIImage+ForceDecoded.m in Sources */ = {isa = PBXBuildFile; fileRef = 4A60D134209184B600612995 /* UIImage+ForceDecoded.m */; }; 4AD36532209B0A67005F9089 /* 1.webp in Resources */ = {isa = PBXBuildFile; fileRef = 4AD36531209B0A67005F9089 /* 1.webp */; }; 4AD36535209BE6B9005F9089 /* MFRunLoopDistribution.m in Sources */ = {isa = PBXBuildFile; fileRef = 4AD36534209BE6B9005F9089 /* MFRunLoopDistribution.m */; }; 4AE0C925208EBF6800A2B2D5 /* MFPictureModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 4AE0C924208EBF6800A2B2D5 /* MFPictureModel.m */; }; 4AE0C929208F01FC00A2B2D5 /* 1.gif in Resources */ = {isa = PBXBuildFile; fileRef = 4AE0C928208F01FC00A2B2D5 /* 1.gif */; }; 4AE0C92D208F046800A2B2D5 /* 2.gif in Resources */ = {isa = PBXBuildFile; fileRef = 4AE0C92C208F046800A2B2D5 /* 2.gif */; }; 4AE0C92F208F1E5500A2B2D5 /* 3.gif in Resources */ = {isa = PBXBuildFile; fileRef = 4AE0C92E208F1E5500A2B2D5 /* 3.gif */; }; 5E53B83F20863DD5007480C7 /* ListViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 5E53B83E20863DD5007480C7 /* ListViewController.m */; }; 5E53B84520863F14007480C7 /* LocalImageViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 5E53B84420863F14007480C7 /* LocalImageViewController.m */; }; 5E53B849208642AE007480C7 /* Image.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 5E53B848208642AE007480C7 /* Image.xcassets */; }; 
C343AF5E55FDB1C04384A8C5 /* libPods-MFPictureBrowserDemo.a in Frameworks */ = {isa = PBXBuildFile; fileRef = A3403644A2EFF9B61054AEB2 /* libPods-MFPictureBrowserDemo.a */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ 03F4406D2CD57D8E4838F5A5 /* Pods-MFPictureBrowserDemo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MFPictureBrowserDemo.debug.xcconfig"; path = "Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo.debug.xcconfig"; sourceTree = ""; }; 4A24835C209C5BF7004C69EC /* 2.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = 2.webp; sourceTree = ""; }; 4A29A8FB209C37010029A378 /* RemoteImageViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RemoteImageViewController.h; sourceTree = ""; }; 4A29A8FC209C37020029A378 /* RemoteImageViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RemoteImageViewController.m; sourceTree = ""; }; 4A5AEE812085987300EBFF88 /* MFPictureBrowserDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = MFPictureBrowserDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 4A5AEE842085987300EBFF88 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 4A5AEE852085987300EBFF88 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 4A5AEE8B2085987300EBFF88 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 4A5AEE902085987300EBFF88 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 4A5AEE922085987300EBFF88 /* Info.plist */ = {isa 
= PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 4A5AEE932085987300EBFF88 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 4A5AEE9B2085A87600EBFF88 /* MFPictureBrowser.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MFPictureBrowser.h; sourceTree = ""; }; 4A5AEE9C2085A87600EBFF88 /* MFPictureBrowser.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MFPictureBrowser.m; sourceTree = ""; }; 4A5AEE9E2085BC7500EBFF88 /* MFPictureView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MFPictureView.h; sourceTree = ""; }; 4A5AEE9F2085BC7500EBFF88 /* MFPictureView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MFPictureView.m; sourceTree = ""; }; 4A5AEEA12085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MFDisplayPhotoCollectionViewCell.h; sourceTree = ""; }; 4A5AEEA22085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MFDisplayPhotoCollectionViewCell.m; sourceTree = ""; }; 4A60D12A2091733C00612995 /* UIImageView+TransitionImage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImageView+TransitionImage.h"; sourceTree = ""; }; 4A60D12B2091733C00612995 /* UIImageView+TransitionImage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "UIImageView+TransitionImage.m"; sourceTree = ""; }; 4A60D133209184B600612995 /* UIImage+ForceDecoded.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImage+ForceDecoded.h"; sourceTree = ""; }; 4A60D134209184B600612995 /* UIImage+ForceDecoded.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "UIImage+ForceDecoded.m"; sourceTree = 
""; }; 4AD36531209B0A67005F9089 /* 1.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = 1.webp; sourceTree = ""; }; 4AD36533209BE6B9005F9089 /* MFRunLoopDistribution.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MFRunLoopDistribution.h; sourceTree = ""; }; 4AD36534209BE6B9005F9089 /* MFRunLoopDistribution.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MFRunLoopDistribution.m; sourceTree = ""; }; 4AE0C922208EBAE900A2B2D5 /* MFPictureModelProtocol.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MFPictureModelProtocol.h; sourceTree = ""; }; 4AE0C923208EBF6800A2B2D5 /* MFPictureModel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MFPictureModel.h; sourceTree = ""; }; 4AE0C924208EBF6800A2B2D5 /* MFPictureModel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MFPictureModel.m; sourceTree = ""; }; 4AE0C928208F01FC00A2B2D5 /* 1.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = 1.gif; sourceTree = ""; }; 4AE0C92C208F046800A2B2D5 /* 2.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = 2.gif; sourceTree = ""; }; 4AE0C92E208F1E5500A2B2D5 /* 3.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = 3.gif; sourceTree = ""; }; 5E53B83D20863DD5007480C7 /* ListViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ListViewController.h; sourceTree = ""; }; 5E53B83E20863DD5007480C7 /* ListViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ListViewController.m; sourceTree = ""; }; 5E53B84320863F14007480C7 /* LocalImageViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LocalImageViewController.h; sourceTree = ""; }; 5E53B84420863F14007480C7 /* LocalImageViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = 
LocalImageViewController.m; sourceTree = ""; }; 5E53B848208642AE007480C7 /* Image.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Image.xcassets; sourceTree = ""; }; 8E2D0E105573B85353E3FBA2 /* Pods-MFPictureBrowserDemo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MFPictureBrowserDemo.release.xcconfig"; path = "Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo.release.xcconfig"; sourceTree = ""; }; A3403644A2EFF9B61054AEB2 /* libPods-MFPictureBrowserDemo.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-MFPictureBrowserDemo.a"; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 4A5AEE7E2085987300EBFF88 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( C343AF5E55FDB1C04384A8C5 /* libPods-MFPictureBrowserDemo.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ 0AAAB81BBA9D6597659FA589 /* Pods */ = { isa = PBXGroup; children = ( 03F4406D2CD57D8E4838F5A5 /* Pods-MFPictureBrowserDemo.debug.xcconfig */, 8E2D0E105573B85353E3FBA2 /* Pods-MFPictureBrowserDemo.release.xcconfig */, ); name = Pods; sourceTree = ""; }; 4A5AEE782085987300EBFF88 = { isa = PBXGroup; children = ( 4A5AEE832085987300EBFF88 /* MFPictureBrowserDemo */, 4A5AEE822085987300EBFF88 /* Products */, 0AAAB81BBA9D6597659FA589 /* Pods */, A77A5EFA813399DFB28FEDF5 /* Frameworks */, ); sourceTree = ""; }; 4A5AEE822085987300EBFF88 /* Products */ = { isa = PBXGroup; children = ( 4A5AEE812085987300EBFF88 /* MFPictureBrowserDemo.app */, ); name = Products; sourceTree = ""; }; 4A5AEE832085987300EBFF88 /* MFPictureBrowserDemo */ = { isa = PBXGroup; children = ( 4A5AEE9A2085988A00EBFF88 /* MFPictureBrowser */, 4A5AEE842085987300EBFF88 /* AppDelegate.h 
*/, 4A5AEE852085987300EBFF88 /* AppDelegate.m */, 5E53B83D20863DD5007480C7 /* ListViewController.h */, 5E53B83E20863DD5007480C7 /* ListViewController.m */, 4AE0C923208EBF6800A2B2D5 /* MFPictureModel.h */, 4AE0C924208EBF6800A2B2D5 /* MFPictureModel.m */, 4A29A8FB209C37010029A378 /* RemoteImageViewController.h */, 4A29A8FC209C37020029A378 /* RemoteImageViewController.m */, 5E53B84320863F14007480C7 /* LocalImageViewController.h */, 5E53B84420863F14007480C7 /* LocalImageViewController.m */, 4A5AEEA12085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.h */, 4A5AEEA22085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.m */, 4A5AEE8A2085987300EBFF88 /* Main.storyboard */, 4A5AEE8F2085987300EBFF88 /* LaunchScreen.storyboard */, 4A5AEE922085987300EBFF88 /* Info.plist */, 4A5AEE932085987300EBFF88 /* main.m */, 4AD36531209B0A67005F9089 /* 1.webp */, 4A24835C209C5BF7004C69EC /* 2.webp */, 4AE0C928208F01FC00A2B2D5 /* 1.gif */, 4AE0C92C208F046800A2B2D5 /* 2.gif */, 4AE0C92E208F1E5500A2B2D5 /* 3.gif */, 5E53B848208642AE007480C7 /* Image.xcassets */, ); path = MFPictureBrowserDemo; sourceTree = ""; }; 4A5AEE9A2085988A00EBFF88 /* MFPictureBrowser */ = { isa = PBXGroup; children = ( 4AD36533209BE6B9005F9089 /* MFRunLoopDistribution.h */, 4AD36534209BE6B9005F9089 /* MFRunLoopDistribution.m */, 4A5AEE9B2085A87600EBFF88 /* MFPictureBrowser.h */, 4A5AEE9C2085A87600EBFF88 /* MFPictureBrowser.m */, 4A5AEE9E2085BC7500EBFF88 /* MFPictureView.h */, 4A5AEE9F2085BC7500EBFF88 /* MFPictureView.m */, 4A60D12A2091733C00612995 /* UIImageView+TransitionImage.h */, 4A60D12B2091733C00612995 /* UIImageView+TransitionImage.m */, 4A60D133209184B600612995 /* UIImage+ForceDecoded.h */, 4A60D134209184B600612995 /* UIImage+ForceDecoded.m */, 4AE0C922208EBAE900A2B2D5 /* MFPictureModelProtocol.h */, ); path = MFPictureBrowser; sourceTree = ""; }; A77A5EFA813399DFB28FEDF5 /* Frameworks */ = { isa = PBXGroup; children = ( A3403644A2EFF9B61054AEB2 /* libPods-MFPictureBrowserDemo.a */, ); name = Frameworks; 
sourceTree = ""; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ 4A5AEE802085987300EBFF88 /* MFPictureBrowserDemo */ = { isa = PBXNativeTarget; buildConfigurationList = 4A5AEE972085987300EBFF88 /* Build configuration list for PBXNativeTarget "MFPictureBrowserDemo" */; buildPhases = ( CD5B2E9B46AE3F8D5A629EC8 /* [CP] Check Pods Manifest.lock */, 4A5AEE7D2085987300EBFF88 /* Sources */, 4A5AEE7E2085987300EBFF88 /* Frameworks */, 4A5AEE7F2085987300EBFF88 /* Resources */, ); buildRules = ( ); dependencies = ( ); name = MFPictureBrowserDemo; productName = MFPictureBrowserDemo; productReference = 4A5AEE812085987300EBFF88 /* MFPictureBrowserDemo.app */; productType = "com.apple.product-type.application"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ 4A5AEE792085987300EBFF88 /* Project object */ = { isa = PBXProject; attributes = { LastUpgradeCheck = 0910; ORGANIZATIONNAME = "张冬冬"; TargetAttributes = { 4A5AEE802085987300EBFF88 = { CreatedOnToolsVersion = 9.1; ProvisioningStyle = Automatic; }; }; }; buildConfigurationList = 4A5AEE7C2085987300EBFF88 /* Build configuration list for PBXProject "MFPictureBrowserDemo" */; compatibilityVersion = "Xcode 8.0"; developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( en, Base, ); mainGroup = 4A5AEE782085987300EBFF88; productRefGroup = 4A5AEE822085987300EBFF88 /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( 4A5AEE802085987300EBFF88 /* MFPictureBrowserDemo */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ 4A5AEE7F2085987300EBFF88 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( 4AD36532209B0A67005F9089 /* 1.webp in Resources */, 4AE0C92F208F1E5500A2B2D5 /* 3.gif in Resources */, 4A5AEE912085987300EBFF88 /* LaunchScreen.storyboard in Resources */, 4AE0C92D208F046800A2B2D5 /* 2.gif in Resources */, 4A24835D209C5BF7004C69EC /* 2.webp in Resources */, 5E53B849208642AE007480C7 /* Image.xcassets in 
Resources */, 4AE0C929208F01FC00A2B2D5 /* 1.gif in Resources */, 4A5AEE8C2085987300EBFF88 /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ CD5B2E9B46AE3F8D5A629EC8 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( "${PODS_PODFILE_DIR_PATH}/Podfile.lock", "${PODS_ROOT}/Manifest.lock", ); name = "[CP] Check Pods Manifest.lock"; outputPaths = ( "$(DERIVED_FILE_DIR)/Pods-MFPictureBrowserDemo-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ 4A5AEE7D2085987300EBFF88 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 4A5AEE9D2085A87600EBFF88 /* MFPictureBrowser.m in Sources */, 4A5AEEA32085C11B00EBFF88 /* MFDisplayPhotoCollectionViewCell.m in Sources */, 5E53B83F20863DD5007480C7 /* ListViewController.m in Sources */, 4AD36535209BE6B9005F9089 /* MFRunLoopDistribution.m in Sources */, 4A5AEEA02085BC7500EBFF88 /* MFPictureView.m in Sources */, 4A5AEE942085987300EBFF88 /* main.m in Sources */, 5E53B84520863F14007480C7 /* LocalImageViewController.m in Sources */, 4A60D135209184B600612995 /* UIImage+ForceDecoded.m in Sources */, 4A29A8FD209C37020029A378 /* RemoteImageViewController.m in Sources */, 4A5AEE862085987300EBFF88 /* AppDelegate.m in Sources */, 
4A60D12C2091733C00612995 /* UIImageView+TransitionImage.m in Sources */, 4AE0C925208EBF6800A2B2D5 /* MFPictureModel.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXVariantGroup section */ 4A5AEE8A2085987300EBFF88 /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( 4A5AEE8B2085987300EBFF88 /* Base */, ); name = Main.storyboard; sourceTree = ""; }; 4A5AEE8F2085987300EBFF88 /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; children = ( 4A5AEE902085987300EBFF88 /* Base */, ); name = LaunchScreen.storyboard; sourceTree = ""; }; /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ 4A5AEE952085987300EBFF88 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; 
GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", "$(inherited)", ); GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 11.1; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; }; name = Debug; }; 4A5AEE962085987300EBFF88 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 
11.1; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; VALIDATE_PRODUCT = YES; }; name = Release; }; 4A5AEE982085987300EBFF88 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 03F4406D2CD57D8E4838F5A5 /* Pods-MFPictureBrowserDemo.debug.xcconfig */; buildSettings = { CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = 5D33D27VC6; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/MFPictureBrowserDemo/MFPictureBrowser", ); GCC_PREPROCESSOR_DEFINITIONS = ( "$(inherited)", "COCOAPODS=1", "PIN_WEBP=1", ); INFOPLIST_FILE = MFPictureBrowserDemo/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 9.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = net.microfeel.MFPictureBrowserDemo; PRODUCT_NAME = "$(TARGET_NAME)"; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 4A5AEE992085987300EBFF88 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 8E2D0E105573B85353E3FBA2 /* Pods-MFPictureBrowserDemo.release.xcconfig */; buildSettings = { CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = 5D33D27VC6; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/MFPictureBrowserDemo/MFPictureBrowser", ); GCC_PREPROCESSOR_DEFINITIONS = ( "$(inherited)", "COCOAPODS=1", "PIN_WEBP=1", ); INFOPLIST_FILE = MFPictureBrowserDemo/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 9.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = net.microfeel.MFPictureBrowserDemo; PRODUCT_NAME = "$(TARGET_NAME)"; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ 4A5AEE7C2085987300EBFF88 /* Build configuration list for PBXProject "MFPictureBrowserDemo" */ = { isa = XCConfigurationList; buildConfigurations = ( 4A5AEE952085987300EBFF88 /* Debug */, 4A5AEE962085987300EBFF88 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 4A5AEE972085987300EBFF88 /* Build 
configuration list for PBXNativeTarget "MFPictureBrowserDemo" */ = { isa = XCConfigurationList; buildConfigurations = ( 4A5AEE982085987300EBFF88 /* Debug */, 4A5AEE992085987300EBFF88 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; rootObject = 4A5AEE792085987300EBFF88 /* Project object */; }
================================================
FILE: MFPictureBrowserDemo/MFPictureBrowserDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
================================================
FILE: MFPictureBrowserDemo/MFPictureBrowserDemo.xcworkspace/contents.xcworkspacedata
================================================
================================================
FILE: MFPictureBrowserDemo/Podfile
================================================
# Uncomment the next line to define a global platform for your project
# platform :ios, '9.0'
target 'MFPictureBrowserDemo' do
# Uncomment the next line if you're using Swift or would like to use dynamic frameworks
# use_frameworks!
# Pods for MFPictureBrowserDemo
pod 'YYWebImage'
pod 'YYImage/WebP'
pod 'MFCategory'
end
================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFCookiesManager.h
================================================
//
// MFCookiesManager.h
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
//
#import
@interface MFCookiesManager : NSObject
+ (void)saveCookies;
+ (void)loadCookiesWithName:(NSString *)cookiesName;
@end
================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFCookiesManager.m
================================================
//
// MFCookiesManager.m
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
// #import "MFCookiesManager.h" static NSString *cookies = @"cookies"; @implementation MFCookiesManager + (void)saveCookies { NSData *cookiesData = [NSKeyedArchiver archivedDataWithRootObject:[[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies]]; NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; [defaults setObject:cookiesData forKey:cookies]; [defaults synchronize]; } + (void)loadCookiesWithName:(NSString *)cookiesName { NSArray *cookieList = [NSKeyedUnarchiver unarchiveObjectWithData:[[NSUserDefaults standardUserDefaults] objectForKey:cookies]]; NSHTTPCookieStorage *cookieStorage = [NSHTTPCookieStorage sharedHTTPCookieStorage]; for (NSHTTPCookie *cookie in cookieList){ if ([cookie.name isEqualToString:cookiesName]) { [cookieStorage setCookie:cookie]; } } } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFDeviceManager.h ================================================ // // MFDeviceManager.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import #import @interface MFDeviceManager : NSObject //Screen scale + (CGFloat)scale; //生成GUID + (NSString *)guid; //磁盘全部空间 + (CGFloat)diskOfAllSizeGBytes; //磁盘可用空间 + (CGFloat)diskOfFreeSizeGBytes; //获取文件大小 + (long long)fileSizeAtPath:(NSString *)filePath; //获取文件夹下所有文件的大小 + (long long)folderSizeAtPath:(NSString *)folderPath; //获取设备 IP 地址 + (NSString *)getIPAddress; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFDeviceManager.m ================================================ // // MFDeviceManager.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import "MFDeviceManager.h" #import #import @implementation MFDeviceManager + (CGFloat)scale{ if ([[UIDevice currentDevice].systemVersion floatValue]>=8.0) { return [UIScreen mainScreen].nativeScale; } else { return [UIScreen mainScreen].scale; } } + (NSString *)guid { CFUUIDRef theUUID = CFUUIDCreate(NULL); CFStringRef theString = CFUUIDCreateString(NULL, theUUID); NSString *unique = [NSString stringWithString:(__bridge id)theString]; CFRelease(theString); CFRelease(theUUID); // Cleanup CF objects return unique; } //磁盘全部空间 + (CGFloat)diskOfAllSizeGBytes{ CGFloat size = 0.0; NSError *error; NSDictionary *dic = [[NSFileManager defaultManager] attributesOfFileSystemForPath:NSHomeDirectory() error:&error]; if (error) { #ifdef DEBUG NSLog(@"error: %@", error.localizedDescription); #endif }else{ NSNumber *number = [dic objectForKey:NSFileSystemSize]; size = [number floatValue]/1024/1024/1024; } return size; } //磁盘可用空间 + (CGFloat)diskOfFreeSizeGBytes{ CGFloat size = 0.0; NSError *error; NSDictionary *dic = [[NSFileManager defaultManager] attributesOfFileSystemForPath:NSHomeDirectory() error:&error]; if (error) { #ifdef DEBUG NSLog(@"error: %@", error.localizedDescription); #endif }else{ NSNumber *number = [dic objectForKey:NSFileSystemFreeSize]; size = [number floatValue]/1024/1024/1024; } return size; } //获取文件大小 + (long long)fileSizeAtPath:(NSString *)filePath{ NSFileManager *fileManager = [NSFileManager defaultManager]; if (![fileManager fileExistsAtPath:filePath]) return 0; return [[fileManager attributesOfItemAtPath:filePath error:nil] fileSize]; } //获取文件夹下所有文件的大小 + (long long)folderSizeAtPath:(NSString *)folderPath{ NSFileManager *fileManager = [NSFileManager defaultManager]; if (![fileManager fileExistsAtPath:folderPath]) return 0; NSEnumerator *filesEnumerator = [[fileManager subpathsAtPath:folderPath] objectEnumerator]; NSString *fileName; long long folerSize = 0; while ((fileName = [filesEnumerator nextObject]) != nil) { NSString *filePath = [folderPath 
stringByAppendingPathComponent:fileName]; folerSize += [self fileSizeAtPath:filePath]; } return folerSize; } //获取设备 IP 地址 + (NSString *)getIPAddress { NSString *address = @"error"; struct ifaddrs *interfaces = NULL; struct ifaddrs *temp_addr = NULL; int success = 0; success = getifaddrs(&interfaces); if (success == 0) { temp_addr = interfaces; while(temp_addr != NULL) { if(temp_addr->ifa_addr->sa_family == AF_INET) { if([[NSString stringWithUTF8String:temp_addr->ifa_name] isEqualToString:@"en0"]) { address = [NSString stringWithUTF8String:inet_ntoa(((struct sockaddr_in *)temp_addr->ifa_addr)->sin_addr)]; } } temp_addr = temp_addr->ifa_next; } } freeifaddrs(interfaces); return address; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFManager.h ================================================ // // MFManager.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import #import @interface MFManager : NSObject + (void)animatedTransferKeyWindow:(UIViewController *)controller; + (void)jumpAppStoreWithAppID:(NSString *)appID; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFManager.m ================================================ // // MFManager.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import "MFManager.h" @implementation MFManager + (void)animatedTransferKeyWindow:(UIViewController *)controller { [UIView transitionWithView:[UIApplication sharedApplication].keyWindow duration:0.3f options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ BOOL oldState = [UIView areAnimationsEnabled]; [UIView setAnimationsEnabled:NO]; [UIApplication sharedApplication].keyWindow.rootViewController = controller; [UIView setAnimationsEnabled:oldState]; } completion:nil]; } + (void)jumpAppStoreWithAppID:(NSString *)appID { NSString *urlString = [NSString stringWithFormat:@"itms-apps://itunes.apple.com/app/id%@", appID]; NSURL *url = [NSURL URLWithString:urlString]; if (@available(iOS 10, *)) { [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; }else { [[UIApplication sharedApplication] openURL:url]; } } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFREManager.h ================================================ // // MFREManager.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface MFREManager : NSObject //判断手机号码格式是否正确 + (BOOL)valiMobile:(NSString *)mobile; //利用正则表达式验证 + (BOOL)isAvailableEmail:(NSString *)email; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFREManager.m ================================================ // // MFREManager.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import "MFREManager.h" @implementation MFREManager //判断手机号码格式是否正确 + (BOOL)valiMobile:(NSString *)mobile{ mobile = [mobile stringByReplacingOccurrencesOfString:@" " withString:@""]; if (mobile.length != 11) { return NO; }else{ /** * 移动号段正则表达式 */ NSString *CM_NUM = @"^((13[4-9])|(147)|(15[0-2,7-9])|(178)|(18[2-4,7-8]))\\d{8}|(1705)\\d{7}$"; /** * 联通号段正则表达式 */ NSString *CU_NUM = @"^((13[0-2])|(145)|(15[5-6])|(176)|(18[5,6]))\\d{8}|(1709)\\d{7}$"; /** * 电信号段正则表达式 */ NSString *CT_NUM = @"^((133)|(153)|(177)|(18[0,1,9]))\\d{8}$"; NSPredicate *pred1 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CM_NUM]; BOOL isMatch1 = [pred1 evaluateWithObject:mobile]; NSPredicate *pred2 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CU_NUM]; BOOL isMatch2 = [pred2 evaluateWithObject:mobile]; NSPredicate *pred3 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CT_NUM]; BOOL isMatch3 = [pred3 evaluateWithObject:mobile]; if (isMatch1 || isMatch2 || isMatch3) { return YES; }else{ return NO; } } } //利用正则表达式验证 + (BOOL)isAvailableEmail:(NSString *)email{ NSString *emailRegex = @"[A-Z0-9a-z._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,4}"; NSPredicate *emailTest = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", emailRegex]; return [emailTest evaluateWithObject:email]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFSavePhotoManager.h ================================================ // // MFSavePhotoManager.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import #import typedef void (^successHandler)(void); typedef void (^failureHandler)(NSError * _Nullable error); @interface MFSavePhotoManager : NSObject ///保存普通类型的 image eg: png, jpg + (void)saveImage:(UIImage *_Nullable)image success:(successHandler _Nullable )successHandle failure:(failureHandler _Nullable )failureHanlde; ///保存 gif 类型的图片. 
imageData 可从 SD 或 YY 框架的缓存数据获取 /* YYWebImageManager *manager = [YYWebImageManager sharedManager]; NSString *key = [manager cacheKeyForURL:url]; NSData *data = [manager.cache getImageDataForKey:key]; */ + (void)saveImageData:(NSData *_Nullable)imageData success:(successHandler _Nullable )successHandle failure:(failureHandler _Nullable )failureHanlde; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/Manager/MFSavePhotoManager.m ================================================ // // MFSavePhotoManager.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "MFSavePhotoManager.h" #import @implementation MFSavePhotoManager + (void)saveImage:(UIImage *)image success:(successHandler)successHandle failure:(failureHandler)failureHanlde { dispatch_async(dispatch_get_main_queue(), ^{ [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{ [PHAssetChangeRequest creationRequestForAssetFromImage:image]; } completionHandler:^(BOOL success, NSError * _Nullable error) { if (success && successHandle) { successHandle(); }else if (error && failureHanlde) { failureHanlde(error); } }]; }); } + (void)saveImageData:(NSData *)imageData success:(successHandler)successHandle failure:(failureHandler)failureHanlde { dispatch_async(dispatch_get_main_queue(), ^{ [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{ PHAssetResourceCreationOptions *options = [[PHAssetResourceCreationOptions alloc] init]; [[PHAssetCreationRequest creationRequestForAsset] addResourceWithType:PHAssetResourceTypePhoto data:imageData options:options]; } completionHandler:^(BOOL success, NSError * _Nullable error) { if (success && successHandle) { successHandle(); }else if (error && failureHanlde) { failureHanlde(error); } }]; }); } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSArrayCategory/NSArray+Distinct.h ================================================ // // 
NSArray+Distinct.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSArray (Distinct) + (NSArray *)distinctUnionOfArray:(NSArray *)originArray; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSArrayCategory/NSArray+Distinct.m ================================================ // // NSArray+Distinct.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "NSArray+Distinct.h" @implementation NSArray (Distinct) + (NSArray *)distinctUnionOfArray:(NSArray *)originArray { return [originArray valueForKeyPath:@"@distinctUnionOfObjects.self"]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSDataCategory/NSData+MFHexString.h ================================================ // // NSData+MFHexString.m // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSData (MFHexString) + (NSData *)convertedToDataFromHexString:(NSString *)hexString; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSDataCategory/NSData+MFHexString.m ================================================ // // NSData+MFHexString.m // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import "NSData+MFHexString.h" @implementation NSData (MFHexString) + (NSData *)convertedToDataFromHexString:(NSString *)hexString { if (!hexString || [hexString length] == 0) { return nil; } NSMutableData *hexData = [[NSMutableData alloc] initWithCapacity:20]; NSRange range; if ([hexString length] % 2 == 0) { range = NSMakeRange(0, 2); } else { range = NSMakeRange(0, 1); } for (NSInteger i = range.location; i < [hexString length]; i += 2) { unsigned int anInt; NSString *hexCharString = [hexString substringWithRange:range]; NSScanner *scanner = [[NSScanner alloc] initWithString:hexCharString]; [scanner scanHexInt:&anInt]; NSData *entity = [[NSData alloc] initWithBytes:&anInt length:1]; [hexData appendData:entity]; range.location += range.length; range.length = 2; } return hexData; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSDateFormatterCategory/NSDateFormatter+Cache.h ================================================ // // NSDateFormatter+Cache.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSDateFormatter (Cache) + (instancetype)sharedInstance; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSDateFormatterCategory/NSDateFormatter+Cache.m ================================================ // // NSDateFormatter+Cache.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "NSDateFormatter+Cache.h" @implementation NSDateFormatter (Cache) + (instancetype)sharedInstance { static NSDateFormatter *formatter = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ formatter = [[NSDateFormatter alloc] init]; }); return formatter; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+File.h ================================================ // // NSString+File.h // // Created by 张冬冬. 
// Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSString (File) + (instancetype)uuid; + (instancetype)documentPath; + (instancetype)homePath; + (instancetype)tempPath; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+File.m ================================================ // // NSString+File.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "NSString+File.h" @implementation NSString (File) + (instancetype)uuid { CFUUIDRef uuid = CFUUIDCreate(kCFAllocatorDefault); CFStringRef strUuid = CFUUIDCreateString(kCFAllocatorDefault,uuid); NSString * str = [NSString stringWithString:(__bridge NSString *)strUuid]; CFRelease(strUuid); CFRelease(uuid); return str; } + (instancetype)documentPath { NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); NSString *documentsDirectory = [paths objectAtIndex:0]; return documentsDirectory; } + (instancetype)homePath { return NSHomeDirectory(); } + (instancetype)tempPath { return NSTemporaryDirectory(); } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+MFBase64Encoder.h ================================================ // // NSString+MFBase64Encoder.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSString (MFBase64Encoder) - (NSString *)base64Encoded; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+MFBase64Encoder.m ================================================ // // NSString+MFBase64Encoder.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import "NSString+MFBase64Encoder.h" @implementation NSString (MFBase64Encoder) - (NSString *)base64Encoded { NSData *data = [self dataUsingEncoding:NSUTF8StringEncoding]; return [data base64EncodedStringWithOptions:0]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+MFMD5Encoder.h ================================================ // // NSString+MFMD5Encoder.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface NSString (MFMD5Encoder) - (NSString *)md5String; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSStringCategory/NSString+MFMD5Encoder.m ================================================ // // NSString+MFMD5Encoder.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "NSString+MFMD5Encoder.h" #import @implementation NSString (MFMD5Encoder) - (NSString *)md5String { const char *str = [(NSString *)self UTF8String]; unsigned char result[CC_MD5_DIGEST_LENGTH] = {0}; CC_MD5(str, (CC_LONG)strlen(str), result); NSMutableString *ret = [NSMutableString string]; for (int i = 0; i < CC_MD5_DIGEST_LENGTH; i++) { [ret appendFormat:@"%02x", result[i]]; } return ret; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSTimerCategory/NSTimer+MFWeakTimer.h ================================================ // // NSTimer+MFWeakTimer.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
//
#import <Foundation/Foundation.h>

@interface NSTimer (MFWeakTimer)

/// Schedules a timer that invokes `block` instead of retaining a target.
/// On pre-iOS-10 systems the timer's target is the NSTimer class itself, so
/// the timer never retains the object owning the block's captured state.
+ (NSTimer *_Nonnull)mf_scheduledTimerWithTimeInterval:(NSTimeInterval)interval
                                               repeats:(BOOL)repeats
                                                 block:(void (^_Nonnull)(NSTimer * _Nonnull timer))block;

@end

================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/NSTimerCategory/NSTimer+MFWeakTimer.m
================================================
//
// NSTimer+MFWeakTimer.m
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
//
#import "NSTimer+MFWeakTimer.h"

@implementation NSTimer (MFWeakTimer)

+ (NSTimer *_Nonnull)mf_scheduledTimerWithTimeInterval:(NSTimeInterval)interval
                                               repeats:(BOOL)repeats
                                                 block:(void (^_Nonnull)(NSTimer * _Nonnull timer))block {
    if (@available(iOS 10.0, *)) {
        return [self scheduledTimerWithTimeInterval:interval repeats:repeats block:[block copy]];
    } else {
        // Target is the NSTimer class object, so no instance is retained by the runloop;
        // the block is stashed in userInfo and invoked by the trampoline below.
        return [self scheduledTimerWithTimeInterval:interval
                                             target:self
                                           selector:@selector(timerhandleInvoke:)
                                           userInfo:[block copy]
                                            repeats:repeats];
    }
}

/// Trampoline for the pre-iOS-10 path: pulls the stored block out of userInfo.
+ (void)timerhandleInvoke:(NSTimer *)timer {
    // FIX: userInfo holds a block of type void (^)(NSTimer *), but the original read
    // it as void (^)(void) and invoked it with no argument — undefined behavior when
    // the block body uses its `timer` parameter. Type it correctly and pass the timer.
    void (^block)(NSTimer *timer) = timer.userInfo;
    if (block) {
        block(timer);
    }
}

@end

================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIColorCategory/UIColor+MFHexColor.h
================================================
//
// UIColor+MFHexColor.h
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
//
#import <UIKit/UIKit.h>

@interface UIColor (MFHexColor)
+ (UIColor *)colorWithHexString:(NSString *)hexString;
+ (UIColor *)colorWithHexString:(NSString *)hexString alpha:(CGFloat)alpha;
- (NSString *)hexString;
@end

================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIColorCategory/UIColor+MFHexColor.m
================================================
//
// UIColor+MFHexColor.m
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
//
#import "UIColor+MFHexColor.h"

@implementation UIColor (MFHexColor)

/// Parses @"RRGGBB" (optionally prefixed with "#" or "0X", surrounding whitespace
/// ignored) into a UIColor with the given alpha. Returns clearColor for any string
/// that does not reduce to exactly 6 hex digits.
+ (UIColor *)colorWithHexString:(NSString *)hexString alpha:(CGFloat)alpha {
    NSString *cString = [[hexString stringByTrimmingCharactersInSet:
                          [NSCharacterSet whitespaceAndNewlineCharacterSet]] uppercaseString];
    // hexString should be 6 to 8 characters
    if ([cString length] < 6) {
        return [UIColor clearColor];
    }
    // strip an "0X" prefix, if present
    if ([cString hasPrefix:@"0X"]) cString = [cString substringFromIndex:2];
    // strip a "#" prefix, if present
    if ([cString hasPrefix:@"#"]) cString = [cString substringFromIndex:1];
    if ([cString length] != 6) return [UIColor clearColor];

    // split into R / G / B two-character pairs
    NSRange range = NSMakeRange(0, 2);
    NSString *rString = [cString substringWithRange:range];
    range.location = 2;
    NSString *gString = [cString substringWithRange:range];
    range.location = 4;
    NSString *bString = [cString substringWithRange:range];

    unsigned int r, g, b;
    [[NSScanner scannerWithString:rString] scanHexInt:&r];
    [[NSScanner scannerWithString:gString] scanHexInt:&g];
    [[NSScanner scannerWithString:bString] scanHexInt:&b];
    return [UIColor colorWithRed:((float)r / 255.0f)
                           green:((float)g / 255.0f)
                            blue:((float)b / 255.0f)
                           alpha:alpha];
}

/// Convenience: fully opaque color from @"RRGGBB".
+ (UIColor *)colorWithHexString:(NSString *)hexString {
    return [UIColor colorWithHexString:hexString alpha:1.0];
}

/// @"#RRGGBB" representation of the receiver.
- (NSString *)hexString {
    // FIX: blindly reading CGColorGetComponents()[0..2] assumed an RGB color space;
    // grayscale colors carry only {white, alpha}, so components[2] was an
    // out-of-bounds read. Prefer getRed:green:blue:alpha:, which converts from
    // compatible color spaces, and fall back safely for anything else.
    CGFloat r = 0, g = 0, b = 0, a = 0;
    if (![self getRed:&r green:&g blue:&b alpha:&a]) {
        const CGFloat *components = CGColorGetComponents(self.CGColor);
        size_t count = CGColorGetNumberOfComponents(self.CGColor);
        if (count >= 3) {
            r = components[0];
            g = components[1];
            b = components[2];
        } else if (count >= 1) {
            r = g = b = components[0]; // grayscale: replicate the white level
        }
    }
    // FIX: lround (not lroundf) — CGFloat is double on 64-bit platforms.
    return [NSString stringWithFormat:@"#%02lX%02lX%02lX",
            lround(r * 255), lround(g * 255), lround(b * 255)];
}

@end

================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIColorCategory/UIColor+MFLDColor.h
================================================
//
// UIColor+MFLDColor.h
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
// #import @interface UIColor (MFLDColor) - (BOOL) isLightColor; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIColorCategory/UIColor+MFLDColor.m ================================================ // // UIColor+MFLDColor.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "UIColor+MFLDColor.h" @implementation UIColor (MFLDColor) - (BOOL)isLightColor { CGFloat components[3]; [self p_getRGBComponents:components forColor:self]; CGFloat num = components[0] + components[1] + components[2]; if(num < 382) return NO; else return YES; } // get rgb value - (void)p_getRGBComponents:(CGFloat [3])components forColor:(UIColor *)color { #if __IPHONE_OS_VERSION_MAX_ALLOWED > __IPHONE_6_1 int bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast; #else int bitmapInfo = kCGImageAlphaPremultipliedLast; #endif CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); unsigned char resultingPixel[4]; CGContextRef context = CGBitmapContextCreate(&resultingPixel, 1, 1, 8, 4, rgbColorSpace, bitmapInfo); CGContextSetFillColorWithColor(context, [color CGColor]); CGContextFillRect(context, CGRectMake(0, 0, 1, 1)); CGContextRelease(context); CGColorSpaceRelease(rgbColorSpace); for (int component = 0; component < 3; component++) { components[component] = resultingPixel[component]; } } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIImageCategory/UIImage+Current.h ================================================ // // UIImage+Current.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface UIImage (Current) + (UIImage *)getImageFromCurrentView:(UIView *)view; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIImageCategory/UIImage+Current.m ================================================ // // UIImage+Current.m // // Created by 张冬冬. 
// Copyright © 2018年 张冬冬. All rights reserved. // #import "UIImage+Current.h" @implementation UIImage (Current) + (UIImage *)getImageFromCurrentView:(UIView *)view { UIGraphicsBeginImageContextWithOptions(view.bounds.size, YES, 0.0); [view.layer renderInContext:UIGraphicsGetCurrentContext()]; UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return image; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFExpandTouchSize.h ================================================ // // UIView+MFExpandTouchSize.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import @interface UIView (MFExpandTouchSize) @property (nonatomic,assign) UIEdgeInsets expandTouchInset; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFExpandTouchSize.m ================================================ // // UIView+MFExpandTouchSize.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
//
#import "UIView+MFExpandTouchSize.h"
#import <objc/runtime.h>

/// Classic swizzle helper: exchanges the implementations of `orig` and
/// `replacement` on `cls`, handling the inherited-method case.
void expandTouch_swizzingMethod(Class cls, SEL orig, SEL replacement) {
    Method origMethod = class_getInstanceMethod(cls, orig);
    Method newMethod = class_getInstanceMethod(cls, replacement);
    // If `cls` only inherits `orig`, add the replacement IMP under the original
    // selector and point the replacement selector at the inherited IMP;
    // otherwise a plain exchange suffices.
    if (class_addMethod(cls, orig,
                        method_getImplementation(newMethod),
                        method_getTypeEncoding(newMethod))) {
        class_replaceMethod(cls, replacement,
                            method_getImplementation(origMethod),
                            method_getTypeEncoding(origMethod));
    } else {
        method_exchangeImplementations(origMethod, newMethod);
    }
}

@implementation UIView (MFExpandTouchSize)

+ (void)load {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        expandTouch_swizzingMethod([self class],
                                   @selector(pointInside:withEvent:),
                                   @selector(mf_expandTouchPointInside:withEvent:));
    });
}

/// Swizzled replacement for -pointInside:withEvent:; after the exchange,
/// calling mf_expandTouchPointInside: invokes the ORIGINAL UIKit implementation.
- (BOOL)mf_expandTouchPointInside:(CGPoint)point withEvent:(UIEvent *)event {
    if (UIEdgeInsetsEqualToEdgeInsets(self.expandTouchInset, UIEdgeInsetsZero) ||
        self.hidden ||
        ([self isKindOfClass:[UIControl class]] && !((UIControl *)self).enabled)) {
        // FIX: the original called the swizzled (i.e. original) implementation here
        // but DISCARDED the result and fell through to the expanded-rect test below,
        // so the expansion applied even with a zero inset or a hidden/disabled view.
        // Return the original implementation's answer instead.
        return [self mf_expandTouchPointInside:point withEvent:event];
    }
    CGRect hitRect = UIEdgeInsetsInsetRect(self.bounds, self.expandTouchInset);
    // clamp: a positive inset larger than the bounds would yield a negative size
    hitRect.size.width = MAX(hitRect.size.width, 0);
    hitRect.size.height = MAX(hitRect.size.height, 0);
    return CGRectContainsPoint(hitRect, point);
}

- (void)setExpandTouchInset:(UIEdgeInsets)expandTouchInset {
    objc_setAssociatedObject(self, @selector(expandTouchInset),
                             [NSValue valueWithUIEdgeInsets:expandTouchInset],
                             OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (UIEdgeInsets)expandTouchInset {
    // messaging nil (no associated value yet) yields UIEdgeInsetsZero
    return [objc_getAssociatedObject(self, _cmd) UIEdgeInsetsValue];
}

@end

================================================
FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFFrame.h
================================================
//
// UIView+MFFrame.h
//
// Created by 张冬冬.
// Copyright © 2018年 张冬冬. All rights reserved.
// #import @interface UIView (MFFrame) @property (nonatomic, assign) CGFloat x; @property (nonatomic, assign) CGFloat y; @property (nonatomic, assign) CGFloat width; @property (nonatomic, assign) CGFloat height; @property (nonatomic, assign) CGSize size; @property (nonatomic, assign) CGPoint origin; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFFrame.m ================================================ // // UIView+MFFrame.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "UIView+MFFrame.h" @implementation UIView (MFFrame) - (CGFloat)x { return self.frame.origin.x; } - (CGFloat)y { return self.frame.origin.y; } - (CGFloat)width { return self.frame.size.width; } - (CGFloat)height { return self.frame.size.height; } - (void)setX:(CGFloat)x { self.frame = CGRectMake(x, self.y, self.width, self.height); } - (void)setY:(CGFloat)y { self.frame = CGRectMake(self.x, y, self.width, self.height); } - (void)setWidth:(CGFloat)width { self.frame = CGRectMake(self.x, self.y, width, self.height); } - (void)setHeight:(CGFloat)height { self.frame = CGRectMake(self.x, self.y, self.width, height); } - (CGSize)size { return self.frame.size; } - (void)setSize:(CGSize)size { self.frame = CGRectMake(self.x, self.y, size.width, size.height); } - (CGPoint)origin { return self.frame.origin; } - (void)setOrigin:(CGPoint)origin { self.frame = CGRectMake(origin.x, origin.y, self.width, self.height); } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFSnapshot.h ================================================ // // UIView+MFSnapshot.h // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. 
// #import @interface UIView (MFSnapshot) - (UIImage *)viewSnapshot; - (UIImage *)layerSnapshot; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+MFSnapshot.m ================================================ // // UIView+MFSnapshot.m // // Created by 张冬冬. // Copyright © 2018年 张冬冬. All rights reserved. // #import "UIView+MFSnapshot.h" @implementation UIView (MFSnapshot) - (UIImage *)viewSnapshot { UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, [UIScreen mainScreen].scale); [self drawViewHierarchyInRect:self.bounds afterScreenUpdates:NO]; UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return image; } - (UIImage *)layerSnapshot { UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, [UIScreen mainScreen].scale); [self.layer renderInContext:UIGraphicsGetCurrentContext()]; UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return image; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+Shadow.h ================================================ // // UIView+Shadow.m // Created by pipelining. // Copyright © 2018年 GodzzZZZ. All rights reserved. // #import @interface UIView (Shadow) - (void)addShadowWithColor:(UIColor *)shadowColor; - (void)removeShadow; @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/Classes/UIViewCategory/UIView+Shadow.m ================================================ // // UIView+Shadow.m // Created by pipelining. // Copyright © 2018年 GodzzZZZ. All rights reserved. 
// #import "UIView+Shadow.h" @implementation UIView (Shadow) - (void)addShadowWithColor:(UIColor *)shadowColor { self.layer.shadowColor = shadowColor.CGColor; self.layer.shadowRadius = 8; self.layer.shadowOpacity = 0.7; self.layer.shadowOffset = CGSizeMake(0, 5); } - (void)removeShadow { self.layer.shadowOpacity = 0; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/LICENSE ================================================ MIT License Copyright (c) 2018 GodzzZZZ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MFPictureBrowserDemo/Pods/MFCategory/README.md ================================================ # MFCategory OC常用分类 ================================================ FILE: MFPictureBrowserDemo/Pods/Pods.xcodeproj/project.pbxproj ================================================ // !$*UTF8*$! 
{ archiveVersion = 1; classes = { }; objectVersion = 48; objects = { /* Begin PBXBuildFile section */ 02E865D72099D2FD1243697923B2B4B7 /* YYKVStorage.h in Headers */ = {isa = PBXBuildFile; fileRef = 0D1FCF67F9946129754D753F5504C711 /* YYKVStorage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 0413E125ABFFCFF246ED9ADACC43B7E4 /* MFREManager.h in Headers */ = {isa = PBXBuildFile; fileRef = FCA07E0F4B89C8B69DE604568E3501D9 /* MFREManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; 042EEBE27D87F926314C3D65D157D3BE /* YYImageCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 46A4B00C9EEC3C8DB459978CEAA0B882 /* YYImageCoder.m */; }; 05CA2AF1247C77B1A4C0634ADE891640 /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4418563B9EF3CB95C8F65879F546D44C /* ImageIO.framework */; }; 07DA83207C6539EB33761509DAD155F8 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 67D6E55E61A18B9FCEE8B47E22D4270C /* CoreFoundation.framework */; }; 083CC59EA97F29E4893A842CEC47FAFB /* MFSavePhotoManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 414CC9D9152EB41690188B0A94CE53D8 /* MFSavePhotoManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; 0C1F5495E2C48A6CDBE80ED416284D59 /* NSArray+Distinct.m in Sources */ = {isa = PBXBuildFile; fileRef = 9E0F3EF807998BDC69BBCD6C5D8C3E52 /* NSArray+Distinct.m */; }; 0E5C782DD1565CBFA38A7BFEB2AFE5AA /* NSTimer+MFWeakTimer.m in Sources */ = {isa = PBXBuildFile; fileRef = E81C4D19016A649D58FD3E31FE95068A /* NSTimer+MFWeakTimer.m */; }; 0FC58C2058A4BB7DCBDCB8D3EB3F69FA /* NSString+File.h in Headers */ = {isa = PBXBuildFile; fileRef = CAB7A2005DA56FA086ABEE79F3DBF423 /* NSString+File.h */; settings = {ATTRIBUTES = (Project, ); }; }; 105CD28A1A183013BDF2E9DC41C13025 /* MFManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 1AD931227771EBB1CAB963AEB7985225 /* MFManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; 10BAAA9BF7963F6E56402B76F8794D99 /* UIView+MFSnapshot.m in Sources */ = 
{isa = PBXBuildFile; fileRef = 59E34C1838F6D6759741ABB91258D932 /* UIView+MFSnapshot.m */; }; 14A3306CD2222ABCED413D48BBF24307 /* NSString+MFMD5Encoder.m in Sources */ = {isa = PBXBuildFile; fileRef = B965A2D3DAE4E6CF4A69C2BA4CA30657 /* NSString+MFMD5Encoder.m */; }; 1548EA871BB792DFD1ADB5D554D5B93B /* YYWebImageManager.m in Sources */ = {isa = PBXBuildFile; fileRef = F5D1D555371D69FF2516CDB195FB48A0 /* YYWebImageManager.m */; }; 18D8488FB14EC002DEFA2CBE1B9D6DF1 /* UIImage+Current.m in Sources */ = {isa = PBXBuildFile; fileRef = 33126ACC412964BD7E2E2DB5D056193A /* UIImage+Current.m */; }; 1A8AD5A82AB27D6B5FC4F49449E21889 /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A90BE5491E0D2F0314AC354A80EA7053 /* AssetsLibrary.framework */; }; 1D333DCFE52ED903A939788CD835B780 /* YYImageCache.m in Sources */ = {isa = PBXBuildFile; fileRef = F3A0EC16B9709D89AE00FDE77D3BB41B /* YYImageCache.m */; }; 1F1B08FB73758E0F1E3004009D7F610E /* CALayer+YYWebImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 794D05F11A52A3A3967BD8230E27F9CE /* CALayer+YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 2104FA68551B02221EEA410ECDF1A14B /* UIColor+MFLDColor.h in Headers */ = {isa = PBXBuildFile; fileRef = 59A5474B8982530896DD676925D76B7B /* UIColor+MFLDColor.h */; settings = {ATTRIBUTES = (Project, ); }; }; 24F75AF86066385BA5D40010E57A36B7 /* UIColor+MFHexColor.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F5352C0CDC59A9C21D1F37AE73217DF /* UIColor+MFHexColor.h */; settings = {ATTRIBUTES = (Project, ); }; }; 255CFB9F22BF1D76532766EF5197562B /* YYMemoryCache.h in Headers */ = {isa = PBXBuildFile; fileRef = F033C04E9D6CFA71BA5CECF8B24B1E0E /* YYMemoryCache.h */; settings = {ATTRIBUTES = (Project, ); }; }; 272DBC1ADD0B026B6625C91EDF77C771 /* UIButton+YYWebImage.m in Sources */ = {isa = PBXBuildFile; fileRef = E90002DC07702BAAD7B220257400D5BF /* UIButton+YYWebImage.m */; }; 29A9A76BA16EFE4672DB09693F830BD4 /* UIImageView+YYWebImage.h in Headers */ = 
{isa = PBXBuildFile; fileRef = 5CF3CD94E8D04009A849C917B7E2721D /* UIImageView+YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 2B6522BAC13320D33D3D6ADD36526E26 /* NSData+MFHexString.h in Headers */ = {isa = PBXBuildFile; fileRef = 18A8A5FB1348C9094843CCC56048CED7 /* NSData+MFHexString.h */; settings = {ATTRIBUTES = (Project, ); }; }; 2C9AEC6C0FD7DEE6846616CB6A4ADF1F /* YYSpriteSheetImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 9CAC01F29611B985E772308AA7843437 /* YYSpriteSheetImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 2DC2CA7D1626F12A8296E2B9A2741CED /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 67D6E55E61A18B9FCEE8B47E22D4270C /* CoreFoundation.framework */; }; 32E8FDD134677B65125211724B394D1D /* UIView+Shadow.h in Headers */ = {isa = PBXBuildFile; fileRef = F4849E59376115AB459DDD960FDCD27C /* UIView+Shadow.h */; settings = {ATTRIBUTES = (Project, ); }; }; 345870329F21A4EA6978E58820D37217 /* YYWebImage-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 3BF8CD7C5CFEB2D2B37356DB0BDC5F89 /* YYWebImage-dummy.m */; }; 3593C954B1047C28453590BA0D723D09 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */; }; 38D59E295C6E5924B8CC86DBEDCF6852 /* MobileCoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3F584F554FC4F3918A874F458E376711 /* MobileCoreServices.framework */; }; 3ECA2690533A2AFC4133893200D39B12 /* YYImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 7838A96C85C34F629DF8916882103E32 /* YYImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 40F71A32A408C21C211D4766A1827B1E /* _YYWebImageSetter.h in Headers */ = {isa = PBXBuildFile; fileRef = FA93AEE8C1840A47C87811DDD26E846B /* _YYWebImageSetter.h */; settings = {ATTRIBUTES = (Project, ); }; }; 43B1CFEE4FD272831183D1098AAE2769 /* YYImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 75F4A127D9B097D8F215A4AE17706549 /* YYImage.m */; }; 
52723F23446A4DDB4E9CD073EADFC54E /* YYImage-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = F1E3230C4E7D4DE61C64F4DF7AE30985 /* YYImage-dummy.m */; }; 545EC49452E887AABE12F54BCF5EB416 /* YYAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 47508E096FF1A69BB3E1834812787FB7 /* YYAnimatedImageView.m */; }; 5568546AC897E573177E738AA9683D2F /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */; }; 5975016BDE98C9D90C82144316486F45 /* _YYWebImageSetter.m in Sources */ = {isa = PBXBuildFile; fileRef = 6BD323327DD6FD31FE6A513987C22AC1 /* _YYWebImageSetter.m */; }; 59AD06CEB9C30E66DB24B42D9A9435C8 /* MobileCoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3F584F554FC4F3918A874F458E376711 /* MobileCoreServices.framework */; }; 5AB9C76511C219DD2B281BC4BC22D976 /* NSString+File.m in Sources */ = {isa = PBXBuildFile; fileRef = 27D2201704B93283CE3F081F925809F3 /* NSString+File.m */; }; 5D902E65B6FD3861C04DDCB57ACE93ED /* Pods-MFPictureBrowserDemo-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = E22A84D16B0BFC973322E5A43AB3F983 /* Pods-MFPictureBrowserDemo-dummy.m */; }; 60A37919DA2F26EDB3104D36E983E01E /* UIColor+MFHexColor.m in Sources */ = {isa = PBXBuildFile; fileRef = 6439E5A5516A25ECA56884DE34BEE75A /* UIColor+MFHexColor.m */; }; 66631C381422CA569BB9DFF407CDE6B5 /* YYCache-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 497229BA559DDB5472BBE2168DA56BD9 /* YYCache-dummy.m */; }; 680F900015946EE7E91C2FE52E52FD1F /* MFCookiesManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 22864423188C23E57030ED1E8E40D200 /* MFCookiesManager.m */; }; 68876BB41E4CF65E3B066C3266A99391 /* YYDiskCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 91E6C7401E50FC83BC216E30AD8EA51B /* YYDiskCache.h */; settings = {ATTRIBUTES = (Project, ); }; }; 6B12CF3AC3EEBF0EDA135210774B5514 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 
7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */; }; 6C79AE946EAE5491C61D0D8B7EA3D50B /* UIImage+YYWebImage.h in Headers */ = {isa = PBXBuildFile; fileRef = E67B35CF2E03E1BBA0B7802DA8EBA724 /* UIImage+YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 6D7D5F802FCB4C87BAE1A5D57F7092AF /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A624E91122CB033D86EBD46036B82B28 /* Accelerate.framework */; }; 6EB8CC0852A66546FF2F6A983D4C504E /* YYDiskCache.m in Sources */ = {isa = PBXBuildFile; fileRef = F50A838E25BB546FB2CDB378D98B6AC1 /* YYDiskCache.m */; }; 6F79380780284ED005F861FACE179D95 /* YYFrameImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 729013888648DE5112848CB23AE2D60C /* YYFrameImage.m */; }; 71013DC69F2F5B727DC511AB551ECD55 /* MKAnnotationView+YYWebImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 686CF685C828E569E655F372A698C1E2 /* MKAnnotationView+YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 73A7AF0F36FF0FB9A5AD57E310E8805A /* YYMemoryCache.m in Sources */ = {isa = PBXBuildFile; fileRef = BE553099A1DB48C152573B1011A5CB3B /* YYMemoryCache.m */; }; 749B82B89BD1E41420389983F4CB9D6D /* NSDateFormatter+Cache.m in Sources */ = {isa = PBXBuildFile; fileRef = A4307434B93D64ABCD8D5C0195FF6E96 /* NSDateFormatter+Cache.m */; }; 785D27C659EE598DFC0F2580F52D29E6 /* UIView+MFSnapshot.h in Headers */ = {isa = PBXBuildFile; fileRef = 8C1022760C1A1E6DD3819746C56B5FB7 /* UIView+MFSnapshot.h */; settings = {ATTRIBUTES = (Project, ); }; }; 7913CE51FDFAD0E01E3E5D534E84ECF5 /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4418563B9EF3CB95C8F65879F546D44C /* ImageIO.framework */; }; 7BBD6998D4960BA999E32684F03E6720 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */; }; 7BE0872A542AEB80D672FBBE1CDD9A9B /* YYCache.m in Sources */ = {isa = PBXBuildFile; fileRef = C319F72B335E282982A7B54082327494 /* YYCache.m */; }; 
7DFCF4696E65DB0F0E5377CC4576C6E3 /* NSData+MFHexString.m in Sources */ = {isa = PBXBuildFile; fileRef = 259C903D8916CADC3DE22115F1902447 /* NSData+MFHexString.m */; }; 7E63962C48E08320F84689B7E2BFBFD9 /* YYCache.h in Headers */ = {isa = PBXBuildFile; fileRef = E7F11E54EA95CB17F9DC0B75433DB95A /* YYCache.h */; settings = {ATTRIBUTES = (Project, ); }; }; 82EBB9B8294B7D64E6749DBE34DA86BE /* MFCookiesManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F25ECBDB5805338D6A5C25E9FF34877 /* MFCookiesManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; 85985555A36A012966D9B4F219C6C786 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 67D6E55E61A18B9FCEE8B47E22D4270C /* CoreFoundation.framework */; }; 85CADF4495AC3FD0DED98ABE434C47DA /* YYSpriteSheetImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A4D78B7241EED991E9EA5767FFA5B35 /* YYSpriteSheetImage.m */; }; 871A9CBDDEB8DB166D87217747B440BF /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0A3B26ECE81C89075EAD21D4DF8B1D0 /* QuartzCore.framework */; }; 8797F9B3FCF56C89B3D02D6C403B9B31 /* YYWebImageOperation.h in Headers */ = {isa = PBXBuildFile; fileRef = D67289DBF982BE4D6E7C5288DFDD80D4 /* YYWebImageOperation.h */; settings = {ATTRIBUTES = (Project, ); }; }; 87D956671DDF22ED40C9EB4BA692DAFE /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A90BE5491E0D2F0314AC354A80EA7053 /* AssetsLibrary.framework */; }; 88C85D77169D92CCB835699EE03BB8F2 /* MFDeviceManager.m in Sources */ = {isa = PBXBuildFile; fileRef = EBBF5E83328566742155402A3D1C09CD /* MFDeviceManager.m */; }; 88DEFFC47ECD0BB02DE6EDF249D78375 /* UIButton+YYWebImage.h in Headers */ = {isa = PBXBuildFile; fileRef = EB0CE14D78EA969F1B653326808F7D2B /* UIButton+YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 89608D50EB882EB4B358E475F69E5798 /* YYWebImageOperation.m in Sources */ = {isa = PBXBuildFile; fileRef = 33FA4A8B527E5A40924E3FEE1B89917D /* 
YYWebImageOperation.m */; }; 8DFE9DB7B082A4397A6583E5FE6AFDC7 /* MFCategory-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = CCDFCCE55F98E593534EDEC38283F3B9 /* MFCategory-dummy.m */; }; 9196F9BF21D72D9ECB46EB0BC24FAEF5 /* YYFrameImage.h in Headers */ = {isa = PBXBuildFile; fileRef = 675B03562F0E45A550E100F786BE077C /* YYFrameImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 920E2B6A1C691D358B30FD5EFB417CC8 /* UIView+Shadow.m in Sources */ = {isa = PBXBuildFile; fileRef = 401CF9D9C2D2E35A473E570FD79E9C7F /* UIView+Shadow.m */; }; 944BB684DC1C161FFC494A6EBEEE275D /* UIImageView+YYWebImage.m in Sources */ = {isa = PBXBuildFile; fileRef = FA90D34F39288E1B5D5DF41A2DC634D1 /* UIImageView+YYWebImage.m */; }; 9497EB337FFFB7BF0E58AA7171047AE6 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A624E91122CB033D86EBD46036B82B28 /* Accelerate.framework */; }; 94C286A53BA0D6F990F22F338376423D /* YYAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = A0B75EA0D37E0EB00EF90EBEECE8F36F /* YYAnimatedImageView.h */; settings = {ATTRIBUTES = (Project, ); }; }; 94CAFB4DBD9B82564FDA21413629BF4D /* UIView+MFFrame.m in Sources */ = {isa = PBXBuildFile; fileRef = 5F30AF9CF7BFF0B94E2B5C02264857B8 /* UIView+MFFrame.m */; }; 97C5A226652C798FA68195BD204DC91F /* CALayer+YYWebImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B35C2DBACDF44140CE0B83C15EC57B6 /* CALayer+YYWebImage.m */; }; 99B03C4ECAB497CA6E198DD1BA03D46C /* MFSavePhotoManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 4472B7B0D062E373C0D16EB1E058BD69 /* MFSavePhotoManager.m */; }; 9D7EC27377BEF6E1F431849144DC9E16 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */; }; A43890A730F8817097457E7F1E38A96C /* NSDateFormatter+Cache.h in Headers */ = {isa = PBXBuildFile; fileRef = CD4AACB630F7D7EB7568A8E2A97EA292 /* NSDateFormatter+Cache.h */; settings = {ATTRIBUTES = (Project, ); }; }; 
A7F80D0C1B57144A1834B2D0EA8A0132 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */; }; A9C18E9904C01D8400886CFAA39469F5 /* NSString+MFMD5Encoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 582F0CFEA32A0FA96633974A3A6DEAE4 /* NSString+MFMD5Encoder.h */; settings = {ATTRIBUTES = (Project, ); }; }; AE1287B5E96B3DD6CBC14FA306B4A0E6 /* MFREManager.m in Sources */ = {isa = PBXBuildFile; fileRef = C0AB40AB5E8C0133BF01B633A9570247 /* MFREManager.m */; }; AEC270D9B9F21B35F6BC70BEFBA1BB86 /* YYKVStorage.m in Sources */ = {isa = PBXBuildFile; fileRef = 71AF685CEA0DE94748BE10A6E2A4C28A /* YYKVStorage.m */; }; AFC40425C34D6FD145E5812EB37D80E5 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */; }; B0AF6948FB992F59D834AA97DB4288F5 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0A3B26ECE81C89075EAD21D4DF8B1D0 /* QuartzCore.framework */; }; B9390AC9DCBDD7B25F1C803E3F1E5683 /* UIImage+Current.h in Headers */ = {isa = PBXBuildFile; fileRef = 3D848BE4180A8B73EA93D2DBBCE844AA /* UIImage+Current.h */; settings = {ATTRIBUTES = (Project, ); }; }; BA6CE1209CFB6AA4B7D6E5D15D1D9F98 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */; }; BCA78209FFEF4AB5E9A1377DBF06398F /* UIView+MFExpandTouchSize.h in Headers */ = {isa = PBXBuildFile; fileRef = 7CFD7E42176B883C53CCA3E944BCBF01 /* UIView+MFExpandTouchSize.h */; settings = {ATTRIBUTES = (Project, ); }; }; BDC66EF003D69B92E15B893541B4D75B /* YYImageCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = B69CF91C0FF5CC8070422FBAB51E6A16 /* YYImageCoder.h */; settings = {ATTRIBUTES = (Project, ); }; }; BDDE29B59467D55BF3D9F40EDA6A3A2C /* YYWebImage.h in Headers */ = {isa = PBXBuildFile; fileRef = BC995431DC4629ECDF8001C10EA907EF /* YYWebImage.h */; settings = {ATTRIBUTES = (Project, ); }; }; 
C02C60E87116038B5D2D5BDB032E79DA /* NSString+MFBase64Encoder.h in Headers */ = {isa = PBXBuildFile; fileRef = E727DBDD2E743E74AD48638D4D16D6ED /* NSString+MFBase64Encoder.h */; settings = {ATTRIBUTES = (Project, ); }; }; C03783322E036B05A1D835BA5C704981 /* MFDeviceManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 5CDD0D8509D2F4E48EC3F67A86264C93 /* MFDeviceManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; C235FE5994C4BF0EDC49E4C9450F9287 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */; }; C2D1AB19695E8C6623FAD8A998879E0A /* NSTimer+MFWeakTimer.h in Headers */ = {isa = PBXBuildFile; fileRef = 55EA1B819A72B60C758D04E15724A5B8 /* NSTimer+MFWeakTimer.h */; settings = {ATTRIBUTES = (Project, ); }; }; C6D2C35599DFD780092B6371B956842A /* UIView+MFFrame.h in Headers */ = {isa = PBXBuildFile; fileRef = 30CD2F9D94CC9C94A85A83466F1A2D10 /* UIView+MFFrame.h */; settings = {ATTRIBUTES = (Project, ); }; }; CA09FF94CAD0CF7EF9CF44C3640F9F2C /* YYImageCache.h in Headers */ = {isa = PBXBuildFile; fileRef = 357B81EBC4388CAC98CE10331E9AD299 /* YYImageCache.h */; settings = {ATTRIBUTES = (Project, ); }; }; CCF0CCD85D57D5FF6C449F3CB4B15C60 /* MKAnnotationView+YYWebImage.m in Sources */ = {isa = PBXBuildFile; fileRef = FEEDAA5492CAEC285AD4E4296FEBC43E /* MKAnnotationView+YYWebImage.m */; }; E0BE34BF4E0E89BDB0DC7032034B9E5C /* YYWebImageManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 2313DA32B3020FF83733FB63FECB065F /* YYWebImageManager.h */; settings = {ATTRIBUTES = (Project, ); }; }; E648338CBBBAA0E5DCD952256CA80E7E /* UIColor+MFLDColor.m in Sources */ = {isa = PBXBuildFile; fileRef = 80D6CD643C4E24A79DE1D7F9552E055A /* UIColor+MFLDColor.m */; }; F0EE23FEEB84B639624AD61D861633FA /* NSArray+Distinct.h in Headers */ = {isa = PBXBuildFile; fileRef = 7BBC6141A5F1E5B08191B24209D722B6 /* NSArray+Distinct.h */; settings = {ATTRIBUTES = (Project, ); }; }; 
F51300B83F349CA0E8E314564627FD03 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0A3B26ECE81C89075EAD21D4DF8B1D0 /* QuartzCore.framework */; }; F62510BD3FA536537AD3326C4F20C87B /* UIView+MFExpandTouchSize.m in Sources */ = {isa = PBXBuildFile; fileRef = 5C9D94016BFA85E334A11422CF62ADB8 /* UIView+MFExpandTouchSize.m */; }; F65E778FC8D8C242EA652C61BC75C943 /* MFManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 5015C918123125FBEE5A85EAA3AE2380 /* MFManager.m */; }; FB1025C581A7F63898FB0A3FFB1E0CCC /* NSString+MFBase64Encoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 91C95F65D829456C398D251E5E1D39FB /* NSString+MFBase64Encoder.m */; }; FED34258DE3C03376648100F27F72B33 /* UIImage+YYWebImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 64AA21528972BB83D7AD02AE41DBF0A7 /* UIImage+YYWebImage.m */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ 1D21CF61942F9479B58C85A062D3D9BC /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 4BCE175B89FF4865888D899363622C97; remoteInfo = YYCache; }; 396E384C2819E11617867A94805B3959 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 696B117AFDF09EFF7EE7421385BF0AA4; remoteInfo = MFCategory; }; 4AF9F1B8F2DF654228223281F30D8405 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 18FCE031EFADC83B5097BF4CDDAF8C0A; remoteInfo = YYImage; }; 5D8BAF5C182B3705C9AEC0AC853058B9 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 18FCE031EFADC83B5097BF4CDDAF8C0A; remoteInfo = YYImage; }; 
9075AAD3904983CF707E3AB7E6E05403 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 68FBB5AE4B739F99050CC610B9CA3D65; remoteInfo = YYWebImage; }; EB0EF67BCCD1F4E2255BD42067D45625 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */; proxyType = 1; remoteGlobalIDString = 4BCE175B89FF4865888D899363622C97; remoteInfo = YYCache; }; /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ 0D1FCF67F9946129754D753F5504C711 /* YYKVStorage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYKVStorage.h; path = YYCache/YYKVStorage.h; sourceTree = ""; }; 152F77ECEC4C3DAB70FD364E55B8DE68 /* YYImage.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = YYImage.xcconfig; sourceTree = ""; }; 18A8A5FB1348C9094843CCC56048CED7 /* NSData+MFHexString.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "NSData+MFHexString.h"; path = "Classes/NSDataCategory/NSData+MFHexString.h"; sourceTree = ""; }; 1AD931227771EBB1CAB963AEB7985225 /* MFManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = MFManager.h; path = Classes/Manager/MFManager.h; sourceTree = ""; }; 1BE0EDE708E60D41A1FE5BAE8E6EAF47 /* WebP.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = WebP.framework; path = Vendor/WebP.framework; sourceTree = ""; }; 1F25ECBDB5805338D6A5C25E9FF34877 /* MFCookiesManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = MFCookiesManager.h; path = Classes/Manager/MFCookiesManager.h; sourceTree = ""; }; 1F5352C0CDC59A9C21D1F37AE73217DF /* UIColor+MFHexColor.h */ = {isa = PBXFileReference; includeInIndex = 1; 
lastKnownFileType = sourcecode.c.h; name = "UIColor+MFHexColor.h"; path = "Classes/UIColorCategory/UIColor+MFHexColor.h"; sourceTree = ""; }; 22864423188C23E57030ED1E8E40D200 /* MFCookiesManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = MFCookiesManager.m; path = Classes/Manager/MFCookiesManager.m; sourceTree = ""; }; 2313DA32B3020FF83733FB63FECB065F /* YYWebImageManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYWebImageManager.h; path = YYWebImage/YYWebImageManager.h; sourceTree = ""; }; 259C903D8916CADC3DE22115F1902447 /* NSData+MFHexString.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSData+MFHexString.m"; path = "Classes/NSDataCategory/NSData+MFHexString.m"; sourceTree = ""; }; 27D2201704B93283CE3F081F925809F3 /* NSString+File.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSString+File.m"; path = "Classes/NSStringCategory/NSString+File.m"; sourceTree = ""; }; 2D70D0A909FC964983AF6909D710101C /* Pods-MFPictureBrowserDemo-resources.sh */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.script.sh; path = "Pods-MFPictureBrowserDemo-resources.sh"; sourceTree = ""; }; 2DB471A32A577EFDB2E898DB671C67B1 /* YYCache-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "YYCache-prefix.pch"; sourceTree = ""; }; 30CD2F9D94CC9C94A85A83466F1A2D10 /* UIView+MFFrame.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIView+MFFrame.h"; path = "Classes/UIViewCategory/UIView+MFFrame.h"; sourceTree = ""; }; 33126ACC412964BD7E2E2DB5D056193A /* UIImage+Current.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIImage+Current.m"; path = "Classes/UIImageCategory/UIImage+Current.m"; sourceTree = ""; }; 
33FA4A8B527E5A40924E3FEE1B89917D /* YYWebImageOperation.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYWebImageOperation.m; path = YYWebImage/YYWebImageOperation.m; sourceTree = ""; }; 357B81EBC4388CAC98CE10331E9AD299 /* YYImageCache.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYImageCache.h; path = YYWebImage/YYImageCache.h; sourceTree = ""; }; 36C2C3FCE3284A25F333CA7188A8B20A /* YYCache.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = YYCache.xcconfig; sourceTree = ""; }; 3A4D78B7241EED991E9EA5767FFA5B35 /* YYSpriteSheetImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYSpriteSheetImage.m; path = YYImage/YYSpriteSheetImage.m; sourceTree = ""; }; 3BF8CD7C5CFEB2D2B37356DB0BDC5F89 /* YYWebImage-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "YYWebImage-dummy.m"; sourceTree = ""; }; 3D848BE4180A8B73EA93D2DBBCE844AA /* UIImage+Current.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIImage+Current.h"; path = "Classes/UIImageCategory/UIImage+Current.h"; sourceTree = ""; }; 3F584F554FC4F3918A874F458E376711 /* MobileCoreServices.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MobileCoreServices.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/MobileCoreServices.framework; sourceTree = DEVELOPER_DIR; }; 401CF9D9C2D2E35A473E570FD79E9C7F /* UIView+Shadow.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIView+Shadow.m"; path = "Classes/UIViewCategory/UIView+Shadow.m"; sourceTree = ""; }; 414CC9D9152EB41690188B0A94CE53D8 /* MFSavePhotoManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = 
sourcecode.c.h; name = MFSavePhotoManager.h; path = Classes/Manager/MFSavePhotoManager.h; sourceTree = ""; }; 4418563B9EF3CB95C8F65879F546D44C /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/ImageIO.framework; sourceTree = DEVELOPER_DIR; }; 4472B7B0D062E373C0D16EB1E058BD69 /* MFSavePhotoManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = MFSavePhotoManager.m; path = Classes/Manager/MFSavePhotoManager.m; sourceTree = ""; }; 44DB6F17527AADEF30F125FAA1376E6F /* YYWebImage.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = YYWebImage.xcconfig; sourceTree = ""; }; 46A4B00C9EEC3C8DB459978CEAA0B882 /* YYImageCoder.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYImageCoder.m; path = YYImage/YYImageCoder.m; sourceTree = ""; }; 47508E096FF1A69BB3E1834812787FB7 /* YYAnimatedImageView.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYAnimatedImageView.m; path = YYImage/YYAnimatedImageView.m; sourceTree = ""; }; 497229BA559DDB5472BBE2168DA56BD9 /* YYCache-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "YYCache-dummy.m"; sourceTree = ""; }; 5015C918123125FBEE5A85EAA3AE2380 /* MFManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = MFManager.m; path = Classes/Manager/MFManager.m; sourceTree = ""; }; 52188DA9E5943D6846C798060E4C45A7 /* Pods-MFPictureBrowserDemo-frameworks.sh */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.script.sh; path = "Pods-MFPictureBrowserDemo-frameworks.sh"; sourceTree = ""; }; 55EA1B819A72B60C758D04E15724A5B8 /* NSTimer+MFWeakTimer.h */ = {isa = PBXFileReference; 
includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "NSTimer+MFWeakTimer.h"; path = "Classes/NSTimerCategory/NSTimer+MFWeakTimer.h"; sourceTree = ""; }; 582F0CFEA32A0FA96633974A3A6DEAE4 /* NSString+MFMD5Encoder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "NSString+MFMD5Encoder.h"; path = "Classes/NSStringCategory/NSString+MFMD5Encoder.h"; sourceTree = ""; }; 59A5474B8982530896DD676925D76B7B /* UIColor+MFLDColor.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIColor+MFLDColor.h"; path = "Classes/UIColorCategory/UIColor+MFLDColor.h"; sourceTree = ""; }; 59E34C1838F6D6759741ABB91258D932 /* UIView+MFSnapshot.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIView+MFSnapshot.m"; path = "Classes/UIViewCategory/UIView+MFSnapshot.m"; sourceTree = ""; }; 5C9D94016BFA85E334A11422CF62ADB8 /* UIView+MFExpandTouchSize.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIView+MFExpandTouchSize.m"; path = "Classes/UIViewCategory/UIView+MFExpandTouchSize.m"; sourceTree = ""; }; 5CDD0D8509D2F4E48EC3F67A86264C93 /* MFDeviceManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = MFDeviceManager.h; path = Classes/Manager/MFDeviceManager.h; sourceTree = ""; }; 5CF3CD94E8D04009A849C917B7E2721D /* UIImageView+YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIImageView+YYWebImage.h"; path = "YYWebImage/Categories/UIImageView+YYWebImage.h"; sourceTree = ""; }; 5F30AF9CF7BFF0B94E2B5C02264857B8 /* UIView+MFFrame.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIView+MFFrame.m"; path = "Classes/UIViewCategory/UIView+MFFrame.m"; sourceTree = ""; }; 6439E5A5516A25ECA56884DE34BEE75A /* UIColor+MFHexColor.m */ = {isa = PBXFileReference; 
includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIColor+MFHexColor.m"; path = "Classes/UIColorCategory/UIColor+MFHexColor.m"; sourceTree = ""; }; 64AA21528972BB83D7AD02AE41DBF0A7 /* UIImage+YYWebImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIImage+YYWebImage.m"; path = "YYWebImage/Categories/UIImage+YYWebImage.m"; sourceTree = ""; }; 675B03562F0E45A550E100F786BE077C /* YYFrameImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYFrameImage.h; path = YYImage/YYFrameImage.h; sourceTree = ""; }; 67D6E55E61A18B9FCEE8B47E22D4270C /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/CoreFoundation.framework; sourceTree = DEVELOPER_DIR; }; 686CF685C828E569E655F372A698C1E2 /* MKAnnotationView+YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "MKAnnotationView+YYWebImage.h"; path = "YYWebImage/Categories/MKAnnotationView+YYWebImage.h"; sourceTree = ""; }; 6BD323327DD6FD31FE6A513987C22AC1 /* _YYWebImageSetter.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = _YYWebImageSetter.m; path = YYWebImage/Categories/_YYWebImageSetter.m; sourceTree = ""; }; 6E19475E71B031CC1ED87BF470606C65 /* Pods-MFPictureBrowserDemo-acknowledgements.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-MFPictureBrowserDemo-acknowledgements.plist"; sourceTree = ""; }; 71AF685CEA0DE94748BE10A6E2A4C28A /* YYKVStorage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYKVStorage.m; path = YYCache/YYKVStorage.m; sourceTree = ""; }; 71C0DCF6B40EEF430263D3E6E251C63F /* YYWebImage-prefix.pch */ = {isa = 
PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "YYWebImage-prefix.pch"; sourceTree = ""; }; 729013888648DE5112848CB23AE2D60C /* YYFrameImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYFrameImage.m; path = YYImage/YYFrameImage.m; sourceTree = ""; }; 73C2768FC233B49CABCDE4A05B5F25FA /* MFCategory.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = MFCategory.xcconfig; sourceTree = ""; }; 75F4A127D9B097D8F215A4AE17706549 /* YYImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYImage.m; path = YYImage/YYImage.m; sourceTree = ""; }; 7838A96C85C34F629DF8916882103E32 /* YYImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYImage.h; path = YYImage/YYImage.h; sourceTree = ""; }; 794D05F11A52A3A3967BD8230E27F9CE /* CALayer+YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "CALayer+YYWebImage.h"; path = "YYWebImage/Categories/CALayer+YYWebImage.h"; sourceTree = ""; }; 7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/UIKit.framework; sourceTree = DEVELOPER_DIR; }; 7B5F8BDD9DAB3C9040FFC89DB0FF2E66 /* YYImage-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "YYImage-prefix.pch"; sourceTree = ""; }; 7B667F796A41AAD096200E23150B5D63 /* Pods-MFPictureBrowserDemo-acknowledgements.markdown */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = "Pods-MFPictureBrowserDemo-acknowledgements.markdown"; sourceTree = ""; }; 7BBC6141A5F1E5B08191B24209D722B6 /* NSArray+Distinct.h */ = {isa = PBXFileReference; includeInIndex = 1; 
lastKnownFileType = sourcecode.c.h; name = "NSArray+Distinct.h"; path = "Classes/NSArrayCategory/NSArray+Distinct.h"; sourceTree = ""; }; 7CFD7E42176B883C53CCA3E944BCBF01 /* UIView+MFExpandTouchSize.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIView+MFExpandTouchSize.h"; path = "Classes/UIViewCategory/UIView+MFExpandTouchSize.h"; sourceTree = ""; }; 80D6CD643C4E24A79DE1D7F9552E055A /* UIColor+MFLDColor.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIColor+MFLDColor.m"; path = "Classes/UIColorCategory/UIColor+MFLDColor.m"; sourceTree = ""; }; 8B35C2DBACDF44140CE0B83C15EC57B6 /* CALayer+YYWebImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "CALayer+YYWebImage.m"; path = "YYWebImage/Categories/CALayer+YYWebImage.m"; sourceTree = ""; }; 8C1022760C1A1E6DD3819746C56B5FB7 /* UIView+MFSnapshot.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIView+MFSnapshot.h"; path = "Classes/UIViewCategory/UIView+MFSnapshot.h"; sourceTree = ""; }; 91C95F65D829456C398D251E5E1D39FB /* NSString+MFBase64Encoder.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSString+MFBase64Encoder.m"; path = "Classes/NSStringCategory/NSString+MFBase64Encoder.m"; sourceTree = ""; }; 91E6C7401E50FC83BC216E30AD8EA51B /* YYDiskCache.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYDiskCache.h; path = YYCache/YYDiskCache.h; sourceTree = ""; }; 93A4A3777CF96A4AAC1D13BA6DCCEA73 /* Podfile */ = {isa = PBXFileReference; explicitFileType = text.script.ruby; includeInIndex = 1; lastKnownFileType = text; name = Podfile; path = ../Podfile; sourceTree = SOURCE_ROOT; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; 9745E42EF92EE7FAF160701A0507326D /* Pods-MFPictureBrowserDemo.release.xcconfig */ = {isa = 
PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-MFPictureBrowserDemo.release.xcconfig"; sourceTree = ""; }; 9CAC01F29611B985E772308AA7843437 /* YYSpriteSheetImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYSpriteSheetImage.h; path = YYImage/YYSpriteSheetImage.h; sourceTree = ""; }; 9E0F3EF807998BDC69BBCD6C5D8C3E52 /* NSArray+Distinct.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSArray+Distinct.m"; path = "Classes/NSArrayCategory/NSArray+Distinct.m"; sourceTree = ""; }; A0B75EA0D37E0EB00EF90EBEECE8F36F /* YYAnimatedImageView.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYAnimatedImageView.h; path = YYImage/YYAnimatedImageView.h; sourceTree = ""; }; A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/Foundation.framework; sourceTree = DEVELOPER_DIR; }; A4307434B93D64ABCD8D5C0195FF6E96 /* NSDateFormatter+Cache.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSDateFormatter+Cache.m"; path = "Classes/NSDateFormatterCategory/NSDateFormatter+Cache.m"; sourceTree = ""; }; A624E91122CB033D86EBD46036B82B28 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/Accelerate.framework; sourceTree = DEVELOPER_DIR; }; A90BE5491E0D2F0314AC354A80EA7053 /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = 
Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/AssetsLibrary.framework; sourceTree = DEVELOPER_DIR; }; B0AA7F6F79C3BDF0EF518013E51E328C /* libYYWebImage.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; name = libYYWebImage.a; path = libYYWebImage.a; sourceTree = BUILT_PRODUCTS_DIR; }; B69CF91C0FF5CC8070422FBAB51E6A16 /* YYImageCoder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYImageCoder.h; path = YYImage/YYImageCoder.h; sourceTree = ""; }; B965A2D3DAE4E6CF4A69C2BA4CA30657 /* NSString+MFMD5Encoder.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSString+MFMD5Encoder.m"; path = "Classes/NSStringCategory/NSString+MFMD5Encoder.m"; sourceTree = ""; }; BC995431DC4629ECDF8001C10EA907EF /* YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYWebImage.h; path = YYWebImage/YYWebImage.h; sourceTree = ""; }; BE553099A1DB48C152573B1011A5CB3B /* YYMemoryCache.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYMemoryCache.m; path = YYCache/YYMemoryCache.m; sourceTree = ""; }; C0AB40AB5E8C0133BF01B633A9570247 /* MFREManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = MFREManager.m; path = Classes/Manager/MFREManager.m; sourceTree = ""; }; C319F72B335E282982A7B54082327494 /* YYCache.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYCache.m; path = YYCache/YYCache.m; sourceTree = ""; }; C59BF3C9497204CB11DB55CF490F37D4 /* libYYCache.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; name = libYYCache.a; path = libYYCache.a; sourceTree = BUILT_PRODUCTS_DIR; }; CAB7A2005DA56FA086ABEE79F3DBF423 /* NSString+File.h */ = {isa = PBXFileReference; includeInIndex = 1; 
lastKnownFileType = sourcecode.c.h; name = "NSString+File.h"; path = "Classes/NSStringCategory/NSString+File.h"; sourceTree = ""; }; CCDFCCE55F98E593534EDEC38283F3B9 /* MFCategory-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "MFCategory-dummy.m"; sourceTree = ""; }; CD4AACB630F7D7EB7568A8E2A97EA292 /* NSDateFormatter+Cache.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "NSDateFormatter+Cache.h"; path = "Classes/NSDateFormatterCategory/NSDateFormatter+Cache.h"; sourceTree = ""; }; CF4BED89085AF5CA0AC42794F6A7A219 /* MFCategory-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "MFCategory-prefix.pch"; sourceTree = ""; }; D0A3B26ECE81C89075EAD21D4DF8B1D0 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS10.3.sdk/System/Library/Frameworks/QuartzCore.framework; sourceTree = DEVELOPER_DIR; }; D67289DBF982BE4D6E7C5288DFDD80D4 /* YYWebImageOperation.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYWebImageOperation.h; path = YYWebImage/YYWebImageOperation.h; sourceTree = ""; }; E22A84D16B0BFC973322E5A43AB3F983 /* Pods-MFPictureBrowserDemo-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "Pods-MFPictureBrowserDemo-dummy.m"; sourceTree = ""; }; E67B35CF2E03E1BBA0B7802DA8EBA724 /* UIImage+YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIImage+YYWebImage.h"; path = "YYWebImage/Categories/UIImage+YYWebImage.h"; sourceTree = ""; }; E727DBDD2E743E74AD48638D4D16D6ED /* NSString+MFBase64Encoder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "NSString+MFBase64Encoder.h"; path = 
"Classes/NSStringCategory/NSString+MFBase64Encoder.h"; sourceTree = ""; }; E7F11E54EA95CB17F9DC0B75433DB95A /* YYCache.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYCache.h; path = YYCache/YYCache.h; sourceTree = ""; }; E81C4D19016A649D58FD3E31FE95068A /* NSTimer+MFWeakTimer.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "NSTimer+MFWeakTimer.m"; path = "Classes/NSTimerCategory/NSTimer+MFWeakTimer.m"; sourceTree = ""; }; E90002DC07702BAAD7B220257400D5BF /* UIButton+YYWebImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIButton+YYWebImage.m"; path = "YYWebImage/Categories/UIButton+YYWebImage.m"; sourceTree = ""; }; EB0CE14D78EA969F1B653326808F7D2B /* UIButton+YYWebImage.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIButton+YYWebImage.h"; path = "YYWebImage/Categories/UIButton+YYWebImage.h"; sourceTree = ""; }; EBBF5E83328566742155402A3D1C09CD /* MFDeviceManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = MFDeviceManager.m; path = Classes/Manager/MFDeviceManager.m; sourceTree = ""; }; EC027519015738A199D3E531C2AD3647 /* libYYImage.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; name = libYYImage.a; path = libYYImage.a; sourceTree = BUILT_PRODUCTS_DIR; }; ED0729A69815F74EF312C59E0C1D71BD /* Pods-MFPictureBrowserDemo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "Pods-MFPictureBrowserDemo.debug.xcconfig"; sourceTree = ""; }; F033C04E9D6CFA71BA5CECF8B24B1E0E /* YYMemoryCache.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = YYMemoryCache.h; path = YYCache/YYMemoryCache.h; sourceTree = ""; }; F1E3230C4E7D4DE61C64F4DF7AE30985 /* YYImage-dummy.m */ = {isa = PBXFileReference; 
includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "YYImage-dummy.m"; sourceTree = ""; }; F3A0EC16B9709D89AE00FDE77D3BB41B /* YYImageCache.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYImageCache.m; path = YYWebImage/YYImageCache.m; sourceTree = ""; }; F4849E59376115AB459DDD960FDCD27C /* UIView+Shadow.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = "UIView+Shadow.h"; path = "Classes/UIViewCategory/UIView+Shadow.h"; sourceTree = ""; }; F50A838E25BB546FB2CDB378D98B6AC1 /* YYDiskCache.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYDiskCache.m; path = YYCache/YYDiskCache.m; sourceTree = ""; }; F5D1D555371D69FF2516CDB195FB48A0 /* YYWebImageManager.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = YYWebImageManager.m; path = YYWebImage/YYWebImageManager.m; sourceTree = ""; }; FA90D34F39288E1B5D5DF41A2DC634D1 /* UIImageView+YYWebImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "UIImageView+YYWebImage.m"; path = "YYWebImage/Categories/UIImageView+YYWebImage.m"; sourceTree = ""; }; FA93AEE8C1840A47C87811DDD26E846B /* _YYWebImageSetter.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = _YYWebImageSetter.h; path = YYWebImage/Categories/_YYWebImageSetter.h; sourceTree = ""; }; FCA07E0F4B89C8B69DE604568E3501D9 /* MFREManager.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = MFREManager.h; path = Classes/Manager/MFREManager.h; sourceTree = ""; }; FCCCAAB50468DCB0A67106E816BEC34D /* libMFCategory.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; name = libMFCategory.a; path = libMFCategory.a; sourceTree = BUILT_PRODUCTS_DIR; }; FCE259EB1CBDC358F0091FC7E138BB10 /* libPods-MFPictureBrowserDemo.a 
*/ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; name = "libPods-MFPictureBrowserDemo.a"; path = "libPods-MFPictureBrowserDemo.a"; sourceTree = BUILT_PRODUCTS_DIR; }; FEEDAA5492CAEC285AD4E4296FEBC43E /* MKAnnotationView+YYWebImage.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = "MKAnnotationView+YYWebImage.m"; path = "YYWebImage/Categories/MKAnnotationView+YYWebImage.m"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 24E66C40C65385098DDDA3BEB942D14C /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 2DC2CA7D1626F12A8296E2B9A2741CED /* CoreFoundation.framework in Frameworks */, 7BBD6998D4960BA999E32684F03E6720 /* Foundation.framework in Frameworks */, B0AF6948FB992F59D834AA97DB4288F5 /* QuartzCore.framework in Frameworks */, A7F80D0C1B57144A1834B2D0EA8A0132 /* UIKit.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; 97F73D03D864D92B9B675F908DCDA8FB /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 9497EB337FFFB7BF0E58AA7171047AE6 /* Accelerate.framework in Frameworks */, 87D956671DDF22ED40C9EB4BA692DAFE /* AssetsLibrary.framework in Frameworks */, 85985555A36A012966D9B4F219C6C786 /* CoreFoundation.framework in Frameworks */, 5568546AC897E573177E738AA9683D2F /* Foundation.framework in Frameworks */, 7913CE51FDFAD0E01E3E5D534E84ECF5 /* ImageIO.framework in Frameworks */, 38D59E295C6E5924B8CC86DBEDCF6852 /* MobileCoreServices.framework in Frameworks */, 871A9CBDDEB8DB166D87217747B440BF /* QuartzCore.framework in Frameworks */, AFC40425C34D6FD145E5812EB37D80E5 /* UIKit.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; B6166C57974AC19F5F196B5C51D33020 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 6D7D5F802FCB4C87BAE1A5D57F7092AF /* Accelerate.framework in Frameworks 
*/, 1A8AD5A82AB27D6B5FC4F49449E21889 /* AssetsLibrary.framework in Frameworks */, 07DA83207C6539EB33761509DAD155F8 /* CoreFoundation.framework in Frameworks */, 9D7EC27377BEF6E1F431849144DC9E16 /* Foundation.framework in Frameworks */, 05CA2AF1247C77B1A4C0634ADE891640 /* ImageIO.framework in Frameworks */, 59AD06CEB9C30E66DB24B42D9A9435C8 /* MobileCoreServices.framework in Frameworks */, F51300B83F349CA0E8E314564627FD03 /* QuartzCore.framework in Frameworks */, 6B12CF3AC3EEBF0EDA135210774B5514 /* UIKit.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; D98C06C61E69855FBA3C4A8986E366D1 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( C235FE5994C4BF0EDC49E4C9450F9287 /* Foundation.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; EE4C9D7FEAC151A0C4BA0D3DCD6E6843 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 3593C954B1047C28453590BA0D723D09 /* Foundation.framework in Frameworks */, BA6CE1209CFB6AA4B7D6E5D15D1D9F98 /* UIKit.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ 32EE7697A19BD2E0F0A028313110F502 /* MFCategory */ = { isa = PBXGroup; children = ( 1F25ECBDB5805338D6A5C25E9FF34877 /* MFCookiesManager.h */, 22864423188C23E57030ED1E8E40D200 /* MFCookiesManager.m */, 5CDD0D8509D2F4E48EC3F67A86264C93 /* MFDeviceManager.h */, EBBF5E83328566742155402A3D1C09CD /* MFDeviceManager.m */, 1AD931227771EBB1CAB963AEB7985225 /* MFManager.h */, 5015C918123125FBEE5A85EAA3AE2380 /* MFManager.m */, FCA07E0F4B89C8B69DE604568E3501D9 /* MFREManager.h */, C0AB40AB5E8C0133BF01B633A9570247 /* MFREManager.m */, 414CC9D9152EB41690188B0A94CE53D8 /* MFSavePhotoManager.h */, 4472B7B0D062E373C0D16EB1E058BD69 /* MFSavePhotoManager.m */, 7BBC6141A5F1E5B08191B24209D722B6 /* NSArray+Distinct.h */, 9E0F3EF807998BDC69BBCD6C5D8C3E52 /* NSArray+Distinct.m */, 
18A8A5FB1348C9094843CCC56048CED7 /* NSData+MFHexString.h */, 259C903D8916CADC3DE22115F1902447 /* NSData+MFHexString.m */, CD4AACB630F7D7EB7568A8E2A97EA292 /* NSDateFormatter+Cache.h */, A4307434B93D64ABCD8D5C0195FF6E96 /* NSDateFormatter+Cache.m */, CAB7A2005DA56FA086ABEE79F3DBF423 /* NSString+File.h */, 27D2201704B93283CE3F081F925809F3 /* NSString+File.m */, E727DBDD2E743E74AD48638D4D16D6ED /* NSString+MFBase64Encoder.h */, 91C95F65D829456C398D251E5E1D39FB /* NSString+MFBase64Encoder.m */, 582F0CFEA32A0FA96633974A3A6DEAE4 /* NSString+MFMD5Encoder.h */, B965A2D3DAE4E6CF4A69C2BA4CA30657 /* NSString+MFMD5Encoder.m */, 55EA1B819A72B60C758D04E15724A5B8 /* NSTimer+MFWeakTimer.h */, E81C4D19016A649D58FD3E31FE95068A /* NSTimer+MFWeakTimer.m */, 1F5352C0CDC59A9C21D1F37AE73217DF /* UIColor+MFHexColor.h */, 6439E5A5516A25ECA56884DE34BEE75A /* UIColor+MFHexColor.m */, 59A5474B8982530896DD676925D76B7B /* UIColor+MFLDColor.h */, 80D6CD643C4E24A79DE1D7F9552E055A /* UIColor+MFLDColor.m */, 3D848BE4180A8B73EA93D2DBBCE844AA /* UIImage+Current.h */, 33126ACC412964BD7E2E2DB5D056193A /* UIImage+Current.m */, 7CFD7E42176B883C53CCA3E944BCBF01 /* UIView+MFExpandTouchSize.h */, 5C9D94016BFA85E334A11422CF62ADB8 /* UIView+MFExpandTouchSize.m */, 30CD2F9D94CC9C94A85A83466F1A2D10 /* UIView+MFFrame.h */, 5F30AF9CF7BFF0B94E2B5C02264857B8 /* UIView+MFFrame.m */, 8C1022760C1A1E6DD3819746C56B5FB7 /* UIView+MFSnapshot.h */, 59E34C1838F6D6759741ABB91258D932 /* UIView+MFSnapshot.m */, F4849E59376115AB459DDD960FDCD27C /* UIView+Shadow.h */, 401CF9D9C2D2E35A473E570FD79E9C7F /* UIView+Shadow.m */, 3700191C51E266ED138DF6E4461ED6FD /* Support Files */, ); name = MFCategory; path = MFCategory; sourceTree = ""; }; 3700191C51E266ED138DF6E4461ED6FD /* Support Files */ = { isa = PBXGroup; children = ( 73C2768FC233B49CABCDE4A05B5F25FA /* MFCategory.xcconfig */, CCDFCCE55F98E593534EDEC38283F3B9 /* MFCategory-dummy.m */, CF4BED89085AF5CA0AC42794F6A7A219 /* MFCategory-prefix.pch */, ); name = "Support Files"; path 
= "../Target Support Files/MFCategory"; sourceTree = ""; }; 40F00C3C5CCF3258B73ADBE8CE1CE127 /* Frameworks */ = { isa = PBXGroup; children = ( 1BE0EDE708E60D41A1FE5BAE8E6EAF47 /* WebP.framework */, ); name = Frameworks; sourceTree = ""; }; 5C2AFFCA54438F9A82DFA43B4F7E525E /* YYCache */ = { isa = PBXGroup; children = ( E7F11E54EA95CB17F9DC0B75433DB95A /* YYCache.h */, C319F72B335E282982A7B54082327494 /* YYCache.m */, 91E6C7401E50FC83BC216E30AD8EA51B /* YYDiskCache.h */, F50A838E25BB546FB2CDB378D98B6AC1 /* YYDiskCache.m */, 0D1FCF67F9946129754D753F5504C711 /* YYKVStorage.h */, 71AF685CEA0DE94748BE10A6E2A4C28A /* YYKVStorage.m */, F033C04E9D6CFA71BA5CECF8B24B1E0E /* YYMemoryCache.h */, BE553099A1DB48C152573B1011A5CB3B /* YYMemoryCache.m */, 74BC8D47855A4F6C5EE49C1CD725111B /* Support Files */, ); name = YYCache; path = YYCache; sourceTree = ""; }; 60B536F963E3D570EF72574522B9CC5F /* YYWebImage */ = { isa = PBXGroup; children = ( FA93AEE8C1840A47C87811DDD26E846B /* _YYWebImageSetter.h */, 6BD323327DD6FD31FE6A513987C22AC1 /* _YYWebImageSetter.m */, 794D05F11A52A3A3967BD8230E27F9CE /* CALayer+YYWebImage.h */, 8B35C2DBACDF44140CE0B83C15EC57B6 /* CALayer+YYWebImage.m */, 686CF685C828E569E655F372A698C1E2 /* MKAnnotationView+YYWebImage.h */, FEEDAA5492CAEC285AD4E4296FEBC43E /* MKAnnotationView+YYWebImage.m */, EB0CE14D78EA969F1B653326808F7D2B /* UIButton+YYWebImage.h */, E90002DC07702BAAD7B220257400D5BF /* UIButton+YYWebImage.m */, E67B35CF2E03E1BBA0B7802DA8EBA724 /* UIImage+YYWebImage.h */, 64AA21528972BB83D7AD02AE41DBF0A7 /* UIImage+YYWebImage.m */, 5CF3CD94E8D04009A849C917B7E2721D /* UIImageView+YYWebImage.h */, FA90D34F39288E1B5D5DF41A2DC634D1 /* UIImageView+YYWebImage.m */, 357B81EBC4388CAC98CE10331E9AD299 /* YYImageCache.h */, F3A0EC16B9709D89AE00FDE77D3BB41B /* YYImageCache.m */, BC995431DC4629ECDF8001C10EA907EF /* YYWebImage.h */, 2313DA32B3020FF83733FB63FECB065F /* YYWebImageManager.h */, F5D1D555371D69FF2516CDB195FB48A0 /* YYWebImageManager.m */, 
D67289DBF982BE4D6E7C5288DFDD80D4 /* YYWebImageOperation.h */, 33FA4A8B527E5A40924E3FEE1B89917D /* YYWebImageOperation.m */, EF52E76253971BA1DA4F5CBA27CCDBED /* Support Files */, ); name = YYWebImage; path = YYWebImage; sourceTree = ""; }; 67A7655CC3E0F60C4FDFE576DCB7ECF3 /* Core */ = { isa = PBXGroup; children = ( A0B75EA0D37E0EB00EF90EBEECE8F36F /* YYAnimatedImageView.h */, 47508E096FF1A69BB3E1834812787FB7 /* YYAnimatedImageView.m */, 675B03562F0E45A550E100F786BE077C /* YYFrameImage.h */, 729013888648DE5112848CB23AE2D60C /* YYFrameImage.m */, 7838A96C85C34F629DF8916882103E32 /* YYImage.h */, 75F4A127D9B097D8F215A4AE17706549 /* YYImage.m */, B69CF91C0FF5CC8070422FBAB51E6A16 /* YYImageCoder.h */, 46A4B00C9EEC3C8DB459978CEAA0B882 /* YYImageCoder.m */, 9CAC01F29611B985E772308AA7843437 /* YYSpriteSheetImage.h */, 3A4D78B7241EED991E9EA5767FFA5B35 /* YYSpriteSheetImage.m */, ); name = Core; sourceTree = ""; }; 74BC8D47855A4F6C5EE49C1CD725111B /* Support Files */ = { isa = PBXGroup; children = ( 36C2C3FCE3284A25F333CA7188A8B20A /* YYCache.xcconfig */, 497229BA559DDB5472BBE2168DA56BD9 /* YYCache-dummy.m */, 2DB471A32A577EFDB2E898DB671C67B1 /* YYCache-prefix.pch */, ); name = "Support Files"; path = "../Target Support Files/YYCache"; sourceTree = ""; }; 785553737B06AFC3DA711DDEB7D56194 /* Frameworks */ = { isa = PBXGroup; children = ( BD21D7A7E6A4C15B2A8428E5CE9F8296 /* iOS */, ); name = Frameworks; sourceTree = ""; }; 7DB346D0F39D3F0E887471402A8071AB = { isa = PBXGroup; children = ( 93A4A3777CF96A4AAC1D13BA6DCCEA73 /* Podfile */, 785553737B06AFC3DA711DDEB7D56194 /* Frameworks */, AE91D483E254F60442583A001A353DF6 /* Pods */, D3E731E151F4EC4BA9E1BFC8763CECA4 /* Products */, B8D92185B1F7862F894E69F22C90D7E5 /* Targets Support Files */, ); sourceTree = ""; }; 8B2A92EBBBDC5E3FBE46D660B8DB0AE1 /* Pods-MFPictureBrowserDemo */ = { isa = PBXGroup; children = ( 7B667F796A41AAD096200E23150B5D63 /* Pods-MFPictureBrowserDemo-acknowledgements.markdown */, 
6E19475E71B031CC1ED87BF470606C65 /* Pods-MFPictureBrowserDemo-acknowledgements.plist */, E22A84D16B0BFC973322E5A43AB3F983 /* Pods-MFPictureBrowserDemo-dummy.m */, 52188DA9E5943D6846C798060E4C45A7 /* Pods-MFPictureBrowserDemo-frameworks.sh */, 2D70D0A909FC964983AF6909D710101C /* Pods-MFPictureBrowserDemo-resources.sh */, ED0729A69815F74EF312C59E0C1D71BD /* Pods-MFPictureBrowserDemo.debug.xcconfig */, 9745E42EF92EE7FAF160701A0507326D /* Pods-MFPictureBrowserDemo.release.xcconfig */, ); name = "Pods-MFPictureBrowserDemo"; path = "Target Support Files/Pods-MFPictureBrowserDemo"; sourceTree = ""; }; A6343528AF748F27AAF52DE31242F7EB /* Support Files */ = { isa = PBXGroup; children = ( 152F77ECEC4C3DAB70FD364E55B8DE68 /* YYImage.xcconfig */, F1E3230C4E7D4DE61C64F4DF7AE30985 /* YYImage-dummy.m */, 7B5F8BDD9DAB3C9040FFC89DB0FF2E66 /* YYImage-prefix.pch */, ); name = "Support Files"; path = "../Target Support Files/YYImage"; sourceTree = ""; }; AE91D483E254F60442583A001A353DF6 /* Pods */ = { isa = PBXGroup; children = ( 32EE7697A19BD2E0F0A028313110F502 /* MFCategory */, 5C2AFFCA54438F9A82DFA43B4F7E525E /* YYCache */, F6821B55F92D08CEC110512547FE1F8D /* YYImage */, 60B536F963E3D570EF72574522B9CC5F /* YYWebImage */, ); name = Pods; sourceTree = ""; }; B8D92185B1F7862F894E69F22C90D7E5 /* Targets Support Files */ = { isa = PBXGroup; children = ( 8B2A92EBBBDC5E3FBE46D660B8DB0AE1 /* Pods-MFPictureBrowserDemo */, ); name = "Targets Support Files"; sourceTree = ""; }; B928A2EA8E04AFCA61C2EEACBDCC151A /* WebP */ = { isa = PBXGroup; children = ( 40F00C3C5CCF3258B73ADBE8CE1CE127 /* Frameworks */, ); name = WebP; sourceTree = ""; }; BD21D7A7E6A4C15B2A8428E5CE9F8296 /* iOS */ = { isa = PBXGroup; children = ( A624E91122CB033D86EBD46036B82B28 /* Accelerate.framework */, A90BE5491E0D2F0314AC354A80EA7053 /* AssetsLibrary.framework */, 67D6E55E61A18B9FCEE8B47E22D4270C /* CoreFoundation.framework */, A1F5E026DFDC125DA63FB26DF3D0B33B /* Foundation.framework */, 4418563B9EF3CB95C8F65879F546D44C 
/* ImageIO.framework */, 3F584F554FC4F3918A874F458E376711 /* MobileCoreServices.framework */, D0A3B26ECE81C89075EAD21D4DF8B1D0 /* QuartzCore.framework */, 7A72DD915EC9DA0CB8E471AB1F10DEBE /* UIKit.framework */, ); name = iOS; sourceTree = ""; }; D3E731E151F4EC4BA9E1BFC8763CECA4 /* Products */ = { isa = PBXGroup; children = ( FCCCAAB50468DCB0A67106E816BEC34D /* libMFCategory.a */, FCE259EB1CBDC358F0091FC7E138BB10 /* libPods-MFPictureBrowserDemo.a */, C59BF3C9497204CB11DB55CF490F37D4 /* libYYCache.a */, EC027519015738A199D3E531C2AD3647 /* libYYImage.a */, B0AA7F6F79C3BDF0EF518013E51E328C /* libYYWebImage.a */, ); name = Products; sourceTree = ""; }; EF52E76253971BA1DA4F5CBA27CCDBED /* Support Files */ = { isa = PBXGroup; children = ( 44DB6F17527AADEF30F125FAA1376E6F /* YYWebImage.xcconfig */, 3BF8CD7C5CFEB2D2B37356DB0BDC5F89 /* YYWebImage-dummy.m */, 71C0DCF6B40EEF430263D3E6E251C63F /* YYWebImage-prefix.pch */, ); name = "Support Files"; path = "../Target Support Files/YYWebImage"; sourceTree = ""; }; F6821B55F92D08CEC110512547FE1F8D /* YYImage */ = { isa = PBXGroup; children = ( 67A7655CC3E0F60C4FDFE576DCB7ECF3 /* Core */, A6343528AF748F27AAF52DE31242F7EB /* Support Files */, B928A2EA8E04AFCA61C2EEACBDCC151A /* WebP */, ); name = YYImage; path = YYImage; sourceTree = ""; }; /* End PBXGroup section */ /* Begin PBXHeadersBuildPhase section */ 115BCDB7958113E2566BC06D048EFA9B /* Headers */ = { isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( 94C286A53BA0D6F990F22F338376423D /* YYAnimatedImageView.h in Headers */, 9196F9BF21D72D9ECB46EB0BC24FAEF5 /* YYFrameImage.h in Headers */, 3ECA2690533A2AFC4133893200D39B12 /* YYImage.h in Headers */, BDC66EF003D69B92E15B893541B4D75B /* YYImageCoder.h in Headers */, 2C9AEC6C0FD7DEE6846616CB6A4ADF1F /* YYSpriteSheetImage.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; 46F2C47C86584442AFBFA040DE9023D7 /* Headers */ = { isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( 
82EBB9B8294B7D64E6749DBE34DA86BE /* MFCookiesManager.h in Headers */, C03783322E036B05A1D835BA5C704981 /* MFDeviceManager.h in Headers */, 105CD28A1A183013BDF2E9DC41C13025 /* MFManager.h in Headers */, 0413E125ABFFCFF246ED9ADACC43B7E4 /* MFREManager.h in Headers */, 083CC59EA97F29E4893A842CEC47FAFB /* MFSavePhotoManager.h in Headers */, F0EE23FEEB84B639624AD61D861633FA /* NSArray+Distinct.h in Headers */, 2B6522BAC13320D33D3D6ADD36526E26 /* NSData+MFHexString.h in Headers */, A43890A730F8817097457E7F1E38A96C /* NSDateFormatter+Cache.h in Headers */, 0FC58C2058A4BB7DCBDCB8D3EB3F69FA /* NSString+File.h in Headers */, C02C60E87116038B5D2D5BDB032E79DA /* NSString+MFBase64Encoder.h in Headers */, A9C18E9904C01D8400886CFAA39469F5 /* NSString+MFMD5Encoder.h in Headers */, C2D1AB19695E8C6623FAD8A998879E0A /* NSTimer+MFWeakTimer.h in Headers */, 24F75AF86066385BA5D40010E57A36B7 /* UIColor+MFHexColor.h in Headers */, 2104FA68551B02221EEA410ECDF1A14B /* UIColor+MFLDColor.h in Headers */, B9390AC9DCBDD7B25F1C803E3F1E5683 /* UIImage+Current.h in Headers */, BCA78209FFEF4AB5E9A1377DBF06398F /* UIView+MFExpandTouchSize.h in Headers */, C6D2C35599DFD780092B6371B956842A /* UIView+MFFrame.h in Headers */, 785D27C659EE598DFC0F2580F52D29E6 /* UIView+MFSnapshot.h in Headers */, 32E8FDD134677B65125211724B394D1D /* UIView+Shadow.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; E4CEE2544C13A7B2A3E3555F019991B0 /* Headers */ = { isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( 40F71A32A408C21C211D4766A1827B1E /* _YYWebImageSetter.h in Headers */, 1F1B08FB73758E0F1E3004009D7F610E /* CALayer+YYWebImage.h in Headers */, 71013DC69F2F5B727DC511AB551ECD55 /* MKAnnotationView+YYWebImage.h in Headers */, 88DEFFC47ECD0BB02DE6EDF249D78375 /* UIButton+YYWebImage.h in Headers */, 6C79AE946EAE5491C61D0D8B7EA3D50B /* UIImage+YYWebImage.h in Headers */, 29A9A76BA16EFE4672DB09693F830BD4 /* UIImageView+YYWebImage.h in Headers */, CA09FF94CAD0CF7EF9CF44C3640F9F2C /* 
YYImageCache.h in Headers */, BDDE29B59467D55BF3D9F40EDA6A3A2C /* YYWebImage.h in Headers */, E0BE34BF4E0E89BDB0DC7032034B9E5C /* YYWebImageManager.h in Headers */, 8797F9B3FCF56C89B3D02D6C403B9B31 /* YYWebImageOperation.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; F8353EFEFC6450EB98454958645CD4DA /* Headers */ = { isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( 7E63962C48E08320F84689B7E2BFBFD9 /* YYCache.h in Headers */, 68876BB41E4CF65E3B066C3266A99391 /* YYDiskCache.h in Headers */, 02E865D72099D2FD1243697923B2B4B7 /* YYKVStorage.h in Headers */, 255CFB9F22BF1D76532766EF5197562B /* YYMemoryCache.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXHeadersBuildPhase section */ /* Begin PBXNativeTarget section */ 18FCE031EFADC83B5097BF4CDDAF8C0A /* YYImage */ = { isa = PBXNativeTarget; buildConfigurationList = 3CA4CB0D8DDC3C27A68BFC3F9BAFE1FE /* Build configuration list for PBXNativeTarget "YYImage" */; buildPhases = ( 9B219ABA96956B8421E372E39A841C78 /* Sources */, 97F73D03D864D92B9B675F908DCDA8FB /* Frameworks */, 115BCDB7958113E2566BC06D048EFA9B /* Headers */, ); buildRules = ( ); dependencies = ( ); name = YYImage; productName = YYImage; productReference = EC027519015738A199D3E531C2AD3647 /* libYYImage.a */; productType = "com.apple.product-type.library.static"; }; 4BCE175B89FF4865888D899363622C97 /* YYCache */ = { isa = PBXNativeTarget; buildConfigurationList = 30A099ADC75B00E52ABAFF4B1864DCA4 /* Build configuration list for PBXNativeTarget "YYCache" */; buildPhases = ( FB266D73E77916D8F749C310FE3DA725 /* Sources */, 24E66C40C65385098DDDA3BEB942D14C /* Frameworks */, F8353EFEFC6450EB98454958645CD4DA /* Headers */, ); buildRules = ( ); dependencies = ( ); name = YYCache; productName = YYCache; productReference = C59BF3C9497204CB11DB55CF490F37D4 /* libYYCache.a */; productType = "com.apple.product-type.library.static"; }; 68FBB5AE4B739F99050CC610B9CA3D65 /* YYWebImage */ = { isa = PBXNativeTarget; 
buildConfigurationList = 0D77C3A4A548188BFA5C603028585DB6 /* Build configuration list for PBXNativeTarget "YYWebImage" */; buildPhases = ( C80D11A9D20DDD9A009D42E51DF4E32A /* Sources */, B6166C57974AC19F5F196B5C51D33020 /* Frameworks */, E4CEE2544C13A7B2A3E3555F019991B0 /* Headers */, ); buildRules = ( ); dependencies = ( 34C4B743A15245BA4F8E0135145B196D /* PBXTargetDependency */, 4DE1853C83869C22462643A100AFEB4B /* PBXTargetDependency */, ); name = YYWebImage; productName = YYWebImage; productReference = B0AA7F6F79C3BDF0EF518013E51E328C /* libYYWebImage.a */; productType = "com.apple.product-type.library.static"; }; 696B117AFDF09EFF7EE7421385BF0AA4 /* MFCategory */ = { isa = PBXNativeTarget; buildConfigurationList = 50BE0BCB4F362876C60F86B8BC09FB50 /* Build configuration list for PBXNativeTarget "MFCategory" */; buildPhases = ( E258EB08989694E627C291C2FB3A8865 /* Sources */, EE4C9D7FEAC151A0C4BA0D3DCD6E6843 /* Frameworks */, 46F2C47C86584442AFBFA040DE9023D7 /* Headers */, ); buildRules = ( ); dependencies = ( ); name = MFCategory; productName = MFCategory; productReference = FCCCAAB50468DCB0A67106E816BEC34D /* libMFCategory.a */; productType = "com.apple.product-type.library.static"; }; F7BE0A2AFD27A460924EA475C83ED559 /* Pods-MFPictureBrowserDemo */ = { isa = PBXNativeTarget; buildConfigurationList = F04A134A17FEA6B05600F88D47C8DBE1 /* Build configuration list for PBXNativeTarget "Pods-MFPictureBrowserDemo" */; buildPhases = ( 5F4B97C59CDE15F8CF017F4715142B42 /* Sources */, D98C06C61E69855FBA3C4A8986E366D1 /* Frameworks */, ); buildRules = ( ); dependencies = ( 5A8FEF9E213DC3883730877F443E009F /* PBXTargetDependency */, FF2AF7E3737F4E27A2C791466B6D2A98 /* PBXTargetDependency */, 5C2A53ED1FCCA1322CC9BAD82CEE7FF3 /* PBXTargetDependency */, D8BFA87BA55896C9CA601300AC081BED /* PBXTargetDependency */, ); name = "Pods-MFPictureBrowserDemo"; productName = "Pods-MFPictureBrowserDemo"; productReference = FCE259EB1CBDC358F0091FC7E138BB10 /* libPods-MFPictureBrowserDemo.a 
*/; productType = "com.apple.product-type.library.static"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ D41D8CD98F00B204E9800998ECF8427E /* Project object */ = { isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0930; LastUpgradeCheck = 0930; }; buildConfigurationList = 2D8E8EC45A3A1A1D94AE762CB5028504 /* Build configuration list for PBXProject "Pods" */; compatibilityVersion = "Xcode 3.2"; developmentRegion = English; hasScannedForEncodings = 0; knownRegions = ( en, ); mainGroup = 7DB346D0F39D3F0E887471402A8071AB; productRefGroup = D3E731E151F4EC4BA9E1BFC8763CECA4 /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( 696B117AFDF09EFF7EE7421385BF0AA4 /* MFCategory */, F7BE0A2AFD27A460924EA475C83ED559 /* Pods-MFPictureBrowserDemo */, 4BCE175B89FF4865888D899363622C97 /* YYCache */, 18FCE031EFADC83B5097BF4CDDAF8C0A /* YYImage */, 68FBB5AE4B739F99050CC610B9CA3D65 /* YYWebImage */, ); }; /* End PBXProject section */ /* Begin PBXSourcesBuildPhase section */ 5F4B97C59CDE15F8CF017F4715142B42 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 5D902E65B6FD3861C04DDCB57ACE93ED /* Pods-MFPictureBrowserDemo-dummy.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; 9B219ABA96956B8421E372E39A841C78 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 545EC49452E887AABE12F54BCF5EB416 /* YYAnimatedImageView.m in Sources */, 6F79380780284ED005F861FACE179D95 /* YYFrameImage.m in Sources */, 52723F23446A4DDB4E9CD073EADFC54E /* YYImage-dummy.m in Sources */, 43B1CFEE4FD272831183D1098AAE2769 /* YYImage.m in Sources */, 042EEBE27D87F926314C3D65D157D3BE /* YYImageCoder.m in Sources */, 85CADF4495AC3FD0DED98ABE434C47DA /* YYSpriteSheetImage.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; C80D11A9D20DDD9A009D42E51DF4E32A /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 5975016BDE98C9D90C82144316486F45 /* 
_YYWebImageSetter.m in Sources */, 97C5A226652C798FA68195BD204DC91F /* CALayer+YYWebImage.m in Sources */, CCF0CCD85D57D5FF6C449F3CB4B15C60 /* MKAnnotationView+YYWebImage.m in Sources */, 272DBC1ADD0B026B6625C91EDF77C771 /* UIButton+YYWebImage.m in Sources */, FED34258DE3C03376648100F27F72B33 /* UIImage+YYWebImage.m in Sources */, 944BB684DC1C161FFC494A6EBEEE275D /* UIImageView+YYWebImage.m in Sources */, 1D333DCFE52ED903A939788CD835B780 /* YYImageCache.m in Sources */, 345870329F21A4EA6978E58820D37217 /* YYWebImage-dummy.m in Sources */, 1548EA871BB792DFD1ADB5D554D5B93B /* YYWebImageManager.m in Sources */, 89608D50EB882EB4B358E475F69E5798 /* YYWebImageOperation.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; E258EB08989694E627C291C2FB3A8865 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 8DFE9DB7B082A4397A6583E5FE6AFDC7 /* MFCategory-dummy.m in Sources */, 680F900015946EE7E91C2FE52E52FD1F /* MFCookiesManager.m in Sources */, 88C85D77169D92CCB835699EE03BB8F2 /* MFDeviceManager.m in Sources */, F65E778FC8D8C242EA652C61BC75C943 /* MFManager.m in Sources */, AE1287B5E96B3DD6CBC14FA306B4A0E6 /* MFREManager.m in Sources */, 99B03C4ECAB497CA6E198DD1BA03D46C /* MFSavePhotoManager.m in Sources */, 0C1F5495E2C48A6CDBE80ED416284D59 /* NSArray+Distinct.m in Sources */, 7DFCF4696E65DB0F0E5377CC4576C6E3 /* NSData+MFHexString.m in Sources */, 749B82B89BD1E41420389983F4CB9D6D /* NSDateFormatter+Cache.m in Sources */, 5AB9C76511C219DD2B281BC4BC22D976 /* NSString+File.m in Sources */, FB1025C581A7F63898FB0A3FFB1E0CCC /* NSString+MFBase64Encoder.m in Sources */, 14A3306CD2222ABCED413D48BBF24307 /* NSString+MFMD5Encoder.m in Sources */, 0E5C782DD1565CBFA38A7BFEB2AFE5AA /* NSTimer+MFWeakTimer.m in Sources */, 60A37919DA2F26EDB3104D36E983E01E /* UIColor+MFHexColor.m in Sources */, E648338CBBBAA0E5DCD952256CA80E7E /* UIColor+MFLDColor.m in Sources */, 18D8488FB14EC002DEFA2CBE1B9D6DF1 /* UIImage+Current.m in Sources */, 
F62510BD3FA536537AD3326C4F20C87B /* UIView+MFExpandTouchSize.m in Sources */, 94CAFB4DBD9B82564FDA21413629BF4D /* UIView+MFFrame.m in Sources */, 10BAAA9BF7963F6E56402B76F8794D99 /* UIView+MFSnapshot.m in Sources */, 920E2B6A1C691D358B30FD5EFB417CC8 /* UIView+Shadow.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; FB266D73E77916D8F749C310FE3DA725 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 66631C381422CA569BB9DFF407CDE6B5 /* YYCache-dummy.m in Sources */, 7BE0872A542AEB80D672FBBE1CDD9A9B /* YYCache.m in Sources */, 6EB8CC0852A66546FF2F6A983D4C504E /* YYDiskCache.m in Sources */, AEC270D9B9F21B35F6BC70BEFBA1BB86 /* YYKVStorage.m in Sources */, 73A7AF0F36FF0FB9A5AD57E310E8805A /* YYMemoryCache.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ 34C4B743A15245BA4F8E0135145B196D /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = YYCache; target = 4BCE175B89FF4865888D899363622C97 /* YYCache */; targetProxy = 1D21CF61942F9479B58C85A062D3D9BC /* PBXContainerItemProxy */; }; 4DE1853C83869C22462643A100AFEB4B /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = YYImage; target = 18FCE031EFADC83B5097BF4CDDAF8C0A /* YYImage */; targetProxy = 4AF9F1B8F2DF654228223281F30D8405 /* PBXContainerItemProxy */; }; 5A8FEF9E213DC3883730877F443E009F /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = MFCategory; target = 696B117AFDF09EFF7EE7421385BF0AA4 /* MFCategory */; targetProxy = 396E384C2819E11617867A94805B3959 /* PBXContainerItemProxy */; }; 5C2A53ED1FCCA1322CC9BAD82CEE7FF3 /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = YYImage; target = 18FCE031EFADC83B5097BF4CDDAF8C0A /* YYImage */; targetProxy = 5D8BAF5C182B3705C9AEC0AC853058B9 /* PBXContainerItemProxy */; }; D8BFA87BA55896C9CA601300AC081BED /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = YYWebImage; target = 
68FBB5AE4B739F99050CC610B9CA3D65 /* YYWebImage */; targetProxy = 9075AAD3904983CF707E3AB7E6E05403 /* PBXContainerItemProxy */; }; FF2AF7E3737F4E27A2C791466B6D2A98 /* PBXTargetDependency */ = { isa = PBXTargetDependency; name = YYCache; target = 4BCE175B89FF4865888D899363622C97 /* YYCache */; targetProxy = EB0EF67BCCD1F4E2255BD42067D45625 /* PBXContainerItemProxy */; }; /* End PBXTargetDependency section */ /* Begin XCBuildConfiguration section */ 0D93E1695669076A303F8E474AA57799 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 44DB6F17527AADEF30F125FAA1376E6F /* YYWebImage.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/YYWebImage/YYWebImage-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYWebImage; PRODUCT_NAME = YYWebImage; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 1114E3974F44495546F5565C63739E29 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 73C2768FC233B49CABCDE4A05B5F25FA /* MFCategory.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/MFCategory/MFCategory-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 9.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = MFCategory; PRODUCT_NAME = MFCategory; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; }; 
name = Debug; }; 1818A387811A151C6155D26F30BA694A /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 152F77ECEC4C3DAB70FD364E55B8DE68 /* YYImage.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/YYImage/YYImage-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYImage; PRODUCT_NAME = YYImage; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; 199D972A13F2B4C56847F7A89CCA83BC /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_ENABLE_OBJC_WEAK = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = 
YES; CODE_SIGNING_ALLOWED = NO; CODE_SIGNING_REQUIRED = NO; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "POD_CONFIGURATION_DEBUG=1", "DEBUG=1", "$(inherited)", ); GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; PRODUCT_NAME = "$(TARGET_NAME)"; STRIP_INSTALLED_PRODUCT = NO; SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; SYMROOT = "${SRCROOT}/../build"; }; name = Debug; }; 53708C6F1153E182E12337D8E0EFEC02 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 9745E42EF92EE7FAF160701A0507326D /* Pods-MFPictureBrowserDemo.release.xcconfig */; buildSettings = { ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MACH_O_TYPE = staticlib; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PODS_ROOT = "$(SRCROOT)"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}"; SDKROOT = iphoneos; SKIP_INSTALL = YES; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; 5D8D5DB4A5A30544F49D3CE9621DD096 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 36C2C3FCE3284A25F333CA7188A8B20A /* YYCache.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support 
Files/YYCache/YYCache-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYCache; PRODUCT_NAME = YYCache; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 6344CE8B4F9CEBF03E7A9D4C1AFC924E /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = 152F77ECEC4C3DAB70FD364E55B8DE68 /* YYImage.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/YYImage/YYImage-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYImage; PRODUCT_NAME = YYImage; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; 708581FC318DAA60593391EB07E2EB5C /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 44DB6F17527AADEF30F125FAA1376E6F /* YYWebImage.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/YYWebImage/YYWebImage-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYWebImage; PRODUCT_NAME = YYWebImage; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; 8A27C223D98B5ABDB4D96B75F0419730 /* 
Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 36C2C3FCE3284A25F333CA7188A8B20A /* YYCache.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/YYCache/YYCache-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 6.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = YYCache; PRODUCT_NAME = YYCache; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; C8A0990C92AA9DC668F986271BB0FFF1 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 73C2768FC233B49CABCDE4A05B5F25FA /* MFCategory.xcconfig */; buildSettings = { CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; GCC_PREFIX_HEADER = "Target Support Files/MFCategory/MFCategory-prefix.pch"; IPHONEOS_DEPLOYMENT_TARGET = 9.0; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PRIVATE_HEADERS_FOLDER_PATH = ""; PRODUCT_MODULE_NAME = MFCategory; PRODUCT_NAME = MFCategory; PUBLIC_HEADERS_FOLDER_PATH = ""; SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) "; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; name = Release; }; D0E7A097A1895032B78C742CAE509424 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = ED0729A69815F74EF312C59E0C1D71BD /* Pods-MFPictureBrowserDemo.debug.xcconfig */; buildSettings = { ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = NO; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=appletvos*]" = ""; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; "CODE_SIGN_IDENTITY[sdk=watchos*]" = ""; 
IPHONEOS_DEPLOYMENT_TARGET = 9.0; MACH_O_TYPE = staticlib; OTHER_LDFLAGS = ""; OTHER_LIBTOOLFLAGS = ""; PODS_ROOT = "$(SRCROOT)"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.${PRODUCT_NAME:rfc1034identifier}"; SDKROOT = iphoneos; SKIP_INSTALL = YES; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; FDB2FC4A1E5891381CD9D922145497F1 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_ENABLE_OBJC_WEAK = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGNING_ALLOWED = NO; CODE_SIGNING_REQUIRED = NO; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu11; GCC_NO_COMMON_BLOCKS = YES; GCC_PREPROCESSOR_DEFINITIONS = ( "POD_CONFIGURATION_RELEASE=1", "$(inherited)", ); GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 
GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 9.0; MTL_ENABLE_DEBUG_INFO = NO; PRODUCT_NAME = "$(TARGET_NAME)"; STRIP_INSTALLED_PRODUCT = NO; SYMROOT = "${SRCROOT}/../build"; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ 0D77C3A4A548188BFA5C603028585DB6 /* Build configuration list for PBXNativeTarget "YYWebImage" */ = { isa = XCConfigurationList; buildConfigurations = ( 0D93E1695669076A303F8E474AA57799 /* Debug */, 708581FC318DAA60593391EB07E2EB5C /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 2D8E8EC45A3A1A1D94AE762CB5028504 /* Build configuration list for PBXProject "Pods" */ = { isa = XCConfigurationList; buildConfigurations = ( 199D972A13F2B4C56847F7A89CCA83BC /* Debug */, FDB2FC4A1E5891381CD9D922145497F1 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 30A099ADC75B00E52ABAFF4B1864DCA4 /* Build configuration list for PBXNativeTarget "YYCache" */ = { isa = XCConfigurationList; buildConfigurations = ( 5D8D5DB4A5A30544F49D3CE9621DD096 /* Debug */, 8A27C223D98B5ABDB4D96B75F0419730 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 3CA4CB0D8DDC3C27A68BFC3F9BAFE1FE /* Build configuration list for PBXNativeTarget "YYImage" */ = { isa = XCConfigurationList; buildConfigurations = ( 6344CE8B4F9CEBF03E7A9D4C1AFC924E /* Debug */, 1818A387811A151C6155D26F30BA694A /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; 50BE0BCB4F362876C60F86B8BC09FB50 /* Build configuration list for PBXNativeTarget "MFCategory" */ = { isa = XCConfigurationList; buildConfigurations = ( 1114E3974F44495546F5565C63739E29 /* Debug */, C8A0990C92AA9DC668F986271BB0FFF1 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; F04A134A17FEA6B05600F88D47C8DBE1 /* Build configuration list for 
PBXNativeTarget "Pods-MFPictureBrowserDemo" */ = { isa = XCConfigurationList; buildConfigurations = ( D0E7A097A1895032B78C742CAE509424 /* Debug */, 53708C6F1153E182E12337D8E0EFEC02 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; rootObject = D41D8CD98F00B204E9800998ECF8427E /* Project object */; } ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/MFCategory/MFCategory-dummy.m ================================================ #import @interface PodsDummy_MFCategory : NSObject @end @implementation PodsDummy_MFCategory @end ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/MFCategory/MFCategory-prefix.pch ================================================ #ifdef __OBJC__ #import #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/MFCategory/MFCategory.xcconfig ================================================ CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/MFCategory GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/MFCategory" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/MFCategory" OTHER_LDFLAGS = -framework "Foundation" -framework "UIKit" PODS_BUILD_DIR = ${BUILD_DIR} PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_ROOT = ${SRCROOT} PODS_TARGET_SRCROOT = ${PODS_ROOT}/MFCategory PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} SKIP_INSTALL = YES ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support 
Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo-acknowledgements.markdown ================================================ # Acknowledgements This application makes use of the following third party libraries: ## MFCategory MIT License Copyright (c) 2018 GodzzZZZ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ## YYCache The MIT License (MIT) Copyright (c) 2015 ibireme Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ## YYImage The MIT License (MIT) Copyright (c) 2015 ibireme Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
## YYWebImage The MIT License (MIT) Copyright (c) 2015 ibireme Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
Generated by CocoaPods - https://cocoapods.org ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo-acknowledgements.plist ================================================ PreferenceSpecifiers FooterText This application makes use of the following third party libraries: Title Acknowledgements Type PSGroupSpecifier FooterText MIT License Copyright (c) 2018 GodzzZZZ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
License MIT Title MFCategory Type PSGroupSpecifier FooterText The MIT License (MIT) Copyright (c) 2015 ibireme <ibireme@gmail.com> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. License MIT Title YYCache Type PSGroupSpecifier FooterText The MIT License (MIT) Copyright (c) 2015 ibireme <ibireme@gmail.com> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. License MIT Title YYImage Type PSGroupSpecifier FooterText The MIT License (MIT) Copyright (c) 2015 ibireme <ibireme@gmail.com> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
License MIT Title YYWebImage Type PSGroupSpecifier FooterText Generated by CocoaPods - https://cocoapods.org Title Type PSGroupSpecifier StringsTable Acknowledgements Title Acknowledgements ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo-dummy.m ================================================ #import @interface PodsDummy_Pods_MFPictureBrowserDemo : NSObject @end @implementation PodsDummy_Pods_MFPictureBrowserDemo @end ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo-frameworks.sh ================================================ #!/bin/sh set -e set -u set -o pipefail if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" # Used as a return value for each invocation of `strip_invalid_archs` function. STRIP_BINARY_RETVAL=0 # This protects against multiple targets copying the same framework dependency at the same time. 
The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # Use filter instead of exclude so missing patterns don't throw errors. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. 
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]}) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" if [ -r "$source" ]; then # Copy the dSYM into a the targets temp dir. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .framework.dSYM "$source")" binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}" # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then strip_invalid_archs "$binary" fi if [[ $STRIP_BINARY_RETVAL == 1 ]]; then # Move the stripped file into its final destination. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM" fi fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identitiy echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current 
build architectures ($ARCHS)." STRIP_BINARY_RETVAL=0 return fi stripped="" for arch in $binary_archs; do if ! [[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" || exit 1 stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=1 } if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo-resources.sh ================================================ #!/bin/sh set -e set -u set -o pipefail if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy # resources to, so exit 0 (signalling the script phase was successful). exit 0 fi mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt > "$RESOURCES_TO_COPY" XCASSET_FILES=() # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") case "${TARGETED_DEVICE_FAMILY:-}" in 1,2) TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone" ;; 1) TARGET_DEVICE_ARGS="--target-device iphone" ;; 2) TARGET_DEVICE_ARGS="--target-device ipad" ;; 3) TARGET_DEVICE_ARGS="--target-device tv" ;; 4) TARGET_DEVICE_ARGS="--target-device watch" ;; *) TARGET_DEVICE_ARGS="--target-device mac" ;; esac install_resource() { if [[ "$1" = /* ]] ; then RESOURCE_PATH="$1" else RESOURCE_PATH="${PODS_ROOT}/$1" fi if [[ ! -e "$RESOURCE_PATH" ]] ; then cat << EOM error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script. 
EOM exit 1 fi case $RESOURCE_PATH in *.storyboard) echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS} ;; *.xib) echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS} ;; *.framework) echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" ;; *.xcdatamodel) echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true 
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom" ;; *.xcdatamodeld) echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd" ;; *.xcmappingmodel) echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm" ;; *.xcassets) ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH" XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE") ;; *) echo "$RESOURCE_PATH" || true echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY" ;; esac } mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" fi rm -f "$RESOURCES_TO_COPY" if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ] then # Find all other xcassets (this unfortunately includes those of path pods and other targets). 
OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d) while read line; do if [[ $line != "${PODS_ROOT}*" ]]; then XCASSET_FILES+=("$line") fi done <<<"$OTHER_XCASSETS" if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" else printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_BUILD_DIR}/assetcatalog_generated_info.plist" fi fi ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo.debug.xcconfig ================================================ FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/YYImage/Vendor" GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/MFCategory" "${PODS_ROOT}/Headers/Public/YYCache" "${PODS_ROOT}/Headers/Public/YYImage" "${PODS_ROOT}/Headers/Public/YYWebImage" LIBRARY_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/MFCategory" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" OTHER_CFLAGS = $(inherited) -isystem "${PODS_ROOT}/Headers/Public" -isystem "${PODS_ROOT}/Headers/Public/MFCategory" -isystem "${PODS_ROOT}/Headers/Public/YYCache" -isystem 
"${PODS_ROOT}/Headers/Public/YYImage" -isystem "${PODS_ROOT}/Headers/Public/YYWebImage" OTHER_LDFLAGS = $(inherited) -ObjC -l"MFCategory" -l"YYCache" -l"YYImage" -l"YYWebImage" -l"sqlite3" -l"z" -framework "Accelerate" -framework "AssetsLibrary" -framework "CoreFoundation" -framework "Foundation" -framework "ImageIO" -framework "MobileCoreServices" -framework "QuartzCore" -framework "UIKit" -framework "WebP" PODS_BUILD_DIR = ${BUILD_DIR} PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_PODFILE_DIR_PATH = ${SRCROOT}/. PODS_ROOT = ${SRCROOT}/Pods ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/Pods-MFPictureBrowserDemo/Pods-MFPictureBrowserDemo.release.xcconfig ================================================ FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/YYImage/Vendor" GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/MFCategory" "${PODS_ROOT}/Headers/Public/YYCache" "${PODS_ROOT}/Headers/Public/YYImage" "${PODS_ROOT}/Headers/Public/YYWebImage" LIBRARY_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/MFCategory" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" OTHER_CFLAGS = $(inherited) -isystem "${PODS_ROOT}/Headers/Public" -isystem "${PODS_ROOT}/Headers/Public/MFCategory" -isystem "${PODS_ROOT}/Headers/Public/YYCache" -isystem "${PODS_ROOT}/Headers/Public/YYImage" -isystem "${PODS_ROOT}/Headers/Public/YYWebImage" OTHER_LDFLAGS = $(inherited) -ObjC -l"MFCategory" -l"YYCache" -l"YYImage" -l"YYWebImage" -l"sqlite3" -l"z" -framework "Accelerate" -framework "AssetsLibrary" -framework "CoreFoundation" -framework "Foundation" -framework "ImageIO" -framework "MobileCoreServices" -framework "QuartzCore" -framework "UIKit" -framework "WebP" PODS_BUILD_DIR = ${BUILD_DIR} 
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_PODFILE_DIR_PATH = ${SRCROOT}/. PODS_ROOT = ${SRCROOT}/Pods ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYCache/YYCache-dummy.m ================================================ #import @interface PodsDummy_YYCache : NSObject @end @implementation PodsDummy_YYCache @end ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYCache/YYCache-prefix.pch ================================================ #ifdef __OBJC__ #import #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYCache/YYCache.xcconfig ================================================ CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/YYCache GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/YYCache" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/YYCache" OTHER_LDFLAGS = -l"sqlite3" -framework "CoreFoundation" -framework "QuartzCore" -framework "UIKit" PODS_BUILD_DIR = ${BUILD_DIR} PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_ROOT = ${SRCROOT} PODS_TARGET_SRCROOT = ${PODS_ROOT}/YYCache PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} SKIP_INSTALL = YES ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYImage/YYImage-dummy.m ================================================ #import @interface PodsDummy_YYImage : NSObject @end @implementation PodsDummy_YYImage @end ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support 
Files/YYImage/YYImage-prefix.pch ================================================ #ifdef __OBJC__ #import #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYImage/YYImage.xcconfig ================================================ CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/YYImage FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/YYImage/Vendor" GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/YYImage" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/YYImage" OTHER_LDFLAGS = -l"z" -framework "Accelerate" -framework "AssetsLibrary" -framework "CoreFoundation" -framework "ImageIO" -framework "MobileCoreServices" -framework "QuartzCore" -framework "UIKit" -framework "WebP" PODS_BUILD_DIR = ${BUILD_DIR} PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_ROOT = ${SRCROOT} PODS_TARGET_SRCROOT = ${PODS_ROOT}/YYImage PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} SKIP_INSTALL = YES ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYWebImage/YYWebImage-dummy.m ================================================ #import @interface PodsDummy_YYWebImage : NSObject @end @implementation PodsDummy_YYWebImage @end ================================================ FILE: MFPictureBrowserDemo/Pods/Target Support Files/YYWebImage/YYWebImage-prefix.pch ================================================ #ifdef __OBJC__ #import #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif ================================================ FILE: 
MFPictureBrowserDemo/Pods/Target Support Files/YYWebImage/YYWebImage.xcconfig ================================================ CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/YYWebImage" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/YYCache" "${PODS_ROOT}/Headers/Public/YYImage" "${PODS_ROOT}/Headers/Public/YYWebImage" LIBRARY_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" OTHER_LDFLAGS = -framework "Accelerate" -framework "AssetsLibrary" -framework "CoreFoundation" -framework "ImageIO" -framework "MobileCoreServices" -framework "QuartzCore" -framework "UIKit" PODS_BUILD_DIR = ${BUILD_DIR} PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) PODS_ROOT = ${SRCROOT} PODS_TARGET_SRCROOT = ${PODS_ROOT}/YYWebImage PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} SKIP_INSTALL = YES ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/LICENSE ================================================ The MIT License (MIT) Copyright (c) 2015 ibireme Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/README.md ================================================ YYCache ============== [![License MIT](https://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/ibireme/YYCache/master/LICENSE)  [![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)](https://github.com/Carthage/Carthage)  [![CocoaPods](http://img.shields.io/cocoapods/v/YYCache.svg?style=flat)](http://cocoapods.org/?q= YYCache)  [![CocoaPods](http://img.shields.io/cocoapods/p/YYCache.svg?style=flat)](http://cocoapods.org/?q= YYCache)  [![Support](https://img.shields.io/badge/support-iOS%206%2B%20-blue.svg?style=flat)](https://www.apple.com/nl/ios/)  [![Build Status](https://travis-ci.org/ibireme/YYCache.svg?branch=master)](https://travis-ci.org/ibireme/YYCache) High performance cache framework for iOS.
(It's a component of [YYKit](https://github.com/ibireme/YYKit)) Performance ============== ![Memory cache benchmark result](https://raw.github.com/ibireme/YYCache/master/Benchmark/Result_memory.png ) ![Disk benchmark result](https://raw.github.com/ibireme/YYCache/master/Benchmark/Result_disk.png ) You may [download](http://www.sqlite.org/download.html) and compile the latest version of sqlite and ignore the libsqlite3.dylib in iOS system to get higher performance. See `Benchmark/CacheBenchmark.xcodeproj` for more benchmark cases. Features ============== - **LRU**: Objects can be evicted with a least-recently-used algorithm. - **Limitation**: Cache limitation can be controlled with count, cost, age and free space. - **Compatibility**: The API is similar to `NSCache`; all methods are thread-safe. - **Memory Cache** - **Release Control**: Objects can be released synchronously/asynchronously on the main thread or a background thread. - **Automatically Clear**: It can be configured to automatically evict objects when the app receives a memory warning or enters the background. - **Disk Cache** - **Customization**: It supports custom archive and unarchive methods to store objects which do not adopt NSCoding. - **Storage Type Control**: It can automatically decide the storage type (sqlite / file) for each object to get better performance. Installation ============== ### CocoaPods 1. Add `pod 'YYCache'` to your Podfile. 2. Run `pod install` or `pod update`. 3. Import \<YYCache/YYCache.h\>. ### Carthage 1. Add `github "ibireme/YYCache"` to your Cartfile. 2. Run `carthage update --platform ios` and add the framework to your project. 3. Import \<YYCache/YYCache.h\>. ### Manually 1. Download all the files in the YYCache subdirectory. 2. Add the source files to your Xcode project. 3. Link with required frameworks: * UIKit * CoreFoundation * QuartzCore * sqlite3 4. Import `YYCache.h`. Documentation ============== Full API documentation is available on [CocoaDocs](http://cocoadocs.org/docsets/YYCache/).
You can also install documentation locally using [appledoc](https://github.com/tomaz/appledoc). Requirements ============== This library requires `iOS 6.0+` and `Xcode 7.0+`. License ============== YYCache is provided under the MIT license. See LICENSE file for details.

--- 中文介绍 ============== 高性能 iOS 缓存框架。
(该项目是 [YYKit](https://github.com/ibireme/YYKit) 组件之一) 性能 ============== iPhone 6 上,内存缓存每秒响应次数 (越高越好): ![Memory cache benchmark result](https://raw.github.com/ibireme/YYCache/master/Benchmark/Result_memory.png ) iPhone 6 上,磁盘缓存每秒响应次数 (越高越好): ![Disk benchmark result](https://raw.github.com/ibireme/YYCache/master/Benchmark/Result_disk.png ) 推荐到 SQLite 官网[下载](http://www.sqlite.org/download.html)和编译最新的 SQLite,以替换 iOS 自带的 libsqlite3.dylib,以获得最高 1.5~3 倍的性能提升。 更多测试代码和用例见 `Benchmark/CacheBenchmark.xcodeproj`。 特性 ============== - **LRU**: 缓存支持 LRU (least-recently-used) 淘汰算法。 - **缓存控制**: 支持多种缓存控制方法:总数量、总大小、存活时间、空闲空间。 - **兼容性**: API 基本和 `NSCache` 保持一致, 所有方法都是线程安全的。 - **内存缓存** - **对象释放控制**: 对象的释放(release) 可以配置为同步或异步进行,可以配置在主线程或后台线程进行。 - **自动清空**: 当收到内存警告或 App 进入后台时,缓存可以配置为自动清空。 - **磁盘缓存** - **可定制性**: 磁盘缓存支持自定义的归档解档方法,以支持那些没有实现 NSCoding 协议的对象。 - **存储类型控制**: 磁盘缓存支持对每个对象的存储类型 (SQLite/文件) 进行自动或手动控制,以获得更高的存取性能。 安装 ============== ### CocoaPods 1. 在 Podfile 中添加 `pod 'YYCache'`。 2. 执行 `pod install` 或 `pod update`。 3. 导入 \。 ### Carthage 1. 在 Cartfile 中添加 `github "ibireme/YYCache"`。 2. 执行 `carthage update --platform ios` 并将生成的 framework 添加到你的工程。 3. 导入 \。 ### 手动安装 1. 下载 YYCache 文件夹内的所有内容。 2. 将 YYCache 内的源文件添加(拖放)到你的工程。 3. 链接以下的 frameworks: * UIKit * CoreFoundation * QuartzCore * sqlite3 4. 导入 `YYCache.h`。 文档 ============== 你可以在 [CocoaDocs](http://cocoadocs.org/docsets/YYCache/) 查看在线 API 文档,也可以用 [appledoc](https://github.com/tomaz/appledoc) 本地生成文档。 系统要求 ============== 该项目最低支持 `iOS 6.0` 和 `Xcode 7.0`。 许可证 ============== YYCache 使用 MIT 许可证,详情见 LICENSE 文件。 相关链接 ============== [YYCache 设计思路与技术细节](http://blog.ibireme.com/2015/10/26/yycache/) ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYCache.h ================================================ // // YYCache.h // YYCache // // Created by ibireme on 15/2/13. // Copyright (c) 2015 ibireme. 
// // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #if __has_include() FOUNDATION_EXPORT double YYCacheVersionNumber; FOUNDATION_EXPORT const unsigned char YYCacheVersionString[]; #import #import #import #elif __has_include() #import #import #import #else #import "YYMemoryCache.h" #import "YYDiskCache.h" #import "YYKVStorage.h" #endif NS_ASSUME_NONNULL_BEGIN /** `YYCache` is a thread safe key-value cache. It use `YYMemoryCache` to store objects in a small and fast memory cache, and use `YYDiskCache` to persisting objects to a large and slow disk cache. See `YYMemoryCache` and `YYDiskCache` for more information. */ @interface YYCache : NSObject /** The name of the cache, readonly. */ @property (copy, readonly) NSString *name; /** The underlying memory cache. see `YYMemoryCache` for more information.*/ @property (strong, readonly) YYMemoryCache *memoryCache; /** The underlying disk cache. see `YYDiskCache` for more information.*/ @property (strong, readonly) YYDiskCache *diskCache; /** Create a new instance with the specified name. Multiple instances with the same name will make the cache unstable. @param name The name of the cache. It will create a dictionary with the name in the app's caches dictionary for disk cache. Once initialized you should not read and write to this directory. @result A new cache object, or nil if an error occurs. */ - (nullable instancetype)initWithName:(NSString *)name; /** Create a new instance with the specified path. Multiple instances with the same name will make the cache unstable. @param path Full path of a directory in which the cache will write data. Once initialized you should not read and write to this directory. @result A new cache object, or nil if an error occurs. */ - (nullable instancetype)initWithPath:(NSString *)path NS_DESIGNATED_INITIALIZER; /** Convenience Initializers Create a new instance with the specified name. 
Multiple instances with the same name will make the cache unstable. @param name The name of the cache. It will create a dictionary with the name in the app's caches dictionary for disk cache. Once initialized you should not read and write to this directory. @result A new cache object, or nil if an error occurs. */ + (nullable instancetype)cacheWithName:(NSString *)name; /** Convenience Initializers Create a new instance with the specified path. Multiple instances with the same name will make the cache unstable. @param path Full path of a directory in which the cache will write data. Once initialized you should not read and write to this directory. @result A new cache object, or nil if an error occurs. */ + (nullable instancetype)cacheWithPath:(NSString *)path; - (instancetype)init UNAVAILABLE_ATTRIBUTE; + (instancetype)new UNAVAILABLE_ATTRIBUTE; #pragma mark - Access Methods ///============================================================================= /// @name Access Methods ///============================================================================= /** Returns a boolean value that indicates whether a given key is in cache. This method may blocks the calling thread until file read finished. @param key A string identifying the value. If nil, just return NO. @return Whether the key is in cache. */ - (BOOL)containsObjectForKey:(NSString *)key; /** Returns a boolean value with the block that indicates whether a given key is in cache. This method returns immediately and invoke the passed block in background queue when the operation finished. @param key A string identifying the value. If nil, just return NO. @param block A block which will be invoked in background queue when finished. */ - (void)containsObjectForKey:(NSString *)key withBlock:(nullable void(^)(NSString *key, BOOL contains))block; /** Returns the value associated with a given key. This method may blocks the calling thread until file read finished. @param key A string identifying the value. 
If nil, just return nil. @return The value associated with key, or nil if no value is associated with key. */ - (nullable id)objectForKey:(NSString *)key; /** Returns the value associated with a given key. This method returns immediately and invoke the passed block in background queue when the operation finished. @param key A string identifying the value. If nil, just return nil. @param block A block which will be invoked in background queue when finished. */ - (void)objectForKey:(NSString *)key withBlock:(nullable void(^)(NSString *key, id object))block; /** Sets the value of the specified key in the cache. This method may blocks the calling thread until file write finished. @param object The object to be stored in the cache. If nil, it calls `removeObjectForKey:`. @param key The key with which to associate the value. If nil, this method has no effect. */ - (void)setObject:(nullable id)object forKey:(NSString *)key; /** Sets the value of the specified key in the cache. This method returns immediately and invoke the passed block in background queue when the operation finished. @param object The object to be stored in the cache. If nil, it calls `removeObjectForKey:`. @param block A block which will be invoked in background queue when finished. */ - (void)setObject:(nullable id)object forKey:(NSString *)key withBlock:(nullable void(^)(void))block; /** Removes the value of the specified key in the cache. This method may blocks the calling thread until file delete finished. @param key The key identifying the value to be removed. If nil, this method has no effect. */ - (void)removeObjectForKey:(NSString *)key; /** Removes the value of the specified key in the cache. This method returns immediately and invoke the passed block in background queue when the operation finished. @param key The key identifying the value to be removed. If nil, this method has no effect. @param block A block which will be invoked in background queue when finished. 
*/ - (void)removeObjectForKey:(NSString *)key withBlock:(nullable void(^)(NSString *key))block; /** Empties the cache. This method may blocks the calling thread until file delete finished. */ - (void)removeAllObjects; /** Empties the cache. This method returns immediately and invoke the passed block in background queue when the operation finished. @param block A block which will be invoked in background queue when finished. */ - (void)removeAllObjectsWithBlock:(void(^)(void))block; /** Empties the cache with block. This method returns immediately and executes the clear operation with block in background. @warning You should not send message to this instance in these blocks. @param progress This block will be invoked during removing, pass nil to ignore. @param end This block will be invoked at the end, pass nil to ignore. */ - (void)removeAllObjectsWithProgressBlock:(nullable void(^)(int removedCount, int totalCount))progress endBlock:(nullable void(^)(BOOL error))end; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYCache.m ================================================ // // YYCache.m // YYCache // // Created by ibireme on 15/2/13. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. 
//
#import "YYCache.h"
#import "YYMemoryCache.h"
#import "YYDiskCache.h"

@implementation YYCache

// Plain -init has no sensible default location; log a hint and forward to
// -initWithPath: with an empty path (which returns nil, see below).
- (instancetype) init {
    NSLog(@"Use \"initWithName\" or \"initWithPath\" to create YYCache instance.");
    return [self initWithPath:@""];
}

// Convenience: roots the cache at <app Caches dir>/<name>.
// Returns nil for a nil/empty name.
- (instancetype)initWithName:(NSString *)name {
    if (name.length == 0) return nil;
    NSString *cacheFolder = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject];
    NSString *path = [cacheFolder stringByAppendingPathComponent:name];
    return [self initWithPath:path];
}

// Designated initializer. Builds the backing disk cache first — if that fails
// the whole init fails with nil — then a memory cache named after the path's
// last component. Returns nil for a nil/empty path.
- (instancetype)initWithPath:(NSString *)path {
    if (path.length == 0) return nil;
    YYDiskCache *diskCache = [[YYDiskCache alloc] initWithPath:path];
    if (!diskCache) return nil;
    NSString *name = [path lastPathComponent];
    YYMemoryCache *memoryCache = [YYMemoryCache new];
    memoryCache.name = name;
    self = [super init];
    _name = name;
    _diskCache = diskCache;
    _memoryCache = memoryCache;
    return self;
}

// Factory methods mirroring the two initializers.
+ (instancetype)cacheWithName:(NSString *)name {
    return [[self alloc] initWithName:name];
}

+ (instancetype)cacheWithPath:(NSString *)path {
    return [[self alloc] initWithPath:path];
}

// Synchronous containment check: a hit in either tier counts. The disk-tier
// check may block the calling thread on file/sqlite I/O (see header).
- (BOOL)containsObjectForKey:(NSString *)key {
    return [_memoryCache containsObjectForKey:key] || [_diskCache containsObjectForKey:key];
}

// Asynchronous containment check. Memory-tier hits are reported via a global
// queue so the block is always invoked off the caller's call stack; misses
// are delegated to the disk cache's own async lookup.
- (void)containsObjectForKey:(NSString *)key withBlock:(void (^)(NSString *key, BOOL contains))block {
    if (!block) return; // nothing to report to — skip the lookup entirely
    if ([_memoryCache containsObjectForKey:key]) {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            block(key, YES);
        });
    } else {
        [_diskCache containsObjectForKey:key withBlock:block];
    }
}

// Synchronous read-through lookup: on a memory-tier miss, fetch from disk and
// promote the value back into the memory cache for faster subsequent reads.
- (id)objectForKey:(NSString *)key {
    id object = [_memoryCache objectForKey:key];
    if (!object) {
        object = [_diskCache objectForKey:key];
        if (object) {
            [_memoryCache setObject:object forKey:key];
        }
    }
    return object;
}

// Asynchronous read-through lookup (memory tier first, then disk).
- (void)objectForKey:(NSString *)key withBlock:(void (^)(NSString *key, id object))block {
    if (!block) return;
    id object = [_memoryCache objectForKey:key];
    if (object) {
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ block(key, object); }); } else { [_diskCache objectForKey:key withBlock:^(NSString *key, id object) { if (object && ![_memoryCache objectForKey:key]) { [_memoryCache setObject:object forKey:key]; } block(key, object); }]; } } - (void)setObject:(id)object forKey:(NSString *)key { [_memoryCache setObject:object forKey:key]; [_diskCache setObject:object forKey:key]; } - (void)setObject:(id)object forKey:(NSString *)key withBlock:(void (^)(void))block { [_memoryCache setObject:object forKey:key]; [_diskCache setObject:object forKey:key withBlock:block]; } - (void)removeObjectForKey:(NSString *)key { [_memoryCache removeObjectForKey:key]; [_diskCache removeObjectForKey:key]; } - (void)removeObjectForKey:(NSString *)key withBlock:(void (^)(NSString *key))block { [_memoryCache removeObjectForKey:key]; [_diskCache removeObjectForKey:key withBlock:block]; } - (void)removeAllObjects { [_memoryCache removeAllObjects]; [_diskCache removeAllObjects]; } - (void)removeAllObjectsWithBlock:(void(^)(void))block { [_memoryCache removeAllObjects]; [_diskCache removeAllObjectsWithBlock:block]; } - (void)removeAllObjectsWithProgressBlock:(void(^)(int removedCount, int totalCount))progress endBlock:(void(^)(BOOL error))end { [_memoryCache removeAllObjects]; [_diskCache removeAllObjectsWithProgressBlock:progress endBlock:end]; } - (NSString *)description { if (_name) return [NSString stringWithFormat:@"<%@: %p> (%@)", self.class, self, _name]; else return [NSString stringWithFormat:@"<%@: %p>", self.class, self]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYDiskCache.h ================================================ // // YYDiskCache.h // YYCache // // Created by ibireme on 15/2/11. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. 
//

// NOTE(review): the angle-bracket import target was stripped by the text
// extraction; restored to the Foundation umbrella header.
#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 YYDiskCache is a thread-safe cache that stores key-value pairs backed by SQLite
 and file system (similar to NSURLCache's disk cache).

 YYDiskCache has these features:

 * It uses LRU (least-recently-used) to remove objects.
 * It can be controlled by cost, count, and age.
 * It can be configured to automatically evict objects when there's no free disk space.
 * It can automatically decide the storage type (sqlite/file) for each object to get
   better performance.

 You may compile the latest version of sqlite and ignore the libsqlite3.dylib in
 iOS system to get 2x~4x speed up.
 */
@interface YYDiskCache : NSObject

#pragma mark - Attribute
///=============================================================================
/// @name Attribute
///=============================================================================

/** The name of the cache. Default is nil. */
@property (nullable, copy) NSString *name;

/** The path of the cache (read-only). */
@property (readonly) NSString *path;

/**
 If the object's data size (in bytes) is larger than this value, then object will
 be stored as a file, otherwise the object will be stored in sqlite.

 0 means all objects will be stored as separated files, NSUIntegerMax means all
 objects will be stored in sqlite.

 The default value is 20480 (20KB).
 */
@property (readonly) NSUInteger inlineThreshold;

/**
 If this block is not nil, then the block will be used to archive object instead
 of NSKeyedArchiver. You can use this block to support the objects which do not
 conform to the `NSCoding` protocol.

 The default value is nil.
 */
@property (nullable, copy) NSData *(^customArchiveBlock)(id object);

/**
 If this block is not nil, then the block will be used to unarchive object instead
 of NSKeyedUnarchiver. You can use this block to support the objects which do not
 conform to the `NSCoding` protocol.

 The default value is nil.
 */
@property (nullable, copy) id (^customUnarchiveBlock)(NSData *data);

/**
 When an object needs to be saved as a file, this block will be invoked to generate
 a file name for a specified key. If the block is nil, the cache uses md5(key) as
 default file name.

 The default value is nil.
 */
@property (nullable, copy) NSString *(^customFileNameBlock)(NSString *key);

#pragma mark - Limit
///=============================================================================
/// @name Limit
///=============================================================================

/**
 The maximum number of objects the cache should hold.

 @discussion The default value is NSUIntegerMax, which means no limit.
 This is not a strict limit — if the cache goes over the limit, some objects in the
 cache could be evicted later in background queue.
 */
@property NSUInteger countLimit;

/**
 The maximum total cost that the cache can hold before it starts evicting objects.

 @discussion The default value is NSUIntegerMax, which means no limit.
 This is not a strict limit — if the cache goes over the limit, some objects in the
 cache could be evicted later in background queue.
 */
@property NSUInteger costLimit;

/**
 The maximum expiry time of objects in cache.

 @discussion The default value is DBL_MAX, which means no limit.
 This is not a strict limit — if an object goes over the limit, the objects could
 be evicted later in background queue.
 */
@property NSTimeInterval ageLimit;

/**
 The minimum free disk space (in bytes) which the cache should keep.

 @discussion The default value is 0, which means no limit.
 If the free disk space is lower than this value, the cache will remove objects
 to free some disk space. This is not a strict limit — if the free disk space goes
 over the limit, the objects could be evicted later in background queue.
 */
@property NSUInteger freeDiskSpaceLimit;

/**
 The auto trim check time interval in seconds. Default is 60 (1 minute).

 @discussion The cache holds an internal timer to check whether the cache reaches
 its limits, and if the limit is reached, it begins to evict objects.
 */
@property NSTimeInterval autoTrimInterval;

/**
 Set `YES` to enable error logs for debug.
 */
@property BOOL errorLogsEnabled;

#pragma mark - Initializer
///=============================================================================
/// @name Initializer
///=============================================================================
- (instancetype)init UNAVAILABLE_ATTRIBUTE;
+ (instancetype)new UNAVAILABLE_ATTRIBUTE;

/**
 Create a new cache based on the specified path.

 @param path Full path of a directory in which the cache will write data.
     Once initialized you should not read and write to this directory.

 @return A new cache object, or nil if an error occurs.

 @warning If the cache instance for the specified path already exists in memory,
     this method will return it directly, instead of creating a new instance.
 */
- (nullable instancetype)initWithPath:(NSString *)path;

/**
 The designated initializer.

 @param path       Full path of a directory in which the cache will write data.
     Once initialized you should not read and write to this directory.

 @param threshold  The data store inline threshold in bytes. If the object's data
     size (in bytes) is larger than this value, then object will be stored as a
     file, otherwise the object will be stored in sqlite. 0 means all objects will
     be stored as separated files, NSUIntegerMax means all objects will be stored
     in sqlite. If you don't know your object's size, 20480 is a good choice.
     After first initialized you should not change this value of the specified path.

 @return A new cache object, or nil if an error occurs.

 @warning If the cache instance for the specified path already exists in memory,
     this method will return it directly, instead of creating a new instance.
 */
- (nullable instancetype)initWithPath:(NSString *)path
                      inlineThreshold:(NSUInteger)threshold NS_DESIGNATED_INITIALIZER;

#pragma mark - Access Methods
///=============================================================================
/// @name Access Methods
///=============================================================================

/**
 Returns a boolean value that indicates whether a given key is in cache.
 This method may block the calling thread until file read finished.

 @param key A string identifying the value. If nil, just return NO.
 @return Whether the key is in cache.
 */
- (BOOL)containsObjectForKey:(NSString *)key;

/**
 Returns a boolean value with the block that indicates whether a given key is in cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param key   A string identifying the value. If nil, just return NO.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)containsObjectForKey:(NSString *)key withBlock:(void(^)(NSString *key, BOOL contains))block;

/**
 Returns the value associated with a given key.
 This method may block the calling thread until file read finished.

 @param key A string identifying the value. If nil, just return nil.
 @return The value associated with key, or nil if no value is associated with key.
 */
- (nullable id)objectForKey:(NSString *)key;

/**
 Returns the value associated with a given key.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param key   A string identifying the value. If nil, just return nil.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)objectForKey:(NSString *)key withBlock:(void(^)(NSString *key, id _Nullable object))block;

/**
 Sets the value of the specified key in the cache.
 This method may block the calling thread until file write finished.

 @param object The object to be stored in the cache. If nil, it calls `removeObjectForKey:`.
 @param key    The key with which to associate the value. If nil, this method has no effect.
 */
- (void)setObject:(nullable id)object forKey:(NSString *)key;

/**
 Sets the value of the specified key in the cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param object The object to be stored in the cache. If nil, it calls `removeObjectForKey:`.
 @param block  A block which will be invoked in background queue when finished.
 */
- (void)setObject:(nullable id)object forKey:(NSString *)key withBlock:(void(^)(void))block;

/**
 Removes the value of the specified key in the cache.
 This method may block the calling thread until file delete finished.

 @param key The key identifying the value to be removed. If nil, this method has no effect.
 */
- (void)removeObjectForKey:(NSString *)key;

/**
 Removes the value of the specified key in the cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param key   The key identifying the value to be removed. If nil, this method has no effect.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)removeObjectForKey:(NSString *)key withBlock:(void(^)(NSString *key))block;

/**
 Empties the cache.
 This method may block the calling thread until file delete finished.
 */
- (void)removeAllObjects;

/**
 Empties the cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param block A block which will be invoked in background queue when finished.
 */
- (void)removeAllObjectsWithBlock:(void(^)(void))block;

/**
 Empties the cache with block.
 This method returns immediately and executes the clear operation with block in background.

 @warning You should not send message to this instance in these blocks.
 @param progress This block will be invoked during removing, pass nil to ignore.
 @param end      This block will be invoked at the end, pass nil to ignore.
 */
- (void)removeAllObjectsWithProgressBlock:(nullable void(^)(int removedCount, int totalCount))progress
                                 endBlock:(nullable void(^)(BOOL error))end;

/**
 Returns the number of objects in this cache.
 This method may block the calling thread until file read finished.

 @return The total objects count.
 */
- (NSInteger)totalCount;

/**
 Get the number of objects in this cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param block A block which will be invoked in background queue when finished.
 */
- (void)totalCountWithBlock:(void(^)(NSInteger totalCount))block;

/**
 Returns the total cost (in bytes) of objects in this cache.
 This method may block the calling thread until file read finished.

 @return The total objects cost in bytes.
 */
- (NSInteger)totalCost;

/**
 Get the total cost (in bytes) of objects in this cache.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param block A block which will be invoked in background queue when finished.
 */
- (void)totalCostWithBlock:(void(^)(NSInteger totalCost))block;

#pragma mark - Trim
///=============================================================================
/// @name Trim
///=============================================================================

/**
 Removes objects from the cache using LRU, until the `totalCount` is below the
 specified value.
 This method may block the calling thread until operation finished.

 @param count The total count allowed to remain after the cache has been trimmed.
 */
- (void)trimToCount:(NSUInteger)count;

/**
 Removes objects from the cache using LRU, until the `totalCount` is below the
 specified value.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param count The total count allowed to remain after the cache has been trimmed.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)trimToCount:(NSUInteger)count withBlock:(void(^)(void))block;

/**
 Removes objects from the cache using LRU, until the `totalCost` is below the
 specified value.
 This method may block the calling thread until operation finished.

 @param cost The total cost allowed to remain after the cache has been trimmed.
 */
- (void)trimToCost:(NSUInteger)cost;

/**
 Removes objects from the cache using LRU, until the `totalCost` is below the
 specified value.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param cost  The total cost allowed to remain after the cache has been trimmed.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)trimToCost:(NSUInteger)cost withBlock:(void(^)(void))block;

/**
 Removes objects from the cache using LRU, until all expired objects are removed
 by the specified value.
 This method may block the calling thread until operation finished.

 @param age The maximum age of the object.
 */
- (void)trimToAge:(NSTimeInterval)age;

/**
 Removes objects from the cache using LRU, until all expired objects are removed
 by the specified value.
 This method returns immediately and invokes the passed block in background queue
 when the operation finished.

 @param age   The maximum age of the object.
 @param block A block which will be invoked in background queue when finished.
 */
- (void)trimToAge:(NSTimeInterval)age withBlock:(void(^)(void))block;

#pragma mark - Extended Data
///=============================================================================
/// @name Extended Data
///=============================================================================

/**
 Get extended data from an object.

 @discussion See 'setExtendedData:toObject:' for more information.

 @param object An object.
 @return The extended data.
 */
+ (nullable NSData *)getExtendedDataFromObject:(id)object;

/**
 Set extended data to an object.

 @discussion You can set any extended data to an object before you save the object
 to disk cache. The extended data will also be saved with this object. You can get
 the extended data later with "getExtendedDataFromObject:".

 @param extendedData The extended data (pass nil to remove).
 @param object       The object.
 */
+ (void)setExtendedData:(nullable NSData *)extendedData toObject:(id)object;

@end

NS_ASSUME_NONNULL_END


================================================
FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYDiskCache.m
================================================
//
//  YYDiskCache.m
//  YYCache
//
//  Created by ibireme on 15/2/11.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYDiskCache.h"
#import "YYKVStorage.h"
// NOTE(review): the four angle-bracket import targets below were stripped by
// the text extraction; restored per the frameworks this file actually uses
// (UIApplicationWillTerminateNotification, CC_MD5, objc_setAssociatedObject,
// time()).
#import <UIKit/UIKit.h>
#import <CommonCrypto/CommonCrypto.h>
#import <objc/runtime.h>
#import <time.h>

// Lightweight mutex built on a binary dispatch semaphore guarding `_kv`.
#define Lock() dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER)
#define Unlock() dispatch_semaphore_signal(self->_lock)

// Address used as the unique associated-object key for extended data.
static const int extended_data_key;

/// Free disk space in bytes, or -1 when it cannot be determined.
static int64_t _YYDiskSpaceFree() {
    NSError *error = nil;
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfFileSystemForPath:NSHomeDirectory() error:&error];
    if (error) return -1;
    int64_t space = [[attrs objectForKey:NSFileSystemFreeSize] longLongValue];
    if (space < 0) space = -1;
    return space;
}

/// String's md5 hash.
/// Hex MD5 digest of `string` (UTF-8 encoded), used as the default on-disk
/// file name for a key. Returns nil for a nil string.
static NSString *_YYNSStringMD5(NSString *string) {
    if (!string) return nil;
    NSData *data = [string dataUsingEncoding:NSUTF8StringEncoding];
    unsigned char result[CC_MD5_DIGEST_LENGTH];
    CC_MD5(data.bytes, (CC_LONG)data.length, result);
    return [NSString stringWithFormat:
                @"%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x",
            result[0],  result[1],  result[2],  result[3],
            result[4],  result[5],  result[6],  result[7],
            result[8],  result[9],  result[10], result[11],
            result[12], result[13], result[14], result[15]];
}

/// Weak map of every live instance keyed by path, so -initWithPath: can hand
/// back an existing instance instead of creating a second (unsafe) one for
/// the same directory. Guarded by `_globalInstancesLock`.
static NSMapTable *_globalInstances;
static dispatch_semaphore_t _globalInstancesLock;

static void _YYDiskCacheInitGlobal() {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        _globalInstancesLock = dispatch_semaphore_create(1);
        _globalInstances = [[NSMapTable alloc] initWithKeyOptions:NSPointerFunctionsStrongMemory
                                                     valueOptions:NSPointerFunctionsWeakMemory
                                                         capacity:0];
    });
}

static YYDiskCache *_YYDiskCacheGetGlobal(NSString *path) {
    if (path.length == 0) return nil;
    _YYDiskCacheInitGlobal();
    dispatch_semaphore_wait(_globalInstancesLock, DISPATCH_TIME_FOREVER);
    id cache = [_globalInstances objectForKey:path];
    dispatch_semaphore_signal(_globalInstancesLock);
    return cache;
}

static void _YYDiskCacheSetGlobal(YYDiskCache *cache) {
    if (cache.path.length == 0) return;
    _YYDiskCacheInitGlobal();
    dispatch_semaphore_wait(_globalInstancesLock, DISPATCH_TIME_FOREVER);
    [_globalInstances setObject:cache forKey:cache.path];
    dispatch_semaphore_signal(_globalInstancesLock);
}


@implementation YYDiskCache {
    YYKVStorage *_kv;           // backing store; nil after app termination
    dispatch_semaphore_t _lock; // protects _kv (see Lock()/Unlock())
    dispatch_queue_t _queue;    // concurrent queue for the async APIs
}

#pragma mark - private

/// Reschedules itself forever: waits `autoTrimInterval`, trims in background,
/// repeats. Holds only a weak reference so the cache can deallocate.
- (void)_trimRecursively {
    __weak typeof(self) _self = self;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(_autoTrimInterval * NSEC_PER_SEC)),
                   dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
        __strong typeof(_self) self = _self;
        if (!self) return;
        [self _trimInBackground];
        [self _trimRecursively];
    });
}

/// Applies every configured limit (cost, count, age, free disk space) under
/// the lock on the background queue.
- (void)_trimInBackground {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        if (!self) return;
        Lock();
        [self _trimToCost:self.costLimit];
        [self _trimToCount:self.countLimit];
        [self _trimToAge:self.ageLimit];
        [self _trimToFreeDiskSpace:self.freeDiskSpaceLimit];
        Unlock();
    });
}

// The _trim* helpers below assume the caller already holds the lock.

- (void)_trimToCost:(NSUInteger)costLimit {
    if (costLimit >= INT_MAX) return; // treated as "no limit"
    [_kv removeItemsToFitSize:(int)costLimit];
}

- (void)_trimToCount:(NSUInteger)countLimit {
    if (countLimit >= INT_MAX) return; // treated as "no limit"
    [_kv removeItemsToFitCount:(int)countLimit];
}

- (void)_trimToAge:(NSTimeInterval)ageLimit {
    if (ageLimit <= 0) {
        [_kv removeAllItems];
        return;
    }
    long timestamp = time(NULL);
    if (timestamp <= ageLimit) return; // cutoff would be before the epoch
    long age = timestamp - ageLimit;
    if (age >= INT_MAX) return;
    [_kv removeItemsEarlierThanTime:(int)age];
}

- (void)_trimToFreeDiskSpace:(NSUInteger)targetFreeDiskSpace {
    if (targetFreeDiskSpace == 0) return; // 0 means no limit
    int64_t totalBytes = [_kv getItemsSize];
    if (totalBytes <= 0) return;
    int64_t diskFreeBytes = _YYDiskSpaceFree();
    if (diskFreeBytes < 0) return;
    int64_t needTrimBytes = targetFreeDiskSpace - diskFreeBytes;
    if (needTrimBytes <= 0) return;
    int64_t costLimit = totalBytes - needTrimBytes;
    if (costLimit < 0) costLimit = 0;
    [self _trimToCost:(int)costLimit];
}

/// File name for a key: custom block result if provided, else md5(key).
- (NSString *)_filenameForKey:(NSString *)key {
    NSString *filename = nil;
    if (_customFileNameBlock) filename = _customFileNameBlock(key);
    if (!filename) filename = _YYNSStringMD5(key);
    return filename;
}

/// Drops the storage on app termination so sqlite is closed cleanly; every
/// later message to the nil `_kv` becomes a harmless no-op.
- (void)_appWillBeTerminated {
    Lock();
    _kv = nil;
    Unlock();
}

#pragma mark - public

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:UIApplicationWillTerminateNotification
                                                  object:nil];
}

/// Plain -init is a programmer error (header marks it UNAVAILABLE); the
/// unreachable return only silences the compiler.
- (instancetype)init {
    @throw [NSException exceptionWithName:@"YYDiskCache init error"
                                   reason:@"YYDiskCache must be initialized with a path. Use 'initWithPath:' or 'initWithPath:inlineThreshold:' instead."
                                 userInfo:nil];
    return [self initWithPath:@"" inlineThreshold:0];
}

- (instancetype)initWithPath:(NSString *)path {
    return [self initWithPath:path inlineThreshold:1024 * 20]; // 20KB
}

/// Designated initializer. Returns an existing instance for `path` when one
/// is still alive (see _globalInstances); otherwise builds a new storage.
- (instancetype)initWithPath:(NSString *)path
             inlineThreshold:(NSUInteger)threshold {
    self = [super init];
    if (!self) return nil;

    YYDiskCache *globalCache = _YYDiskCacheGetGlobal(path);
    if (globalCache) return globalCache;

    // Threshold extremes select a pure-file or pure-sqlite store.
    YYKVStorageType type;
    if (threshold == 0) {
        type = YYKVStorageTypeFile;
    } else if (threshold == NSUIntegerMax) {
        type = YYKVStorageTypeSQLite;
    } else {
        type = YYKVStorageTypeMixed;
    }

    YYKVStorage *kv = [[YYKVStorage alloc] initWithPath:path type:type];
    if (!kv) return nil;

    _kv = kv;
    _path = path;
    _lock = dispatch_semaphore_create(1);
    _queue = dispatch_queue_create("com.ibireme.cache.disk", DISPATCH_QUEUE_CONCURRENT);
    _inlineThreshold = threshold;
    _countLimit = NSUIntegerMax;
    _costLimit = NSUIntegerMax;
    _ageLimit = DBL_MAX;
    _freeDiskSpaceLimit = 0;
    _autoTrimInterval = 60;

    [self _trimRecursively];
    _YYDiskCacheSetGlobal(self);

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(_appWillBeTerminated)
                                                 name:UIApplicationWillTerminateNotification
                                               object:nil];
    return self;
}

- (BOOL)containsObjectForKey:(NSString *)key {
    if (!key) return NO;
    Lock();
    BOOL contains = [_kv itemExistsForKey:key];
    Unlock();
    return contains;
}

- (void)containsObjectForKey:(NSString *)key withBlock:(void(^)(NSString *key, BOOL contains))block {
    if (!block) return;
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        // If self died, messaging nil yields NO and the block still fires.
        BOOL contains = [self containsObjectForKey:key];
        block(key, contains);
    });
}

- (id)objectForKey:(NSString *)key {
    if (!key) return nil;
    Lock();
    YYKVStorageItem *item = [_kv getItemForKey:key];
    Unlock();
    if (!item.value) return nil;

    id object = nil;
    if (_customUnarchiveBlock) {
        object = _customUnarchiveBlock(item.value);
    } else {
        @try {
            object = [NSKeyedUnarchiver unarchiveObjectWithData:item.value];
        } @catch (NSException *exception) {
            // Corrupt archive data is deliberately treated as a cache miss.
        }
    }
    if (object && item.extendedData) {
        [YYDiskCache setExtendedData:item.extendedData toObject:object];
    }
    return object;
}

- (void)objectForKey:(NSString *)key withBlock:(void(^)(NSString *key, id object))block {
    if (!block) return;
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        id object = [self objectForKey:key];
        block(key, object);
    });
}

- (void)setObject:(id)object forKey:(NSString *)key {
    if (!key) return;
    if (!object) {
        [self removeObjectForKey:key];
        return;
    }

    NSData *extendedData = [YYDiskCache getExtendedDataFromObject:object];
    NSData *value = nil;
    if (_customArchiveBlock) {
        value = _customArchiveBlock(object);
    } else {
        @try {
            value = [NSKeyedArchiver archivedDataWithRootObject:object];
        } @catch (NSException *exception) {
            // Unarchivable objects are silently dropped (best-effort cache).
        }
    }
    if (!value) return;

    // Above the inline threshold the value goes to a separate file (unless
    // the store is sqlite-only); below it the value is inlined in sqlite.
    NSString *filename = nil;
    if (_kv.type != YYKVStorageTypeSQLite) {
        if (value.length > _inlineThreshold) {
            filename = [self _filenameForKey:key];
        }
    }

    Lock();
    [_kv saveItemWithKey:key value:value filename:filename extendedData:extendedData];
    Unlock();
}

- (void)setObject:(id)object forKey:(NSString *)key withBlock:(void(^)(void))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self setObject:object forKey:key];
        if (block) block();
    });
}

- (void)removeObjectForKey:(NSString *)key {
    if (!key) return;
    Lock();
    [_kv removeItemForKey:key];
    Unlock();
}

- (void)removeObjectForKey:(NSString *)key withBlock:(void(^)(NSString *key))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self removeObjectForKey:key];
        if (block) block(key);
    });
}

- (void)removeAllObjects {
    Lock();
    [_kv removeAllItems];
    Unlock();
}

- (void)removeAllObjectsWithBlock:(void(^)(void))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self removeAllObjects];
        if (block) block();
    });
}

- (void)removeAllObjectsWithProgressBlock:(void(^)(int removedCount, int totalCount))progress
                                 endBlock:(void(^)(BOOL error))end {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        if (!self) {
            if (end) end(YES);
            return;
        }
        Lock();
        [_kv removeAllItemsWithProgressBlock:progress endBlock:end];
        Unlock();
    });
}

- (NSInteger)totalCount {
    Lock();
    int count = [_kv getItemsCount];
    Unlock();
    return count;
}

- (void)totalCountWithBlock:(void(^)(NSInteger totalCount))block {
    if (!block) return;
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        NSInteger totalCount = [self totalCount];
        block(totalCount);
    });
}

- (NSInteger)totalCost {
    Lock();
    int count = [_kv getItemsSize];
    Unlock();
    return count;
}

- (void)totalCostWithBlock:(void(^)(NSInteger totalCost))block {
    if (!block) return;
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        NSInteger totalCost = [self totalCost];
        block(totalCost);
    });
}

- (void)trimToCount:(NSUInteger)count {
    Lock();
    [self _trimToCount:count];
    Unlock();
}

- (void)trimToCount:(NSUInteger)count withBlock:(void(^)(void))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self trimToCount:count];
        if (block) block();
    });
}

- (void)trimToCost:(NSUInteger)cost {
    Lock();
    [self _trimToCost:cost];
    Unlock();
}

- (void)trimToCost:(NSUInteger)cost withBlock:(void(^)(void))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self trimToCost:cost];
        if (block) block();
    });
}

- (void)trimToAge:(NSTimeInterval)age {
    Lock();
    [self _trimToAge:age];
    Unlock();
}

- (void)trimToAge:(NSTimeInterval)age withBlock:(void(^)(void))block {
    __weak typeof(self) _self = self;
    dispatch_async(_queue, ^{
        __strong typeof(_self) self = _self;
        [self trimToAge:age];
        if (block) block();
    });
}

+ (NSData *)getExtendedDataFromObject:(id)object {
    if (!object) return nil;
    return (NSData *)objc_getAssociatedObject(object, &extended_data_key);
}

+ (void)setExtendedData:(NSData *)extendedData toObject:(id)object {
    if (!object) return;
    objc_setAssociatedObject(object, &extended_data_key, extendedData, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (NSString *)description {
    if (_name) return [NSString stringWithFormat:@"<%@: %p> (%@:%@)", self.class, self, _name, _path];
    else return [NSString stringWithFormat:@"<%@: %p> (%@)", self.class, self, _path];
}

// errorLogsEnabled is forwarded to the storage under the lock.
- (BOOL)errorLogsEnabled {
    Lock();
    BOOL enabled = _kv.errorLogsEnabled;
    Unlock();
    return enabled;
}

- (void)setErrorLogsEnabled:(BOOL)errorLogsEnabled {
    Lock();
    _kv.errorLogsEnabled = errorLogsEnabled;
    Unlock();
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYKVStorage.h
================================================
//
//  YYKVStorage.h
//  YYCache
//
//  Created by ibireme on 15/4/22.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

// NOTE(review): the angle-bracket import target was stripped by the text
// extraction; restored to the Foundation umbrella header.
#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 YYKVStorageItem is used by `YYKVStorage` to store key-value pair and meta data.
 Typically, you should not use this class directly.
 */
@interface YYKVStorageItem : NSObject
@property (nonatomic, strong) NSString *key;                ///< key
@property (nonatomic, strong) NSData *value;                ///< value
@property (nullable, nonatomic, strong) NSString *filename; ///< filename (nil if inline)
@property (nonatomic) int size;                             ///< value's size in bytes
@property (nonatomic) int modTime;                          ///< modification unix timestamp
@property (nonatomic) int accessTime;                       ///< last access unix timestamp
@property (nullable, nonatomic, strong) NSData *extendedData; ///< extended data (nil if no extended data)
@end

/**
 Storage type, indicated where the `YYKVStorageItem.value` stored.
@discussion Typically, write data to sqlite is faster than extern file, but reading performance is dependent on data size. In my test (on iPhone 6 64G), read data from extern file is faster than from sqlite when the data is larger than 20KB. * If you want to store large number of small datas (such as contacts cache), use YYKVStorageTypeSQLite to get better performance. * If you want to store large files (such as image cache), use YYKVStorageTypeFile to get better performance. * You can use YYKVStorageTypeMixed and choice your storage type for each item. See for more information. */ typedef NS_ENUM(NSUInteger, YYKVStorageType) { /// The `value` is stored as a file in file system. YYKVStorageTypeFile = 0, /// The `value` is stored in sqlite with blob type. YYKVStorageTypeSQLite = 1, /// The `value` is stored in file system or sqlite based on your choice. YYKVStorageTypeMixed = 2, }; /** YYKVStorage is a key-value storage based on sqlite and file system. Typically, you should not use this class directly. @discussion The designated initializer for YYKVStorage is `initWithPath:type:`. After initialized, a directory is created based on the `path` to hold key-value data. Once initialized you should not read or write this directory without the instance. You may compile the latest version of sqlite and ignore the libsqlite3.dylib in iOS system to get 2x~4x speed up. @warning The instance of this class is *NOT* thread safe, you need to make sure that there's only one thread to access the instance at the same time. If you really need to process large amounts of data in multi-thread, you should split the data to multiple KVStorage instance (sharding). */ @interface YYKVStorage : NSObject #pragma mark - Attribute ///============================================================================= /// @name Attribute ///============================================================================= @property (nonatomic, readonly) NSString *path; ///< The path of this storage. 
@property (nonatomic, readonly) YYKVStorageType type; ///< The type of this storage. @property (nonatomic) BOOL errorLogsEnabled; ///< Set `YES` to enable error logs for debug. #pragma mark - Initializer ///============================================================================= /// @name Initializer ///============================================================================= - (instancetype)init UNAVAILABLE_ATTRIBUTE; + (instancetype)new UNAVAILABLE_ATTRIBUTE; /** The designated initializer. @param path Full path of a directory in which the storage will write data. If the directory is not exists, it will try to create one, otherwise it will read the data in this directory. @param type The storage type. After first initialized you should not change the type of the specified path. @return A new storage object, or nil if an error occurs. @warning Multiple instances with the same path will make the storage unstable. */ - (nullable instancetype)initWithPath:(NSString *)path type:(YYKVStorageType)type NS_DESIGNATED_INITIALIZER; #pragma mark - Save Items ///============================================================================= /// @name Save Items ///============================================================================= /** Save an item or update the item with 'key' if it already exists. @discussion This method will save the item.key, item.value, item.filename and item.extendedData to disk or sqlite, other properties will be ignored. item.key and item.value should not be empty (nil or zero length). If the `type` is YYKVStorageTypeFile, then the item.filename should not be empty. If the `type` is YYKVStorageTypeSQLite, then the item.filename will be ignored. It the `type` is YYKVStorageTypeMixed, then the item.value will be saved to file system if the item.filename is not empty, otherwise it will be saved to sqlite. @param item An item. @return Whether succeed. 
*/ - (BOOL)saveItem:(YYKVStorageItem *)item; /** Save an item or update the item with 'key' if it already exists. @discussion This method will save the key-value pair to sqlite. If the `type` is YYKVStorageTypeFile, then this method will failed. @param key The key, should not be empty (nil or zero length). @param value The key, should not be empty (nil or zero length). @return Whether succeed. */ - (BOOL)saveItemWithKey:(NSString *)key value:(NSData *)value; /** Save an item or update the item with 'key' if it already exists. @discussion If the `type` is YYKVStorageTypeFile, then the `filename` should not be empty. If the `type` is YYKVStorageTypeSQLite, then the `filename` will be ignored. It the `type` is YYKVStorageTypeMixed, then the `value` will be saved to file system if the `filename` is not empty, otherwise it will be saved to sqlite. @param key The key, should not be empty (nil or zero length). @param value The key, should not be empty (nil or zero length). @param filename The filename. @param extendedData The extended data for this item (pass nil to ignore it). @return Whether succeed. */ - (BOOL)saveItemWithKey:(NSString *)key value:(NSData *)value filename:(nullable NSString *)filename extendedData:(nullable NSData *)extendedData; #pragma mark - Remove Items ///============================================================================= /// @name Remove Items ///============================================================================= /** Remove an item with 'key'. @param key The item's key. @return Whether succeed. */ - (BOOL)removeItemForKey:(NSString *)key; /** Remove items with an array of keys. @param keys An array of specified keys. @return Whether succeed. */ - (BOOL)removeItemForKeys:(NSArray *)keys; /** Remove all items which `value` is larger than a specified size. @param size The maximum size in bytes. @return Whether succeed. 
*/ - (BOOL)removeItemsLargerThanSize:(int)size; /** Remove all items which last access time is earlier than a specified timestamp. @param time The specified unix timestamp. @return Whether succeed. */ - (BOOL)removeItemsEarlierThanTime:(int)time; /** Remove items to make the total size not larger than a specified size. The least recently used (LRU) items will be removed first. @param maxSize The specified size in bytes. @return Whether succeed. */ - (BOOL)removeItemsToFitSize:(int)maxSize; /** Remove items to make the total count not larger than a specified count. The least recently used (LRU) items will be removed first. @param maxCount The specified item count. @return Whether succeed. */ - (BOOL)removeItemsToFitCount:(int)maxCount; /** Remove all items in background queue. @discussion This method will remove the files and sqlite database to a trash folder, and then clear the folder in background queue. So this method is much faster than `removeAllItemsWithProgressBlock:endBlock:`. @return Whether succeed. */ - (BOOL)removeAllItems; /** Remove all items. @warning You should not send message to this instance in these blocks. @param progress This block will be invoked during removing, pass nil to ignore. @param end This block will be invoked at the end, pass nil to ignore. */ - (void)removeAllItemsWithProgressBlock:(nullable void(^)(int removedCount, int totalCount))progress endBlock:(nullable void(^)(BOOL error))end; #pragma mark - Get Items ///============================================================================= /// @name Get Items ///============================================================================= /** Get item with a specified key. @param key A specified key. @return Item for the key, or nil if not exists / error occurs. */ - (nullable YYKVStorageItem *)getItemForKey:(NSString *)key; /** Get item information with a specified key. The `value` in this item will be ignored. @param key A specified key. 
 @return Item information for the key, or nil if not exists / error occurs.
 */
- (nullable YYKVStorageItem *)getItemInfoForKey:(NSString *)key;

/**
 Get item value with a specified key.
 
 @param key A specified key.
 @return Item's value, or nil if not exists / error occurs.
 */
- (nullable NSData *)getItemValueForKey:(NSString *)key;

/**
 Get items with an array of keys.
 
 @param keys An array of specified keys.
 @return An array of `YYKVStorageItem`, or nil if not exists / error occurs.
 */
- (nullable NSArray *)getItemForKeys:(NSArray *)keys;

/**
 Get item information with an array of keys.
 The `value` in items will be ignored.
 
 @param keys An array of specified keys.
 @return An array of `YYKVStorageItem`, or nil if not exists / error occurs.
 */
- (nullable NSArray *)getItemInfoForKeys:(NSArray *)keys;

/**
 Get items value with an array of keys.
 
 @param keys An array of specified keys.
 @return A dictionary which key is 'key' and value is 'value', or nil if not exists / error occurs.
 */
- (nullable NSDictionary *)getItemValueForKeys:(NSArray *)keys;

#pragma mark - Get Storage Status
///=============================================================================
/// @name Get Storage Status
///=============================================================================

/**
 Whether an item exists for a specified key.
 
 @param key A specified key.
 @return `YES` if there's an item exists for the key, `NO` if not exists or an error occurs.
 */
- (BOOL)itemExistsForKey:(NSString *)key;

/**
 Get total item count.
 @return Total item count, -1 when an error occurs.
 */
- (int)getItemsCount;

/**
 Get item value's total size in bytes.
 @return Total size in bytes, -1 when an error occurs.
 */
- (int)getItemsSize;

@end

NS_ASSUME_NONNULL_END


================================================
FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYKVStorage.m
================================================
//
//  YYKVStorage.m
//  YYCache
//
//  Created by ibireme on 15/4/22.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYKVStorage.h"
// NOTE(review): the angle-bracket include targets were stripped by the source
// extraction (`#import` with nothing after it); restored to upstream YYCache.
#import <UIKit/UIKit.h>
#import <time.h>
#if __has_include(<sqlite3.h>)
#import <sqlite3.h>
#else
#import "sqlite3.h"
#endif

// Give up re-opening a broken db after this many consecutive failures...
static const NSUInteger kMaxErrorRetryCount = 8;
// ...and wait at least this long between open attempts.
static const NSTimeInterval kMinRetryTimeInterval = 2.0;
// Reserve room for the db file names appended to the user-supplied path.
static const int kPathLengthMax = PATH_MAX - 64;
static NSString *const kDBFileName = @"manifest.sqlite";
static NSString *const kDBShmFileName = @"manifest.sqlite-shm";
static NSString *const kDBWalFileName = @"manifest.sqlite-wal";
static NSString *const kDataDirectoryName = @"data";
static NSString *const kTrashDirectoryName = @"trash";

/*
 File:
 /path/
      /manifest.sqlite
      /manifest.sqlite-shm
      /manifest.sqlite-wal
      /data/
           /e10adc3949ba59abbe56e057f20f883e
           /e10adc3949ba59abbe56e057f20f883e
      /trash/
            /unused_file_or_folder
 
 SQL:
 create table if not exists manifest (
    key                 text,
    filename            text,
    size                integer,
    inline_data         blob,
    modification_time   integer,
    last_access_time    integer,
    extended_data       blob,
    primary key(key)
 );
 create index if not exists last_access_time_idx on manifest(last_access_time);
 */

/// Returns nil in App Extension (UIApplication is unavailable there).
static UIApplication *_YYSharedApplication() {
    static BOOL isAppExtension = NO;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        Class cls = NSClassFromString(@"UIApplication");
        if (!cls || ![cls respondsToSelector:@selector(sharedApplication)]) isAppExtension = YES;
        if ([[[NSBundle mainBundle] bundlePath] hasSuffix:@".appex"]) isAppExtension = YES;
    });
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wundeclared-selector"
    // performSelector: so this file still compiles when linked into an extension.
    return isAppExtension ? nil : [UIApplication performSelector:@selector(sharedApplication)];
#pragma clang diagnostic pop
}


@implementation YYKVStorageItem
@end

@implementation YYKVStorage {
    dispatch_queue_t _trashQueue;   ///< serial queue that empties the trash folder
    
    NSString *_path;                ///< root path passed to the initializer
    NSString *_dbPath;              ///< path of manifest.sqlite
    NSString *_dataPath;            ///< path of the external-file data directory
    NSString *_trashPath;           ///< path of the trash directory
    
    sqlite3 *_db;                   ///< open db handle, NULL when closed
    CFMutableDictionaryRef _dbStmtCache; ///< SQL string -> cached sqlite3_stmt
    NSTimeInterval _dbLastOpenErrorTime; ///< CACurrentMediaTime of last failed open
    NSUInteger _dbOpenErrorCount;        ///< consecutive failed open attempts
}


#pragma mark - db

/// Open the database (no-op if already open) and create the statement cache.
- (BOOL)_dbOpen {
    if (_db) return YES;
    
    int result = sqlite3_open(_dbPath.UTF8String, &_db);
    if (result == SQLITE_OK) {
        CFDictionaryKeyCallBacks keyCallbacks = kCFCopyStringDictionaryKeyCallBacks;
        // Values are raw sqlite3_stmt pointers, not CF objects: no retain/release callbacks.
        CFDictionaryValueCallBacks valueCallbacks = {0};
        _dbStmtCache = CFDictionaryCreateMutable(CFAllocatorGetDefault(), 0, &keyCallbacks, &valueCallbacks);
        _dbLastOpenErrorTime = 0;
        _dbOpenErrorCount = 0;
        return YES;
    } else {
        _db = NULL;
        if (_dbStmtCache) CFRelease(_dbStmtCache);
        _dbStmtCache = NULL;
        _dbLastOpenErrorTime = CACurrentMediaTime();
        _dbOpenErrorCount++;
        
        if (_errorLogsEnabled) {
            NSLog(@"%s line:%d sqlite open failed (%d).", __FUNCTION__, __LINE__, result);
        }
        return NO;
    }
}

/// Close the database, finalizing any statements that keep it busy.
- (BOOL)_dbClose {
    if (!_db) return YES;
    
    int  result = 0;
    BOOL retry = NO;
    BOOL stmtFinalized = NO;
    
    if (_dbStmtCache) CFRelease(_dbStmtCache);
    _dbStmtCache = NULL;
    
    do {
        retry = NO;
        result = sqlite3_close(_db);
        if (result == SQLITE_BUSY || result == SQLITE_LOCKED) {
            // Unfinalized statements block sqlite3_close; finalize them all once, then retry.
            if (!stmtFinalized) {
                stmtFinalized = YES;
                sqlite3_stmt *stmt;
                while ((stmt = sqlite3_next_stmt(_db, nil)) != 0) {
                    sqlite3_finalize(stmt);
                    retry = YES;
                }
            }
        } else if (result != SQLITE_OK) {
            if (_errorLogsEnabled) {
                NSLog(@"%s line:%d sqlite close failed (%d).", __FUNCTION__, __LINE__, result);
            }
        }
    } while (retry);
    _db = NULL;
    return YES;
}

/// Ensure the database is open, re-opening after errors with rate limiting.
- (BOOL)_dbCheck {
    if (!_db) {
        if (_dbOpenErrorCount < kMaxErrorRetryCount &&
            CACurrentMediaTime() - _dbLastOpenErrorTime > kMinRetryTimeInterval) {
            return [self _dbOpen] && [self _dbInitialize];
        } else {
            return NO;
        }
    }
    return YES;
}

/// Set WAL journaling / normal sync and create the manifest table + index.
- (BOOL)_dbInitialize {
    NSString *sql = @"pragma journal_mode = wal; pragma synchronous = normal; create table if not exists manifest (key text, filename text, size integer, inline_data blob, modification_time integer, last_access_time integer, extended_data blob, primary key(key)); create index if not exists last_access_time_idx on manifest(last_access_time);";
    return [self _dbExecute:sql];
}

/// Merge the `sqlite-wal` file back into the main `sqlite` file.
- (void)_dbCheckpoint {
    if (![self _dbCheck]) return;
    // Cause a checkpoint to occur, merge `sqlite-wal` file to `sqlite` file.
    sqlite3_wal_checkpoint(_db, NULL);
}

/// Run a raw SQL string (no parameters, no statement caching).
- (BOOL)_dbExecute:(NSString *)sql {
    if (sql.length == 0) return NO;
    if (![self _dbCheck]) return NO;
    
    char *error = NULL;
    int result = sqlite3_exec(_db, sql.UTF8String, NULL, NULL, &error);
    if (error) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite exec error (%d): %s", __FUNCTION__, __LINE__, result, error);
        sqlite3_free(error);
    }
    return result == SQLITE_OK;
}

/// Return a prepared statement for `sql`, creating and caching it on first use;
/// cached statements are reset so they can be re-bound and re-stepped.
- (sqlite3_stmt *)_dbPrepareStmt:(NSString *)sql {
    if (![self _dbCheck] || sql.length == 0 || !_dbStmtCache) return NULL;
    sqlite3_stmt *stmt = (sqlite3_stmt *)CFDictionaryGetValue(_dbStmtCache, (__bridge const void *)(sql));
    if (!stmt) {
        int result = sqlite3_prepare_v2(_db, sql.UTF8String, -1, &stmt, NULL);
        if (result != SQLITE_OK) {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite stmt prepare error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            return NULL;
        }
        CFDictionarySetValue(_dbStmtCache, (__bridge const void *)(sql), stmt);
    } else {
        sqlite3_reset(stmt);
    }
    return stmt;
}

/// Build a "?,?,...,?" placeholder list with one '?' per key.
- (NSString *)_dbJoinedKeys:(NSArray *)keys {
    NSMutableString *string = [NSMutableString new];
    for (NSUInteger i = 0, max = keys.count; i < max; i++) {
        [string appendString:@"?"];
        if (i + 1 != max) {
            [string appendString:@","];
        }
    }
    return string;
}

/// Bind `keys` as text parameters starting at the 1-based parameter `index`.
- (void)_dbBindJoinedKeys:(NSArray *)keys stmt:(sqlite3_stmt *)stmt fromIndex:(int)index {
    for (int i = 0, max = (int)keys.count; i < max; i++) {
        NSString *key = keys[i];
        sqlite3_bind_text(stmt, index + i, key.UTF8String, -1, NULL);
    }
}

/// Insert or replace a manifest row. If `fileName` is empty the value is stored
/// inline as a blob, otherwise only its size is recorded (the bytes live on disk).
- (BOOL)_dbSaveWithKey:(NSString *)key value:(NSData *)value fileName:(NSString *)fileName extendedData:(NSData *)extendedData {
    NSString *sql = @"insert or replace into manifest (key, filename, size, inline_data, modification_time, last_access_time, extended_data) values (?1, ?2, ?3, ?4, ?5, ?6, ?7);";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return NO;
    
    int timestamp = (int)time(NULL);
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    sqlite3_bind_text(stmt, 2, fileName.UTF8String, -1, NULL);
    sqlite3_bind_int(stmt, 3, (int)value.length);
    if (fileName.length == 0) {
        sqlite3_bind_blob(stmt, 4, value.bytes, (int)value.length, 0);
    } else {
        sqlite3_bind_blob(stmt, 4, NULL, 0, 0);
    }
    sqlite3_bind_int(stmt, 5, timestamp);
    sqlite3_bind_int(stmt, 6, timestamp);
    sqlite3_bind_blob(stmt, 7, extendedData.bytes, (int)extendedData.length, 0);
    
    int result = sqlite3_step(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite insert error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Touch the LRU timestamp of one key.
- (BOOL)_dbUpdateAccessTimeWithKey:(NSString *)key {
    NSString *sql = @"update manifest set last_access_time = ?1 where key = ?2;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return NO;
    sqlite3_bind_int(stmt, 1, (int)time(NULL));
    sqlite3_bind_text(stmt, 2, key.UTF8String, -1, NULL);
    int result = sqlite3_step(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite update error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Touch the LRU timestamp of several keys. The statement is built per call
/// (variable placeholder count), so it is finalized rather than cached.
- (BOOL)_dbUpdateAccessTimeWithKeys:(NSArray *)keys {
    if (![self _dbCheck]) return NO;
    int t = (int)time(NULL);
    NSString *sql = [NSString stringWithFormat:@"update manifest set last_access_time = %d where key in (%@);", t, [self _dbJoinedKeys:keys]];
    
    sqlite3_stmt *stmt = NULL;
    int result = sqlite3_prepare_v2(_db, sql.UTF8String, -1, &stmt, NULL);
    if (result != SQLITE_OK) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite stmt prepare error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    [self _dbBindJoinedKeys:keys stmt:stmt fromIndex:1];
    result = sqlite3_step(stmt);
    sqlite3_finalize(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite update error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Delete the manifest row for one key (does not touch the external file).
- (BOOL)_dbDeleteItemWithKey:(NSString *)key {
    NSString *sql = @"delete from manifest where key = ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return NO;
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    int result = sqlite3_step(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d db delete error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Delete the manifest rows for several keys (does not touch the external files).
- (BOOL)_dbDeleteItemWithKeys:(NSArray *)keys {
    if (![self _dbCheck]) return NO;
    NSString *sql = [NSString stringWithFormat:@"delete from manifest where key in (%@);", [self _dbJoinedKeys:keys]];
    sqlite3_stmt *stmt = NULL;
    int result = sqlite3_prepare_v2(_db, sql.UTF8String, -1, &stmt, NULL);
    if (result != SQLITE_OK) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite stmt prepare error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    [self _dbBindJoinedKeys:keys stmt:stmt fromIndex:1];
    result = sqlite3_step(stmt);
    sqlite3_finalize(stmt);
    // FIX: was `result == SQLITE_ERROR`, which silently reported success for any
    // other failing step result (SQLITE_BUSY, SQLITE_LOCKED, SQLITE_MISUSE, ...).
    // Every sibling delete/update method treats "not SQLITE_DONE" as failure.
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite delete error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Delete all manifest rows whose stored size exceeds `size` bytes.
- (BOOL)_dbDeleteItemsWithSizeLargerThan:(int)size {
    NSString *sql = @"delete from manifest where size > ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return NO;
    sqlite3_bind_int(stmt, 1, size);
    int result = sqlite3_step(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite delete error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Delete all manifest rows last accessed before the unix timestamp `time`.
- (BOOL)_dbDeleteItemsWithTimeEarlierThan:(int)time {
    NSString *sql = @"delete from manifest where last_access_time < ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return NO;
    sqlite3_bind_int(stmt, 1, time);
    int result = sqlite3_step(stmt);
    if (result != SQLITE_DONE) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite delete error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return NO;
    }
    return YES;
}

/// Build a YYKVStorageItem from the current result row. The column order must
/// match the SELECT lists used by the callers; when `excludeInlineData` is set
/// the inline_data column is absent and the index is simply not advanced.
- (YYKVStorageItem *)_dbGetItemFromStmt:(sqlite3_stmt *)stmt excludeInlineData:(BOOL)excludeInlineData {
    int i = 0;
    char *key = (char *)sqlite3_column_text(stmt, i++);
    char *filename = (char *)sqlite3_column_text(stmt, i++);
    int size = sqlite3_column_int(stmt, i++);
    const void *inline_data = excludeInlineData ? NULL : sqlite3_column_blob(stmt, i);
    int inline_data_bytes = excludeInlineData ? 0 : sqlite3_column_bytes(stmt, i++);
    int modification_time = sqlite3_column_int(stmt, i++);
    int last_access_time = sqlite3_column_int(stmt, i++);
    const void *extended_data = sqlite3_column_blob(stmt, i);
    int extended_data_bytes = sqlite3_column_bytes(stmt, i++);
    
    YYKVStorageItem *item = [YYKVStorageItem new];
    if (key) item.key = [NSString stringWithUTF8String:key];
    if (filename && *filename != 0) item.filename = [NSString stringWithUTF8String:filename];
    item.size = size;
    if (inline_data_bytes > 0 && inline_data) item.value = [NSData dataWithBytes:inline_data length:inline_data_bytes];
    item.modTime = modification_time;
    item.accessTime = last_access_time;
    if (extended_data_bytes > 0 && extended_data) item.extendedData = [NSData dataWithBytes:extended_data length:extended_data_bytes];
    return item;
}

/// Fetch one item by key; optionally skip loading the inline blob.
- (YYKVStorageItem *)_dbGetItemWithKey:(NSString *)key excludeInlineData:(BOOL)excludeInlineData {
    NSString *sql = excludeInlineData ? @"select key, filename, size, modification_time, last_access_time, extended_data from manifest where key = ?1;" : @"select key, filename, size, inline_data, modification_time, last_access_time, extended_data from manifest where key = ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    
    YYKVStorageItem *item = nil;
    int result = sqlite3_step(stmt);
    if (result == SQLITE_ROW) {
        item = [self _dbGetItemFromStmt:stmt excludeInlineData:excludeInlineData];
    } else {
        if (result != SQLITE_DONE) {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        }
    }
    return item;
}

/// Fetch multiple items by key; returns nil on error (not just on no match).
- (NSMutableArray *)_dbGetItemWithKeys:(NSArray *)keys excludeInlineData:(BOOL)excludeInlineData {
    if (![self _dbCheck]) return nil;
    NSString *sql;
    if (excludeInlineData) {
        sql = [NSString stringWithFormat:@"select key, filename, size, modification_time, last_access_time, extended_data from manifest where key in (%@);", [self _dbJoinedKeys:keys]];
    } else {
        sql = [NSString stringWithFormat:@"select key, filename, size, inline_data, modification_time, last_access_time, extended_data from manifest where key in (%@)", [self _dbJoinedKeys:keys]];
    }
    
    sqlite3_stmt *stmt = NULL;
    int result = sqlite3_prepare_v2(_db, sql.UTF8String, -1, &stmt, NULL);
    if (result != SQLITE_OK) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite stmt prepare error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return nil;
    }
    
    [self _dbBindJoinedKeys:keys stmt:stmt fromIndex:1];
    NSMutableArray *items = [NSMutableArray new];
    do {
        result = sqlite3_step(stmt);
        if (result == SQLITE_ROW) {
            YYKVStorageItem *item = [self _dbGetItemFromStmt:stmt excludeInlineData:excludeInlineData];
            if (item) [items addObject:item];
        } else if (result == SQLITE_DONE) {
            break;
        } else {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            items = nil;
            break;
        }
    } while (1);
    sqlite3_finalize(stmt);
    return items;
}

/// Fetch only the inline blob value for a key.
- (NSData *)_dbGetValueWithKey:(NSString *)key {
    NSString *sql = @"select inline_data from manifest where key = ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    
    int result = sqlite3_step(stmt);
    if (result == SQLITE_ROW) {
        const void *inline_data = sqlite3_column_blob(stmt, 0);
        int inline_data_bytes = sqlite3_column_bytes(stmt, 0);
        if (!inline_data || inline_data_bytes <= 0) return nil;
        return [NSData dataWithBytes:inline_data length:inline_data_bytes];
    } else {
        if (result != SQLITE_DONE) {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        }
        return nil;
    }
}

/// Fetch only the external filename for a key (nil if inline or missing).
- (NSString *)_dbGetFilenameWithKey:(NSString *)key {
    NSString *sql = @"select filename from manifest where key = ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    int result = sqlite3_step(stmt);
    if (result == SQLITE_ROW) {
        char *filename = (char *)sqlite3_column_text(stmt, 0);
        if (filename && *filename != 0) {
            return [NSString stringWithUTF8String:filename];
        }
    } else {
        if (result != SQLITE_DONE) {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        }
    }
    return nil;
}

/// Fetch the external filenames for several keys; returns nil on error.
- (NSMutableArray *)_dbGetFilenameWithKeys:(NSArray *)keys {
    if (![self _dbCheck]) return nil;
    NSString *sql = [NSString stringWithFormat:@"select filename from manifest where key in (%@);", [self _dbJoinedKeys:keys]];
    sqlite3_stmt *stmt = NULL;
    int result = sqlite3_prepare_v2(_db, sql.UTF8String, -1, &stmt, NULL);
    if (result != SQLITE_OK) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite stmt prepare error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return nil;
    }
    
    [self _dbBindJoinedKeys:keys stmt:stmt fromIndex:1];
    NSMutableArray *filenames = [NSMutableArray new];
    do {
        result = sqlite3_step(stmt);
        if (result == SQLITE_ROW) {
            char *filename = (char *)sqlite3_column_text(stmt, 0);
            if (filename && *filename != 0) {
                NSString *name = [NSString stringWithUTF8String:filename];
                if (name) [filenames addObject:name];
            }
        } else if (result == SQLITE_DONE) {
            break;
        } else {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            filenames = nil;
            break;
        }
    } while (1);
    sqlite3_finalize(stmt);
    return filenames;
}

/// Filenames of external files whose stored size exceeds `size` bytes.
- (NSMutableArray *)_dbGetFilenamesWithSizeLargerThan:(int)size {
    NSString *sql = @"select filename from manifest where size > ?1 and filename is not null;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_int(stmt, 1, size);
    
    NSMutableArray *filenames = [NSMutableArray new];
    do {
        int result = sqlite3_step(stmt);
        if (result == SQLITE_ROW) {
            char *filename = (char *)sqlite3_column_text(stmt, 0);
            if (filename && *filename != 0) {
                NSString *name = [NSString stringWithUTF8String:filename];
                if (name) [filenames addObject:name];
            }
        } else if (result == SQLITE_DONE) {
            break;
        } else {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            filenames = nil;
            break;
        }
    } while (1);
    return filenames;
}

/// Filenames of external files last accessed before the unix timestamp `time`.
- (NSMutableArray *)_dbGetFilenamesWithTimeEarlierThan:(int)time {
    NSString *sql = @"select filename from manifest where last_access_time < ?1 and filename is not null;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_int(stmt, 1, time);
    
    NSMutableArray *filenames = [NSMutableArray new];
    do {
        int result = sqlite3_step(stmt);
        if (result == SQLITE_ROW) {
            char *filename = (char *)sqlite3_column_text(stmt, 0);
            if (filename && *filename != 0) {
                NSString *name = [NSString stringWithUTF8String:filename];
                if (name) [filenames addObject:name];
            }
        } else if (result == SQLITE_DONE) {
            break;
        } else {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            filenames = nil;
            break;
        }
    } while (1);
    return filenames;
}

/// The `count` least-recently-used items with key/filename/size only
/// (used by the LRU trim loops).
- (NSMutableArray *)_dbGetItemSizeInfoOrderByTimeAscWithLimit:(int)count {
    NSString *sql = @"select key, filename, size from manifest order by last_access_time asc limit ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return nil;
    sqlite3_bind_int(stmt, 1, count);
    
    NSMutableArray *items = [NSMutableArray new];
    do {
        int result = sqlite3_step(stmt);
        if (result == SQLITE_ROW) {
            char *key = (char *)sqlite3_column_text(stmt, 0);
            char *filename = (char *)sqlite3_column_text(stmt, 1);
            int size = sqlite3_column_int(stmt, 2);
            NSString *keyStr = key ? [NSString stringWithUTF8String:key] : nil;
            if (keyStr) {
                YYKVStorageItem *item = [YYKVStorageItem new];
                item.key = keyStr; // reuse keyStr instead of building the string a second time
                item.filename = filename ? [NSString stringWithUTF8String:filename] : nil;
                item.size = size;
                [items addObject:item];
            }
        } else if (result == SQLITE_DONE) {
            break;
        } else {
            if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
            items = nil;
            break;
        }
    } while (1);
    return items;
}

/// Row count for a key (0 or 1); -1 on error.
- (int)_dbGetItemCountWithKey:(NSString *)key {
    NSString *sql = @"select count(key) from manifest where key = ?1;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return -1;
    sqlite3_bind_text(stmt, 1, key.UTF8String, -1, NULL);
    int result = sqlite3_step(stmt);
    if (result != SQLITE_ROW) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return -1;
    }
    return sqlite3_column_int(stmt, 0);
}

/// Sum of all stored value sizes; -1 on error.
- (int)_dbGetTotalItemSize {
    NSString *sql = @"select sum(size) from manifest;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return -1;
    int result = sqlite3_step(stmt);
    if (result != SQLITE_ROW) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return -1;
    }
    return sqlite3_column_int(stmt, 0);
}

/// Total row count; -1 on error.
- (int)_dbGetTotalItemCount {
    NSString *sql = @"select count(*) from manifest;";
    sqlite3_stmt *stmt = [self _dbPrepareStmt:sql];
    if (!stmt) return -1;
    int result = sqlite3_step(stmt);
    if (result != SQLITE_ROW) {
        if (_errorLogsEnabled) NSLog(@"%s line:%d sqlite query error (%d): %s", __FUNCTION__, __LINE__, result, sqlite3_errmsg(_db));
        return -1;
    }
    return sqlite3_column_int(stmt, 0);
}


#pragma mark - file

/// Write `data` to the data directory under `filename`.
- (BOOL)_fileWriteWithName:(NSString *)filename data:(NSData *)data {
    NSString *path = [_dataPath stringByAppendingPathComponent:filename];
    return [data writeToFile:path atomically:NO];
}

/// Read the file `filename` from the data directory (nil if missing).
- (NSData *)_fileReadWithName:(NSString *)filename {
    NSString *path = [_dataPath stringByAppendingPathComponent:filename];
    NSData *data = [NSData dataWithContentsOfFile:path];
    return data;
}

/// Delete the file `filename` from the data directory.
- (BOOL)_fileDeleteWithName:(NSString *)filename {
    NSString *path = [_dataPath stringByAppendingPathComponent:filename];
    return [[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
}

/// Move the whole data directory into a uniquely-named trash subfolder and
/// recreate an empty data directory. Fast; actual deletion happens later.
- (BOOL)_fileMoveAllToTrash {
    CFUUIDRef uuidRef = CFUUIDCreate(NULL);
    CFStringRef uuid = CFUUIDCreateString(NULL, uuidRef);
    CFRelease(uuidRef);
    NSString *tmpPath = [_trashPath stringByAppendingPathComponent:(__bridge NSString *)(uuid)];
    BOOL suc = [[NSFileManager defaultManager] moveItemAtPath:_dataPath toPath:tmpPath error:nil];
    if (suc) {
        suc = [[NSFileManager defaultManager] createDirectoryAtPath:_dataPath withIntermediateDirectories:YES attributes:nil error:NULL];
    }
    CFRelease(uuid);
    return suc;
}

/// Delete everything in the trash folder on the serial trash queue.
- (void)_fileEmptyTrashInBackground {
    NSString *trashPath = _trashPath;
    dispatch_queue_t queue = _trashQueue;
    dispatch_async(queue, ^{
        // A fresh NSFileManager: the shared one is not thread-safe for delegate use.
        NSFileManager *manager = [NSFileManager new];
        NSArray *directoryContents = [manager contentsOfDirectoryAtPath:trashPath error:NULL];
        for (NSString *path in directoryContents) {
            NSString *fullPath = [trashPath stringByAppendingPathComponent:path];
            [manager removeItemAtPath:fullPath error:NULL];
        }
    });
}


#pragma mark - private

/**
 Delete all files and empty in background.
 Make sure the db is closed.
 */
- (void)_reset {
    [[NSFileManager defaultManager] removeItemAtPath:[_path stringByAppendingPathComponent:kDBFileName] error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:[_path stringByAppendingPathComponent:kDBShmFileName] error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:[_path stringByAppendingPathComponent:kDBWalFileName] error:nil];
    [self _fileMoveAllToTrash];
    [self _fileEmptyTrashInBackground];
}


#pragma mark - public

/// Plain -init is programmer error; the designated initializer is below.
- (instancetype)init {
    @throw [NSException exceptionWithName:@"YYKVStorage init error" reason:@"Please use the designated initializer and pass the 'path' and 'type'." userInfo:nil];
    return [self initWithPath:@"" type:YYKVStorageTypeFile];
}

/// Designated initializer: validates arguments, creates the directory layout,
/// opens (rebuilding once if broken) the manifest db, and clears leftover trash.
- (instancetype)initWithPath:(NSString *)path type:(YYKVStorageType)type {
    if (path.length == 0 || path.length > kPathLengthMax) {
        NSLog(@"YYKVStorage init error: invalid path: [%@].", path);
        return nil;
    }
    if (type > YYKVStorageTypeMixed) {
        NSLog(@"YYKVStorage init error: invalid type: %lu.", (unsigned long)type);
        return nil;
    }
    
    self = [super init];
    if (!self) return nil; // guard added: don't touch ivars on a nil self
    _path = path.copy;
    _type = type;
    _dataPath = [path stringByAppendingPathComponent:kDataDirectoryName];
    _trashPath = [path stringByAppendingPathComponent:kTrashDirectoryName];
    _trashQueue = dispatch_queue_create("com.ibireme.cache.disk.trash", DISPATCH_QUEUE_SERIAL);
    _dbPath = [path stringByAppendingPathComponent:kDBFileName];
    _errorLogsEnabled = YES;
    NSError *error = nil;
    if (![[NSFileManager defaultManager] createDirectoryAtPath:path
                                   withIntermediateDirectories:YES
                                                    attributes:nil
                                                         error:&error] ||
        ![[NSFileManager defaultManager] createDirectoryAtPath:[path stringByAppendingPathComponent:kDataDirectoryName]
                                   withIntermediateDirectories:YES
                                                    attributes:nil
                                                         error:&error] ||
        ![[NSFileManager defaultManager] createDirectoryAtPath:[path stringByAppendingPathComponent:kTrashDirectoryName]
                                   withIntermediateDirectories:YES
                                                    attributes:nil
                                                         error:&error]) {
        NSLog(@"YYKVStorage init error:%@", error);
        return nil;
    }
    
    if (![self _dbOpen] || ![self _dbInitialize]) {
        // db file may broken...
        [self _dbClose];
        [self _reset]; // rebuild
        if (![self _dbOpen] || ![self _dbInitialize]) {
            [self _dbClose];
            NSLog(@"YYKVStorage init error: fail to open sqlite db.");
            return nil;
        }
    }
    [self _fileEmptyTrashInBackground]; // empty the trash if failed at last time
    return self;
}

/// Close the db inside a background task so teardown survives app suspension.
- (void)dealloc {
    UIBackgroundTaskIdentifier taskID = [_YYSharedApplication() beginBackgroundTaskWithExpirationHandler:^{}];
    [self _dbClose];
    if (taskID != UIBackgroundTaskInvalid) {
        [_YYSharedApplication() endBackgroundTask:taskID];
    }
}

- (BOOL)saveItem:(YYKVStorageItem *)item {
    return [self saveItemWithKey:item.key value:item.value filename:item.filename extendedData:item.extendedData];
}

- (BOOL)saveItemWithKey:(NSString *)key value:(NSData *)value {
    return [self saveItemWithKey:key value:value filename:nil extendedData:nil];
}

/// See the header: file-backed when `filename` is set, inline blob otherwise.
/// When switching an existing key from file to inline, the stale file is removed.
- (BOOL)saveItemWithKey:(NSString *)key value:(NSData *)value filename:(NSString *)filename extendedData:(NSData *)extendedData {
    if (key.length == 0 || value.length == 0) return NO;
    if (_type == YYKVStorageTypeFile && filename.length == 0) {
        return NO;
    }
    
    if (filename.length) {
        if (![self _fileWriteWithName:filename data:value]) {
            return NO;
        }
        if (![self _dbSaveWithKey:key value:value fileName:filename extendedData:extendedData]) {
            // Keep db and file system consistent: roll back the file on db failure.
            [self _fileDeleteWithName:filename];
            return NO;
        }
        return YES;
    } else {
        if (_type != YYKVStorageTypeSQLite) {
            NSString *filename = [self _dbGetFilenameWithKey:key];
            if (filename) {
                [self _fileDeleteWithName:filename];
            }
        }
        return [self _dbSaveWithKey:key value:value fileName:nil extendedData:extendedData];
    }
}

- (BOOL)removeItemForKey:(NSString *)key {
    if (key.length == 0) return NO;
    switch (_type) {
        case YYKVStorageTypeSQLite: {
            return [self _dbDeleteItemWithKey:key];
        } break;
        case YYKVStorageTypeFile:
        case YYKVStorageTypeMixed: {
            NSString *filename = [self _dbGetFilenameWithKey:key];
            if (filename) {
                [self _fileDeleteWithName:filename];
            }
            return [self _dbDeleteItemWithKey:key];
        } break;
        default: return NO;
    }
}

- (BOOL)removeItemForKeys:(NSArray *)keys {
    if (keys.count == 0) return NO;
    switch (_type) {
        case YYKVStorageTypeSQLite: {
            return [self _dbDeleteItemWithKeys:keys];
        } break;
        case YYKVStorageTypeFile:
        case YYKVStorageTypeMixed: {
            NSArray *filenames = [self _dbGetFilenameWithKeys:keys];
            for (NSString *filename in filenames) {
                [self _fileDeleteWithName:filename];
            }
            return [self _dbDeleteItemWithKeys:keys];
        } break;
        default: return NO;
    }
}

- (BOOL)removeItemsLargerThanSize:(int)size {
    if (size == INT_MAX) return YES;
    if (size <= 0) return [self removeAllItems];
    
    switch (_type) {
        case YYKVStorageTypeSQLite: {
            if ([self _dbDeleteItemsWithSizeLargerThan:size]) {
                [self _dbCheckpoint];
                return YES;
            }
        } break;
        case YYKVStorageTypeFile:
        case YYKVStorageTypeMixed: {
            NSArray *filenames = [self _dbGetFilenamesWithSizeLargerThan:size];
            for (NSString *name in filenames) {
                [self _fileDeleteWithName:name];
            }
            if ([self _dbDeleteItemsWithSizeLargerThan:size]) {
                [self _dbCheckpoint];
                return YES;
            }
        } break;
    }
    return NO;
}

- (BOOL)removeItemsEarlierThanTime:(int)time {
    if (time <= 0) return YES;
    if (time == INT_MAX) return [self removeAllItems];
    
    switch (_type) {
        case YYKVStorageTypeSQLite: {
            if ([self _dbDeleteItemsWithTimeEarlierThan:time]) {
                [self _dbCheckpoint];
                return YES;
            }
        } break;
        case YYKVStorageTypeFile:
        case YYKVStorageTypeMixed: {
            NSArray *filenames = [self _dbGetFilenamesWithTimeEarlierThan:time];
            for (NSString *name in filenames) {
                [self _fileDeleteWithName:name];
            }
            if ([self _dbDeleteItemsWithTimeEarlierThan:time]) {
                [self _dbCheckpoint];
                return YES;
            }
        } break;
    }
    return NO;
}

/// LRU-evict in batches of 16 until total size fits under `maxSize`.
- (BOOL)removeItemsToFitSize:(int)maxSize {
    if (maxSize == INT_MAX) return YES;
    if (maxSize <= 0) return [self removeAllItems];
    
    int total = [self _dbGetTotalItemSize];
    if (total < 0) return NO;
    if (total <= maxSize) return YES;
    
    NSArray *items = nil;
    BOOL suc = NO;
    do {
        int perCount = 16;
        items = [self _dbGetItemSizeInfoOrderByTimeAscWithLimit:perCount];
        for (YYKVStorageItem *item in items) {
            if (total > maxSize) {
                if (item.filename) {
                    [self _fileDeleteWithName:item.filename];
                }
                suc = [self _dbDeleteItemWithKey:item.key];
                total -= item.size;
            } else {
                break;
            }
            if (!suc) break;
        }
    } while (total > maxSize && items.count > 0 && suc);
    if (suc) [self _dbCheckpoint];
    return suc;
}

/// LRU-evict in batches of 16 until the item count fits under `maxCount`.
- (BOOL)removeItemsToFitCount:(int)maxCount {
    if (maxCount == INT_MAX) return YES;
    if (maxCount <= 0) return [self removeAllItems];
    
    int total = [self _dbGetTotalItemCount];
    if (total < 0) return NO;
    if (total <= maxCount) return YES;
    
    NSArray *items = nil;
    BOOL suc = NO;
    do {
        int perCount = 16;
        items = [self _dbGetItemSizeInfoOrderByTimeAscWithLimit:perCount];
        for (YYKVStorageItem *item in items) {
            if (total > maxCount) {
                if (item.filename) {
                    [self _fileDeleteWithName:item.filename];
                }
                suc = [self _dbDeleteItemWithKey:item.key];
                total--;
            } else {
                break;
            }
            if (!suc) break;
        }
    } while (total > maxCount && items.count > 0 && suc);
    if (suc) [self _dbCheckpoint];
    return suc;
}

/// Fast clear: close the db, trash everything, rebuild an empty store.
- (BOOL)removeAllItems {
    if (![self _dbClose]) return NO;
    [self _reset];
    if (![self _dbOpen]) return NO;
    if (![self _dbInitialize]) return NO;
    return YES;
}

/// Item-by-item clear with progress reporting (slower than -removeAllItems).
- (void)removeAllItemsWithProgressBlock:(void(^)(int removedCount, int totalCount))progress
                               endBlock:(void(^)(BOOL error))end {
    int total = [self _dbGetTotalItemCount];
    if (total <= 0) {
        if (end) end(total < 0);
    } else {
        int left = total;
        int perCount = 32;
        NSArray *items = nil;
        BOOL suc = NO;
        do {
            items = [self _dbGetItemSizeInfoOrderByTimeAscWithLimit:perCount];
            for (YYKVStorageItem *item in items) {
                if (left > 0) {
                    if (item.filename) {
                        [self _fileDeleteWithName:item.filename];
                    }
                    suc = [self _dbDeleteItemWithKey:item.key];
                    left--;
                } else {
                    break;
                }
                if (!suc) break;
            }
            if (progress) progress(total - left, total);
        } while (left > 0 && items.count > 0 && suc);
        if (suc) [self _dbCheckpoint];
        if (end) end(!suc);
    }
}

/// Full item fetch; loads the external file if needed. A row whose backing file
/// has vanished is treated as broken and removed from the db.
- (YYKVStorageItem *)getItemForKey:(NSString *)key {
    if (key.length == 0) return nil;
    YYKVStorageItem *item = [self _dbGetItemWithKey:key excludeInlineData:NO];
    if (item) {
        [self _dbUpdateAccessTimeWithKey:key];
        if (item.filename) {
            item.value = [self _fileReadWithName:item.filename];
            if (!item.value) {
                [self _dbDeleteItemWithKey:key];
                item = nil;
            }
        }
    }
    return item;
}

/// Metadata-only fetch; does not load values or touch the LRU timestamp.
- (YYKVStorageItem *)getItemInfoForKey:(NSString *)key {
    if (key.length == 0) return nil;
    YYKVStorageItem *item = [self _dbGetItemWithKey:key excludeInlineData:YES];
    return item;
}

/// Value-only fetch; resolves file vs. inline storage per `_type` and removes
/// rows whose backing file has vanished.
- (NSData *)getItemValueForKey:(NSString *)key {
    if (key.length == 0) return nil;
    NSData *value = nil;
    switch (_type) {
        case YYKVStorageTypeFile: {
            NSString *filename = [self _dbGetFilenameWithKey:key];
            if (filename) {
                value = [self _fileReadWithName:filename];
                if (!value) {
                    [self _dbDeleteItemWithKey:key];
                    value = nil;
                }
            }
        } break;
        case YYKVStorageTypeSQLite: {
            value = [self _dbGetValueWithKey:key];
        } break;
        case YYKVStorageTypeMixed: {
            NSString *filename = [self _dbGetFilenameWithKey:key];
            if (filename) {
                value = [self _fileReadWithName:filename];
                if (!value) {
                    [self _dbDeleteItemWithKey:key];
                    value = nil;
                }
            } else {
                value = [self _dbGetValueWithKey:key];
            }
        } break;
    }
    if (value) {
        [self _dbUpdateAccessTimeWithKey:key];
    }
    return value;
}

/// Batch full fetch; drops (and deletes) items whose backing file is missing.
- (NSArray *)getItemForKeys:(NSArray *)keys {
    if (keys.count == 0) return nil;
    NSMutableArray *items = [self _dbGetItemWithKeys:keys excludeInlineData:NO];
    if (_type != YYKVStorageTypeSQLite) {
        for (NSInteger i = 0, max = items.count; i < max; i++) {
            YYKVStorageItem *item = items[i];
            if (item.filename) {
                item.value = [self _fileReadWithName:item.filename];
                if (!item.value) {
                    if (item.key) [self _dbDeleteItemWithKey:item.key];
                    [items removeObjectAtIndex:i];
                    i--;  // compensate for the removal while iterating by index
                    max--;
                }
            }
        }
    }
    if (items.count > 0) {
        [self _dbUpdateAccessTimeWithKeys:keys];
    }
    return items.count ? items : nil;
}

- (NSArray *)getItemInfoForKeys:(NSArray *)keys {
    if (keys.count == 0) return nil;
    return [self _dbGetItemWithKeys:keys excludeInlineData:YES];
}

- (NSDictionary *)getItemValueForKeys:(NSArray *)keys {
    NSMutableArray *items = (NSMutableArray *)[self getItemForKeys:keys];
    NSMutableDictionary *kv = [NSMutableDictionary new];
    for (YYKVStorageItem *item in items) {
        if (item.key && item.value) {
            [kv setObject:item.value forKey:item.key];
        }
    }
    return kv.count ? kv : nil;
}

- (BOOL)itemExistsForKey:(NSString *)key {
    if (key.length == 0) return NO;
    return [self _dbGetItemCountWithKey:key] > 0;
}

- (int)getItemsCount {
    return [self _dbGetTotalItemCount];
}

- (int)getItemsSize {
    return [self _dbGetTotalItemSize];
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYMemoryCache.h
================================================
//
//  YYMemoryCache.h
//  YYCache
//
//  Created by ibireme on 15/2/7.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 YYMemoryCache is a fast in-memory cache that stores key-value pairs.
 In contrast to NSDictionary, keys are retained and not copied.
 The API and performance is similar to `NSCache`, all methods are thread-safe.
 
 YYMemoryCache objects differ from NSCache in a few ways:
 
 * It uses LRU (least-recently-used) to remove objects; NSCache's eviction method
   is non-deterministic.
 * It can be controlled by cost, count and age; NSCache's limits are imprecise.
 * It can be configured to automatically evict objects when receive memory
   warning or app enter background.
 
 The time of `Access Methods` in YYMemoryCache is typically in constant time (O(1)).
 */
@interface YYMemoryCache : NSObject

#pragma mark - Attribute
///=============================================================================
/// @name Attribute
///=============================================================================

/** The name of the cache. Default is nil. */
@property (nullable, copy) NSString *name;

/** The number of objects in the cache (read-only) */
@property (readonly) NSUInteger totalCount;

/** The total cost of objects in the cache (read-only). */
@property (readonly) NSUInteger totalCost;


#pragma mark - Limit
///=============================================================================
/// @name Limit
///=============================================================================

/**
 The maximum number of objects the cache should hold.
 
 @discussion The default value is NSUIntegerMax, which means no limit.
 This is not a strict limit—if the cache goes over the limit, some objects in
 the cache could be evicted later in background thread.
 */
@property NSUInteger countLimit;

/**
 The maximum total cost that the cache can hold before it starts evicting objects.
 
 @discussion The default value is NSUIntegerMax, which means no limit.
 This is not a strict limit—if the cache goes over the limit, some objects in
 the cache could be evicted later in background thread.
 */
@property NSUInteger costLimit;

/**
 The maximum expiry time of objects in cache.
 
 @discussion The default value is DBL_MAX, which means no limit.
 This is not a strict limit—if an object goes over the limit, the object could
 be evicted later in background thread.
 */
@property NSTimeInterval ageLimit;

/**
 The auto trim check time interval in seconds. Default is 5.0.
 
 @discussion The cache holds an internal timer to check whether the cache
 reaches its limits, and if the limit is reached, it begins to evict objects.
 */
@property NSTimeInterval autoTrimInterval;

/**
 If `YES`, the cache will remove all objects when the app receives a memory warning.
 The default value is `YES`.
 */
@property BOOL shouldRemoveAllObjectsOnMemoryWarning;

/**
 If `YES`, the cache will remove all objects when the app enters the background.
 The default value is `YES`.
 */
@property BOOL shouldRemoveAllObjectsWhenEnteringBackground;

/**
 A block to be executed when the app receives a memory warning.
 The default value is nil.
 */
@property (nullable, copy) void(^didReceiveMemoryWarningBlock)(YYMemoryCache *cache);

/**
 A block to be executed when the app enters the background.
 The default value is nil.
 */
@property (nullable, copy) void(^didEnterBackgroundBlock)(YYMemoryCache *cache);

/**
 If `YES`, the key-value pair will be released on main thread, otherwise on
 background thread. Default is NO.
 
 @discussion You may set this value to `YES` if the key-value object contains
 the instance which should be released in main thread (such as UIView/CALayer).
 */
@property BOOL releaseOnMainThread;

/**
 If `YES`, the key-value pair will be released asynchronously to avoid blocking
 the access methods, otherwise it will be released in the access method
 (such as removeObjectForKey:). Default is YES.
 */
@property BOOL releaseAsynchronously;


#pragma mark - Access Methods
///=============================================================================
/// @name Access Methods
///=============================================================================

/**
 Returns a Boolean value that indicates whether a given key is in cache.
 
 @param key An object identifying the value. If nil, just return `NO`.
 @return Whether the key is in cache.
 */
- (BOOL)containsObjectForKey:(id)key;

/**
 Returns the value associated with a given key.
 
 @param key An object identifying the value. If nil, just return nil.
 @return The value associated with key, or nil if no value is associated with key.
 */
- (nullable id)objectForKey:(id)key;

/**
 Sets the value of the specified key in the cache (0 cost).
 
 @param object The object to be stored in the cache. If nil, it calls `removeObjectForKey:`.
 @param key    The key with which to associate the value. If nil, this method has no effect.
 @discussion Unlike an NSMutableDictionary object, a cache does not copy the key
 objects that are put into it.
 */
- (void)setObject:(nullable id)object forKey:(id)key;

/**
 Sets the value of the specified key in the cache, and associates the key-value
 pair with the specified cost.
 
 @param object The object to store in the cache. If nil, it calls `removeObjectForKey`.
 @param key    The key with which to associate the value. If nil, this method has no effect.
 @param cost   The cost with which to associate the key-value pair.
 @discussion Unlike an NSMutableDictionary object, a cache does not copy the key
 objects that are put into it.
 */
- (void)setObject:(nullable id)object forKey:(id)key withCost:(NSUInteger)cost;

/**
 Removes the value of the specified key in the cache.
 
 @param key The key identifying the value to be removed. If nil, this method has no effect.
 */
- (void)removeObjectForKey:(id)key;

/**
 Empties the cache immediately.
 */
- (void)removeAllObjects;


#pragma mark - Trim
///=============================================================================
/// @name Trim
///=============================================================================

/**
 Removes objects from the cache with LRU, until the `totalCount` is below or
 equal to the specified value.
 @param count  The total count allowed to remain after the cache has been trimmed.
 */
- (void)trimToCount:(NSUInteger)count;

/**
 Removes objects from the cache with LRU, until the `totalCost` is below or
 equal to the specified value.
 @param cost The total cost allowed to remain after the cache has been trimmed.
 */
- (void)trimToCost:(NSUInteger)cost;

/**
 Removes objects from the cache with LRU, until all expiry objects removed by
 the specified value.
 @param age  The maximum age (in seconds) of objects.
*/ - (void)trimToAge:(NSTimeInterval)age; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYCache/YYCache/YYMemoryCache.m ================================================ // // YYMemoryCache.m // YYCache // // Created by ibireme on 15/2/7. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "YYMemoryCache.h" #import #import #import #import static inline dispatch_queue_t YYMemoryCacheGetReleaseQueue() { return dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0); } /** A node in linked map. Typically, you should not use this class directly. */ @interface _YYLinkedMapNode : NSObject { @package __unsafe_unretained _YYLinkedMapNode *_prev; // retained by dic __unsafe_unretained _YYLinkedMapNode *_next; // retained by dic id _key; id _value; NSUInteger _cost; NSTimeInterval _time; } @end @implementation _YYLinkedMapNode @end /** A linked map used by YYMemoryCache. It's not thread-safe and does not validate the parameters. Typically, you should not use this class directly. */ @interface _YYLinkedMap : NSObject { @package CFMutableDictionaryRef _dic; // do not set object directly NSUInteger _totalCost; NSUInteger _totalCount; _YYLinkedMapNode *_head; // MRU, do not change it directly _YYLinkedMapNode *_tail; // LRU, do not change it directly BOOL _releaseOnMainThread; BOOL _releaseAsynchronously; } /// Insert a node at head and update the total cost. /// Node and node.key should not be nil. - (void)insertNodeAtHead:(_YYLinkedMapNode *)node; /// Bring a inner node to header. /// Node should already inside the dic. - (void)bringNodeToHead:(_YYLinkedMapNode *)node; /// Remove a inner node and update the total cost. /// Node should already inside the dic. - (void)removeNode:(_YYLinkedMapNode *)node; /// Remove tail node if exist. 
- (_YYLinkedMapNode *)removeTailNode;

/// Remove all node in background queue.
- (void)removeAll;

@end

@implementation _YYLinkedMap

- (instancetype)init {
    self = [super init];
    // The CFDictionary retains both key and node; nodes are kept alive by it,
    // which is why the node's _prev/_next links can be __unsafe_unretained.
    _dic = CFDictionaryCreateMutable(CFAllocatorGetDefault(), 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    _releaseOnMainThread = NO;
    _releaseAsynchronously = YES;
    return self;
}

- (void)dealloc {
    CFRelease(_dic);
}

/// Inserts `node` as the new head (MRU) and updates the totals.
- (void)insertNodeAtHead:(_YYLinkedMapNode *)node {
    CFDictionarySetValue(_dic, (__bridge const void *)(node->_key), (__bridge const void *)(node));
    _totalCost += node->_cost;
    _totalCount++;
    if (_head) {
        node->_next = _head;
        _head->_prev = node;
        _head = node;
    } else {
        // list was empty
        _head = _tail = node;
    }
}

/// Moves an existing node to the head, i.e. marks it most-recently-used.
- (void)bringNodeToHead:(_YYLinkedMapNode *)node {
    if (_head == node) return; // already MRU
    
    if (_tail == node) {
        _tail = node->_prev;
        _tail->_next = nil;
    } else {
        // unlink from the middle of the list
        node->_next->_prev = node->_prev;
        node->_prev->_next = node->_next;
    }
    node->_next = _head;
    node->_prev = nil;
    _head->_prev = node;
    _head = node;
}

/// Unlinks `node`, removes it from the dictionary and updates the totals.
- (void)removeNode:(_YYLinkedMapNode *)node {
    CFDictionaryRemoveValue(_dic, (__bridge const void *)(node->_key));
    _totalCost -= node->_cost;
    _totalCount--;
    if (node->_next) node->_next->_prev = node->_prev;
    if (node->_prev) node->_prev->_next = node->_next;
    if (_head == node) _head = node->_next;
    if (_tail == node) _tail = node->_prev;
}

/// Removes and returns the tail (LRU) node, or nil if the map is empty.
- (_YYLinkedMapNode *)removeTailNode {
    if (!_tail) return nil;
    _YYLinkedMapNode *tail = _tail;
    CFDictionaryRemoveValue(_dic, (__bridge const void *)(_tail->_key));
    _totalCost -= _tail->_cost;
    _totalCount--;
    if (_head == _tail) {
        _head = _tail = nil;
    } else {
        _tail = _tail->_prev;
        _tail->_next = nil;
    }
    return tail;
}

/// Empties the map. The old dictionary (which retains every node) is handed
/// to the configured queue so node deallocation happens off the caller.
- (void)removeAll {
    _totalCost = 0;
    _totalCount = 0;
    _head = nil;
    _tail = nil;
    if (CFDictionaryGetCount(_dic) > 0) {
        CFMutableDictionaryRef holder = _dic;
        _dic = CFDictionaryCreateMutable(CFAllocatorGetDefault(), 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
        
        if (_releaseAsynchronously) {
            dispatch_queue_t queue =
_releaseOnMainThread ? dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
            dispatch_async(queue, ^{
                CFRelease(holder); // hold and release in specified queue
            });
        } else if (_releaseOnMainThread && !pthread_main_np()) {
            dispatch_async(dispatch_get_main_queue(), ^{
                CFRelease(holder); // hold and release in specified queue
            });
        } else {
            CFRelease(holder); // synchronous release on the current thread
        }
    }
}

@end


@implementation YYMemoryCache {
    pthread_mutex_t _lock;   // guards every access to _lru
    _YYLinkedMap *_lru;      // LRU storage; not thread-safe by itself
    dispatch_queue_t _queue; // serial queue for background trimming
}

/// Re-schedules itself every `_autoTrimInterval` seconds on a low-priority
/// global queue; the weak reference lets the cache deallocate, which ends
/// the loop.
- (void)_trimRecursively {
    __weak typeof(self) _self = self;
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(_autoTrimInterval * NSEC_PER_SEC)), dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
        __strong typeof(_self) self = _self;
        if (!self) return;
        [self _trimInBackground];
        [self _trimRecursively];
    });
}

/// Runs the three trim passes serially on the internal queue.
- (void)_trimInBackground {
    dispatch_async(_queue, ^{
        [self _trimToCost:self->_costLimit];
        [self _trimToCount:self->_countLimit];
        [self _trimToAge:self->_ageLimit];
    });
}

/// Evicts LRU nodes until totalCost <= costLimit. Uses trylock plus short
/// sleeps so eviction never holds the lock against the access methods for
/// long; evicted nodes are collected in `holder` and released on the
/// configured queue.
- (void)_trimToCost:(NSUInteger)costLimit {
    BOOL finish = NO;
    pthread_mutex_lock(&_lock);
    if (costLimit == 0) {
        [_lru removeAll];
        finish = YES;
    } else if (_lru->_totalCost <= costLimit) {
        finish = YES; // already within the limit
    }
    pthread_mutex_unlock(&_lock);
    if (finish) return;
    
    NSMutableArray *holder = [NSMutableArray new];
    while (!finish) {
        if (pthread_mutex_trylock(&_lock) == 0) {
            if (_lru->_totalCost > costLimit) {
                _YYLinkedMapNode *node = [_lru removeTailNode];
                if (node) [holder addObject:node];
            } else {
                finish = YES;
            }
            pthread_mutex_unlock(&_lock);
        } else {
            usleep(10 * 1000); //10 ms
        }
    }
    if (holder.count) {
        dispatch_queue_t queue = _lru->_releaseOnMainThread ?
dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
        dispatch_async(queue, ^{
            [holder count]; // release in queue
        });
    }
}

/// Evicts LRU nodes until totalCount <= countLimit (same trylock strategy
/// as -_trimToCost:).
- (void)_trimToCount:(NSUInteger)countLimit {
    BOOL finish = NO;
    pthread_mutex_lock(&_lock);
    if (countLimit == 0) {
        [_lru removeAll];
        finish = YES;
    } else if (_lru->_totalCount <= countLimit) {
        finish = YES; // already within the limit
    }
    pthread_mutex_unlock(&_lock);
    if (finish) return;
    
    NSMutableArray *holder = [NSMutableArray new];
    while (!finish) {
        if (pthread_mutex_trylock(&_lock) == 0) {
            if (_lru->_totalCount > countLimit) {
                _YYLinkedMapNode *node = [_lru removeTailNode];
                if (node) [holder addObject:node];
            } else {
                finish = YES;
            }
            pthread_mutex_unlock(&_lock);
        } else {
            usleep(10 * 1000); //10 ms
        }
    }
    if (holder.count) {
        dispatch_queue_t queue = _lru->_releaseOnMainThread ? dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
        dispatch_async(queue, ^{
            [holder count]; // release in queue
        });
    }
}

/// Evicts nodes whose last-access time is older than ageLimit seconds.
/// The tail is the oldest node, so eviction stops at the first fresh one.
- (void)_trimToAge:(NSTimeInterval)ageLimit {
    BOOL finish = NO;
    NSTimeInterval now = CACurrentMediaTime();
    pthread_mutex_lock(&_lock);
    if (ageLimit <= 0) {
        [_lru removeAll];
        finish = YES;
    } else if (!_lru->_tail || (now - _lru->_tail->_time) <= ageLimit) {
        finish = YES; // empty, or the oldest entry is still fresh
    }
    pthread_mutex_unlock(&_lock);
    if (finish) return;
    
    NSMutableArray *holder = [NSMutableArray new];
    while (!finish) {
        if (pthread_mutex_trylock(&_lock) == 0) {
            if (_lru->_tail && (now - _lru->_tail->_time) > ageLimit) {
                _YYLinkedMapNode *node = [_lru removeTailNode];
                if (node) [holder addObject:node];
            } else {
                finish = YES;
            }
            pthread_mutex_unlock(&_lock);
        } else {
            usleep(10 * 1000); //10 ms
        }
    }
    if (holder.count) {
        dispatch_queue_t queue = _lru->_releaseOnMainThread ?
dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
        dispatch_async(queue, ^{
            [holder count]; // release in queue
        });
    }
}

/// UIApplicationDidReceiveMemoryWarningNotification handler.
- (void)_appDidReceiveMemoryWarningNotification {
    if (self.didReceiveMemoryWarningBlock) {
        self.didReceiveMemoryWarningBlock(self);
    }
    if (self.shouldRemoveAllObjectsOnMemoryWarning) {
        [self removeAllObjects];
    }
}

/// UIApplicationDidEnterBackgroundNotification handler.
- (void)_appDidEnterBackgroundNotification {
    if (self.didEnterBackgroundBlock) {
        self.didEnterBackgroundBlock(self);
    }
    if (self.shouldRemoveAllObjectsWhenEnteringBackground) {
        [self removeAllObjects];
    }
}

#pragma mark - public

- (instancetype)init {
    self = super.init;
    pthread_mutex_init(&_lock, NULL);
    _lru = [_YYLinkedMap new];
    _queue = dispatch_queue_create("com.ibireme.cache.memory", DISPATCH_QUEUE_SERIAL);
    
    // Defaults: unlimited cache, trimmed every 5 s, fully purged on memory
    // warning and when entering the background.
    _countLimit = NSUIntegerMax;
    _costLimit = NSUIntegerMax;
    _ageLimit = DBL_MAX;
    _autoTrimInterval = 5.0;
    _shouldRemoveAllObjectsOnMemoryWarning = YES;
    _shouldRemoveAllObjectsWhenEnteringBackground = YES;
    
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_appDidReceiveMemoryWarningNotification) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_appDidEnterBackgroundNotification) name:UIApplicationDidEnterBackgroundNotification object:nil];
    
    [self _trimRecursively]; // start the periodic trim loop
    return self;
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
    [_lru removeAll];
    pthread_mutex_destroy(&_lock);
}

// totalCount/totalCost are read from the map under the lock.
- (NSUInteger)totalCount {
    pthread_mutex_lock(&_lock);
    NSUInteger count = _lru->_totalCount;
    pthread_mutex_unlock(&_lock);
    return count;
}

- (NSUInteger)totalCost {
    pthread_mutex_lock(&_lock);
    NSUInteger totalCost = _lru->_totalCost;
    pthread_mutex_unlock(&_lock);
    return totalCost;
}

// Custom accessors: these two properties proxy to the _lru map under the lock.
- (BOOL)releaseOnMainThread {
pthread_mutex_lock(&_lock);
    BOOL releaseOnMainThread = _lru->_releaseOnMainThread;
    pthread_mutex_unlock(&_lock);
    return releaseOnMainThread;
}

- (void)setReleaseOnMainThread:(BOOL)releaseOnMainThread {
    pthread_mutex_lock(&_lock);
    _lru->_releaseOnMainThread = releaseOnMainThread;
    pthread_mutex_unlock(&_lock);
}

- (BOOL)releaseAsynchronously {
    pthread_mutex_lock(&_lock);
    BOOL releaseAsynchronously = _lru->_releaseAsynchronously;
    pthread_mutex_unlock(&_lock);
    return releaseAsynchronously;
}

- (void)setReleaseAsynchronously:(BOOL)releaseAsynchronously {
    pthread_mutex_lock(&_lock);
    _lru->_releaseAsynchronously = releaseAsynchronously;
    pthread_mutex_unlock(&_lock);
}

/// Existence check only; does NOT refresh the key's LRU position.
- (BOOL)containsObjectForKey:(id)key {
    if (!key) return NO;
    pthread_mutex_lock(&_lock);
    BOOL contains = CFDictionaryContainsKey(_lru->_dic, (__bridge const void *)(key));
    pthread_mutex_unlock(&_lock);
    return contains;
}

/// O(1) lookup; a hit refreshes the node's access time and moves it to MRU.
- (id)objectForKey:(id)key {
    if (!key) return nil;
    pthread_mutex_lock(&_lock);
    _YYLinkedMapNode *node = CFDictionaryGetValue(_lru->_dic, (__bridge const void *)(key));
    if (node) {
        node->_time = CACurrentMediaTime();
        [_lru bringNodeToHead:node];
    }
    pthread_mutex_unlock(&_lock);
    return node ?
node->_value : nil;
}

- (void)setObject:(id)object forKey:(id)key {
    [self setObject:object forKey:key withCost:0];
}

/// Inserts or updates the pair, then enforces the limits: a cost overflow is
/// trimmed asynchronously on the internal queue; a count overflow evicts the
/// single LRU tail node inline.
- (void)setObject:(id)object forKey:(id)key withCost:(NSUInteger)cost {
    if (!key) return;
    if (!object) {
        // nil value means removal, matching NSMutableDictionary-style APIs
        [self removeObjectForKey:key];
        return;
    }
    pthread_mutex_lock(&_lock);
    _YYLinkedMapNode *node = CFDictionaryGetValue(_lru->_dic, (__bridge const void *)(key));
    NSTimeInterval now = CACurrentMediaTime();
    if (node) {
        // existing key: adjust total cost, refresh time/value, mark MRU
        _lru->_totalCost -= node->_cost;
        _lru->_totalCost += cost;
        node->_cost = cost;
        node->_time = now;
        node->_value = object;
        [_lru bringNodeToHead:node];
    } else {
        node = [_YYLinkedMapNode new];
        node->_cost = cost;
        node->_time = now;
        node->_key = key;
        node->_value = object;
        [_lru insertNodeAtHead:node];
    }
    if (_lru->_totalCost > _costLimit) {
        dispatch_async(_queue, ^{
            [self trimToCost:_costLimit];
        });
    }
    if (_lru->_totalCount > _countLimit) {
        _YYLinkedMapNode *node = [_lru removeTailNode];
        if (_lru->_releaseAsynchronously) {
            dispatch_queue_t queue = _lru->_releaseOnMainThread ? dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
            dispatch_async(queue, ^{
                [node class]; //hold and release in queue
            });
        } else if (_lru->_releaseOnMainThread && !pthread_main_np()) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [node class]; //hold and release in queue
            });
        }
        // NOTE(review): when neither branch fires, the evicted node is
        // released here as it leaves scope, while the lock is still held.
    }
    pthread_mutex_unlock(&_lock);
}

/// Removes the pair for `key`; the evicted node is released on the configured
/// queue when asynchronous release is enabled.
- (void)removeObjectForKey:(id)key {
    if (!key) return;
    pthread_mutex_lock(&_lock);
    _YYLinkedMapNode *node = CFDictionaryGetValue(_lru->_dic, (__bridge const void *)(key));
    if (node) {
        [_lru removeNode:node];
        if (_lru->_releaseAsynchronously) {
            dispatch_queue_t queue = _lru->_releaseOnMainThread ?
dispatch_get_main_queue() : YYMemoryCacheGetReleaseQueue();
            dispatch_async(queue, ^{
                [node class]; //hold and release in queue
            });
        } else if (_lru->_releaseOnMainThread && !pthread_main_np()) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [node class]; //hold and release in queue
            });
        }
    }
    pthread_mutex_unlock(&_lock);
}

- (void)removeAllObjects {
    pthread_mutex_lock(&_lock);
    [_lru removeAll];
    pthread_mutex_unlock(&_lock);
}

// Public trim entry points delegate to the private lock-aware workers.
- (void)trimToCount:(NSUInteger)count {
    if (count == 0) {
        // trimming to zero is a full purge
        [self removeAllObjects];
        return;
    }
    [self _trimToCount:count];
}

- (void)trimToCost:(NSUInteger)cost {
    [self _trimToCost:cost];
}

- (void)trimToAge:(NSTimeInterval)age {
    [self _trimToAge:age];
}

- (NSString *)description {
    if (_name) return [NSString stringWithFormat:@"<%@: %p> (%@)", self.class, self, _name];
    else return [NSString stringWithFormat:@"<%@: %p>", self.class, self];
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/LICENSE
================================================
The MIT License (MIT)

Copyright (c) 2015 ibireme

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/README.md ================================================ YYImage ============== [![License MIT](https://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/ibireme/YYImage/master/LICENSE)  [![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)](https://github.com/Carthage/Carthage)  [![CocoaPods](http://img.shields.io/cocoapods/v/YYImage.svg?style=flat)](http://cocoapods.org/?q= YYImage)  [![CocoaPods](http://img.shields.io/cocoapods/p/YYImage.svg?style=flat)](http://cocoapods.org/?q= YYImage)  [![Support](https://img.shields.io/badge/support-iOS%206%2B%20-blue.svg?style=flat)](https://www.apple.com/nl/ios/)  [![Build Status](https://travis-ci.org/ibireme/YYImage.svg?branch=master)](https://travis-ci.org/ibireme/YYImage) Image framework for iOS to display/encode/decode animated WebP, APNG, GIF, and more.
(It's a component of [YYKit](https://github.com/ibireme/YYKit)) ![niconiconi~](https://raw.github.com/ibireme/YYImage/master/Demo/YYImageDemo/niconiconi@2x.gif ) Features ============== - Display/encode/decode animated image with these types:
    WebP, APNG, GIF. - Display/encode/decode still image with these types:
    WebP, PNG, GIF, JPEG, JP2, TIFF, BMP, ICO, ICNS. - Baseline/progressive/interlaced image decode with these types:
    PNG, GIF, JPEG, BMP. - Display frame based image animation and sprite sheet animation. - Dynamic memory buffer for lower memory usage. - Fully compatible with UIImage and UIImageView class. - Extendable protocol for custom image animation. - Fully documented. Usage ============== ###Display animated image // File: ani@3x.gif UIImage *image = [YYImage imageNamed:@"ani.gif"]; UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image]; [self.view addSubView:imageView]; ###Display frame animation // Files: frame1.png, frame2.png, frame3.png NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"]; NSArray *times = @[@0.1, @0.2, @0.1]; UIImage *image = [YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES]; UIImageView *imageView = [YYAnimatedImageView alloc] initWithImage:image]; [self.view addSubView:imageView]; ###Display sprite sheet animation // 8 * 12 sprites in a single sheet image UIImage *spriteSheet = [UIImage imageNamed:@"sprite-sheet"]; NSMutableArray *contentRects = [NSMutableArray new]; NSMutableArray *durations = [NSMutableArray new]; for (int j = 0; j < 12; j++) { for (int i = 0; i < 8; i++) { CGRect rect; rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12); rect.origin.x = img.size.width / 8 * i; rect.origin.y = img.size.height / 12 * j; [contentRects addObject:[NSValue valueWithCGRect:rect]]; [durations addObject:@(1 / 60.0)]; } } YYSpriteSheetImage *sprite; sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img contentRects:contentRects frameDurations:durations loopCount:0]; YYAnimatedImageView *imageView = [YYAnimatedImageView new]; imageView.size = CGSizeMake(img.size.width / 8, img.size.height / 12); imageView.image = sprite; [self.view addSubView:imageView]; ###Animation control YYAnimatedImageView *imageView = ...; // pause: [imageView stopAnimating]; // play: [imageView startAnimating]; // set frame index: imageView.currentAnimatedImageIndex = 12; // get 
current status image.currentIsPlayingAnimation; ###Image decoder // Decode single frame: NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"]; YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0]; UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // Progressive: NSMutableData *data = [NSMutableData new]; YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0]; while(newDataArrived) { [data appendData:newData]; [decoder updateData:data final:NO]; if (decoder.frameCount > 0) { UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // progressive display... } } [decoder updateData:data final:YES]; UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // final display... ###Image encoder // Encode still image: YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG]; jpegEncoder.quality = 0.9; [jpegEncoder addImage:image duration:0]; NSData jpegData = [jpegEncoder encode]; // Encode animated image: YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP]; webpEncoder.loopCount = 5; [webpEncoder addImage:image0 duration:0.1]; [webpEncoder addImage:image1 duration:0.15]; [webpEncoder addImage:image2 duration:0.2]; NSData webpData = [webpEncoder encode]; ###Image type detection // Get image type from image data YYImageType type = YYImageDetectType(data); if (type == YYImageTypePNG) ... Installation ============== ### CocoaPods 1. Update cocoapods to the latest version. 2. Add `pod 'YYImage'` to your Podfile. 3. Run `pod install` or `pod update`. 4. Import \. 5. Notice: it doesn't include WebP subspec by default, if you want to support WebP format, you may add `pod 'YYImage/WebP'` to your Podfile. ### Carthage 1. Add `github "ibireme/YYImage"` to your Cartfile. 2. Run `carthage update --platform ios` and add the framework to your project. 3. Import \. 4. 
Notice: carthage framework doesn't include WebP component, if you want to support WebP format, use CocoaPods or install manually. ### Manually 1. Download all the files in the YYImage subdirectory. 2. Add the source files to your Xcode project. 3. Link with required frameworks: * UIKit * CoreFoundation * QuartzCore * AssetsLibrary * ImageIO * Accelerate * MobileCoreServices * libz 4. Import `YYImage.h`. 5. Notice: if you want to support WebP format, you may add `Vendor/WebP.framework`(static library) to your Xcode project. FAQ ============== _Q: Why I can't display WebP image?_ A: Make sure you added the `WebP.framework` in your project. You may call `YYImageWebPAvailable()` to check whether the WebP subspec is installed correctly. _Q: Why I can't play APNG animation?_ A: You should disable the `Compress PNG Files` and `Remove Text Metadata From PNG Files` in your project's build settings. Or you can rename your APNG file's extension name with `apng`. Documentation ============== Full API documentation is available on [CocoaDocs](http://cocoadocs.org/docsets/YYImage/).
You can also install documentation locally using [appledoc](https://github.com/tomaz/appledoc). Requirements ============== This library requires `iOS 6.0+` and `Xcode 7.0+`. License ============== YYImage is provided under the MIT license. See LICENSE file for details.

--- 中文介绍 ============== YYImage: 功能强大的 iOS 图像框架。
(该项目是 [YYKit](https://github.com/ibireme/YYKit) 组件之一) ![niconiconi~](https://raw.github.com/ibireme/YYImage/master/Demo/YYImageDemo/niconiconi@2x.gif ) 特性 ============== - 支持以下类型动画图像的播放/编码/解码:
    WebP, APNG, GIF。 - 支持以下类型静态图像的显示/编码/解码:
    WebP, PNG, GIF, JPEG, JP2, TIFF, BMP, ICO, ICNS。 - 支持以下类型图片的渐进式/逐行扫描/隔行扫描解码:
    PNG, GIF, JPEG, BMP。 - 支持多张图片构成的帧动画播放,支持单张图片的 sprite sheet 动画。 - 高效的动态内存缓存管理,以保证高性能低内存的动画播放。 - 完全兼容 UIImage 和 UIImageView,使用方便。 - 保留可扩展的接口,以支持自定义动画。 - 每个类和方法都有完善的文档注释。 用法 ============== ###显示动画类型的图片 // 文件: ani@3x.gif UIImage *image = [YYImage imageNamed:@"ani.gif"]; UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image]; [self.view addSubView:imageView]; ###播放帧动画 // 文件: frame1.png, frame2.png, frame3.png NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"]; NSArray *times = @[@0.1, @0.2, @0.1]; UIImage *image = [YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES]; UIImageView *imageView = [YYAnimatedImageView alloc] initWithImage:image]; [self.view addSubView:imageView]; ###播放 sprite sheet 动画 // 8 * 12 sprites in a single sheet image UIImage *spriteSheet = [UIImage imageNamed:@"sprite-sheet"]; NSMutableArray *contentRects = [NSMutableArray new]; NSMutableArray *durations = [NSMutableArray new]; for (int j = 0; j < 12; j++) { for (int i = 0; i < 8; i++) { CGRect rect; rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12); rect.origin.x = img.size.width / 8 * i; rect.origin.y = img.size.height / 12 * j; [contentRects addObject:[NSValue valueWithCGRect:rect]]; [durations addObject:@(1 / 60.0)]; } } YYSpriteSheetImage *sprite; sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img contentRects:contentRects frameDurations:durations loopCount:0]; YYAnimatedImageView *imageView = [YYAnimatedImageView new]; imageView.size = CGSizeMake(img.size.width / 8, img.size.height / 12); imageView.image = sprite; [self.view addSubView:imageView]; ###动画播放控制 YYAnimatedImageView *imageView = ...; // 暂停: [imageView stopAnimating]; // 播放: [imageView startAnimating]; // 设置播放进度: imageView.currentAnimatedImageIndex = 12; // 获取播放状态: image.currentIsPlayingAnimation; //上面两个属性都支持 KVO。 ###图片解码 // 解码单帧图片: NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"]; YYImageDecoder *decoder = 
[YYImageDecoder decoderWithData:data scale:2.0]; UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // 渐进式图片解码 (可用于图片下载显示): NSMutableData *data = [NSMutableData new]; YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0]; while(newDataArrived) { [data appendData:newData]; [decoder updateData:data final:NO]; if (decoder.frameCount > 0) { UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // progressive display... } } [decoder updateData:data final:YES]; UIImage image = [decoder frameAtIndex:0 decodeForDisplay:YES].image; // final display... ###图片编码 // 编码静态图 (支持各种常见图片格式): YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG]; jpegEncoder.quality = 0.9; [jpegEncoder addImage:image duration:0]; NSData jpegData = [jpegEncoder encode]; // 编码动态图 (支持 GIF/APNG/WebP): YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP]; webpEncoder.loopCount = 5; [webpEncoder addImage:image0 duration:0.1]; [webpEncoder addImage:image1 duration:0.15]; [webpEncoder addImage:image2 duration:0.2]; NSData webpData = [webpEncoder encode]; ###图片类型探测 // 获取图片类型 YYImageType type = YYImageDetectType(data); if (type == YYImageTypePNG) ... 安装 ============== ### CocoaPods 1. 将 cocoapods 更新至最新版本. 2. 在 Podfile 中添加 `pod 'YYImage'`。 3. 执行 `pod install` 或 `pod update`。 4. 导入 \。 5. 注意:pod 配置并没有包含 WebP 组件, 如果你需要支持 WebP,可以在 Podfile 中添加 `pod 'YYImage/WebP'`。 ### Carthage 1. 在 Cartfile 中添加 `github "ibireme/YYImage"`。 2. 执行 `carthage update --platform ios` 并将生成的 framework 添加到你的工程。 3. 导入 \。 4. 注意:carthage framework 并没有包含 WebP 组件。如果你需要支持 WebP,可以用 CocoaPods 安装,或者手动安装。 ### 手动安装 1. 下载 YYImage 文件夹内的所有内容。 2. 将 YYImage 内的源文件添加(拖放)到你的工程。 3. 链接以下 frameworks: * UIKit * CoreFoundation * QuartzCore * AssetsLibrary * ImageIO * Accelerate * MobileCoreServices * libz 4. 导入 `YYImage.h`。 5. 
注意:如果你需要支持 WebP,可以将 `Vendor/WebP.framework`(静态库) 加入你的工程。 常见问题 ============== _Q: 为什么我不能显示 WebP 图片?_ A: 确保 `WebP.framework` 已经被添加到你的工程内了。你可以调用 `YYImageWebPAvailable()` 来检查一下 WebP 组件是否被正确安装。 _Q: 为什么我不能播放 APNG 动画?_ A: 你应该禁用 Build Settings 中的 `Compress PNG Files` 和 `Remove Text Metadata From PNG Files`. 或者你也可以把 APNG 文件的扩展名改为`apng`. 文档 ============== 你可以在 [CocoaDocs](http://cocoadocs.org/docsets/YYImage/) 查看在线 API 文档,也可以用 [appledoc](https://github.com/tomaz/appledoc) 本地生成文档。 系统要求 ============== 该项目最低支持 `iOS 6.0` 和 `Xcode 7.0`。 许可证 ============== YYImage 使用 MIT 许可证,详情见 LICENSE 文件。 相关链接 ============== [移动端图片格式调研](http://blog.ibireme.com/2015/11/02/mobile_image_benchmark/)
[iOS 处理图片的一些小 Tip](http://blog.ibireme.com/2015/11/02/ios_image_tips/) ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/config.h ================================================ /* src/webp/config.h. Generated from config.h.in by configure. */ /* src/webp/config.h.in. Generated from configure.ac by autoheader. */ /* Define if building universal (internal helper macro) */ /* #undef AC_APPLE_UNIVERSAL_BUILD */ /* Set to 1 if __builtin_bswap16 is available */ #define HAVE_BUILTIN_BSWAP16 1 /* Set to 1 if __builtin_bswap32 is available */ #define HAVE_BUILTIN_BSWAP32 1 /* Set to 1 if __builtin_bswap64 is available */ #define HAVE_BUILTIN_BSWAP64 1 /* Define to 1 if you have the header file. */ #define HAVE_DLFCN_H 1 /* Define to 1 if you have the header file. */ /* #undef HAVE_GLUT_GLUT_H */ /* Define to 1 if you have the header file. */ /* #undef HAVE_GL_GLUT_H */ /* Define to 1 if you have the header file. */ #define HAVE_INTTYPES_H 1 /* Define to 1 if you have the header file. */ #define HAVE_MEMORY_H 1 /* Define to 1 if you have the header file. */ /* #undef HAVE_OPENGL_GLUT_H */ /* Have PTHREAD_PRIO_INHERIT. */ #define HAVE_PTHREAD_PRIO_INHERIT 1 /* Define to 1 if you have the header file. */ /* #undef HAVE_SHLWAPI_H */ /* Define to 1 if you have the header file. */ #define HAVE_STDINT_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STDLIB_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STRINGS_H 1 /* Define to 1 if you have the header file. */ #define HAVE_STRING_H 1 /* Define to 1 if you have the header file. */ #define HAVE_SYS_STAT_H 1 /* Define to 1 if you have the header file. */ #define HAVE_SYS_TYPES_H 1 /* Define to 1 if you have the header file. */ #define HAVE_UNISTD_H 1 /* Define to 1 if you have the header file. */ /* #undef HAVE_WINCODEC_H */ /* Define to 1 if you have the header file. 
*/ /* #undef HAVE_WINDOWS_H */ /* Define to the sub-directory in which libtool stores uninstalled libraries. */ #define LT_OBJDIR ".libs/" /* Name of package */ #define PACKAGE "libwebp" /* Define to the address where bug reports for this package should be sent. */ #define PACKAGE_BUGREPORT "https://bugs.chromium.org/p/webp" /* Define to the full name of this package. */ #define PACKAGE_NAME "libwebp" /* Define to the full name and version of this package. */ #define PACKAGE_STRING "libwebp 0.5.0" /* Define to the one symbol short name of this package. */ #define PACKAGE_TARNAME "libwebp" /* Define to the home page for this package. */ #define PACKAGE_URL "http://developers.google.com/speed/webp" /* Define to the version of this package. */ #define PACKAGE_VERSION "0.5.0" /* Define to necessary symbol if this constant uses a non-standard name on your system. */ /* #undef PTHREAD_CREATE_JOINABLE */ /* Define to 1 if you have the ANSI C header files. */ #define STDC_HEADERS 1 /* Version number of package */ #define VERSION "0.5.0" /* Enable experimental code */ /* #undef WEBP_EXPERIMENTAL_FEATURES */ /* Define to 1 to force aligned memory operations */ /* #undef WEBP_FORCE_ALIGNED */ /* Set to 1 if AVX2 is supported */ /* #undef WEBP_HAVE_AVX2 */ /* Set to 1 if GIF library is installed */ /* #undef WEBP_HAVE_GIF */ /* Set to 1 if OpenGL is supported */ /* #undef WEBP_HAVE_GL */ /* Set to 1 if JPEG library is installed */ /* #undef WEBP_HAVE_JPEG */ /* Set to 1 if PNG library is installed */ /* #undef WEBP_HAVE_PNG */ /* Set to 1 if SSE2 is supported */ /* #undef WEBP_HAVE_SSE2 */ /* Set to 1 if SSE4.1 is supported */ /* #undef WEBP_HAVE_SSE41 */ /* Set to 1 if TIFF library is installed */ /* #undef WEBP_HAVE_TIFF */ /* Undefine this to disable thread support. */ #define WEBP_USE_THREAD 1 /* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel). 
*/ #if defined AC_APPLE_UNIVERSAL_BUILD # if defined __BIG_ENDIAN__ # define WORDS_BIGENDIAN 1 # endif #else # ifndef WORDS_BIGENDIAN /* # undef WORDS_BIGENDIAN */ # endif #endif ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/decode.h ================================================ // Copyright 2010 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // Main decoding functions for WebP images. // // Author: Skal (pascal.massimino@gmail.com) #ifndef WEBP_WEBP_DECODE_H_ #define WEBP_WEBP_DECODE_H_ #include "./types.h" #ifdef __cplusplus extern "C" { #endif #define WEBP_DECODER_ABI_VERSION 0x0208 // MAJOR(8b) + MINOR(8b) // Note: forward declaring enumerations is not allowed in (strict) C and C++, // the types are left here for reference. // typedef enum VP8StatusCode VP8StatusCode; // typedef enum WEBP_CSP_MODE WEBP_CSP_MODE; typedef struct WebPRGBABuffer WebPRGBABuffer; typedef struct WebPYUVABuffer WebPYUVABuffer; typedef struct WebPDecBuffer WebPDecBuffer; typedef struct WebPIDecoder WebPIDecoder; typedef struct WebPBitstreamFeatures WebPBitstreamFeatures; typedef struct WebPDecoderOptions WebPDecoderOptions; typedef struct WebPDecoderConfig WebPDecoderConfig; // Return the decoder's version number, packed in hexadecimal using 8bits for // each of major/minor/revision. E.g: v2.5.7 is 0x020507. WEBP_EXTERN(int) WebPGetDecoderVersion(void); // Retrieve basic header information: width, height. // This function will also validate the header and return 0 in // case of formatting error. 
// Pointers 'width' and 'height' can be passed NULL if deemed irrelevant. WEBP_EXTERN(int) WebPGetInfo(const uint8_t* data, size_t data_size, int* width, int* height); // Decodes WebP images pointed to by 'data' and returns RGBA samples, along // with the dimensions in *width and *height. The ordering of samples in // memory is R, G, B, A, R, G, B, A... in scan order (endian-independent). // The returned pointer should be deleted calling WebPFree(). // Returns NULL in case of error. WEBP_EXTERN(uint8_t*) WebPDecodeRGBA(const uint8_t* data, size_t data_size, int* width, int* height); // Same as WebPDecodeRGBA, but returning A, R, G, B, A, R, G, B... ordered data. WEBP_EXTERN(uint8_t*) WebPDecodeARGB(const uint8_t* data, size_t data_size, int* width, int* height); // Same as WebPDecodeRGBA, but returning B, G, R, A, B, G, R, A... ordered data. WEBP_EXTERN(uint8_t*) WebPDecodeBGRA(const uint8_t* data, size_t data_size, int* width, int* height); // Same as WebPDecodeRGBA, but returning R, G, B, R, G, B... ordered data. // If the bitstream contains transparency, it is ignored. WEBP_EXTERN(uint8_t*) WebPDecodeRGB(const uint8_t* data, size_t data_size, int* width, int* height); // Same as WebPDecodeRGB, but returning B, G, R, B, G, R... ordered data. WEBP_EXTERN(uint8_t*) WebPDecodeBGR(const uint8_t* data, size_t data_size, int* width, int* height); // Decode WebP images pointed to by 'data' to Y'UV format(*). The pointer // returned is the Y samples buffer. Upon return, *u and *v will point to // the U and V chroma data. These U and V buffers need NOT be passed to // WebPFree(), unlike the returned Y luma one. The dimension of the U and V // planes are both (*width + 1) / 2 and (*height + 1)/ 2. // Upon return, the Y buffer has a stride returned as '*stride', while U and V // have a common stride returned as '*uv_stride'. // Return NULL in case of error. // (*) Also named Y'CbCr. 
See: http://en.wikipedia.org/wiki/YCbCr WEBP_EXTERN(uint8_t*) WebPDecodeYUV(const uint8_t* data, size_t data_size, int* width, int* height, uint8_t** u, uint8_t** v, int* stride, int* uv_stride); // Releases memory returned by the WebPDecode*() functions above. WEBP_EXTERN(void) WebPFree(void* ptr); // These five functions are variants of the above ones, that decode the image // directly into a pre-allocated buffer 'output_buffer'. The maximum storage // available in this buffer is indicated by 'output_buffer_size'. If this // storage is not sufficient (or an error occurred), NULL is returned. // Otherwise, output_buffer is returned, for convenience. // The parameter 'output_stride' specifies the distance (in bytes) // between scanlines. Hence, output_buffer_size is expected to be at least // output_stride x picture-height. WEBP_EXTERN(uint8_t*) WebPDecodeRGBAInto( const uint8_t* data, size_t data_size, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); WEBP_EXTERN(uint8_t*) WebPDecodeARGBInto( const uint8_t* data, size_t data_size, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); WEBP_EXTERN(uint8_t*) WebPDecodeBGRAInto( const uint8_t* data, size_t data_size, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); // RGB and BGR variants. Here too the transparency information, if present, // will be dropped and ignored. WEBP_EXTERN(uint8_t*) WebPDecodeRGBInto( const uint8_t* data, size_t data_size, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); WEBP_EXTERN(uint8_t*) WebPDecodeBGRInto( const uint8_t* data, size_t data_size, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); // WebPDecodeYUVInto() is a variant of WebPDecodeYUV() that operates directly // into pre-allocated luma/chroma plane buffers. This function requires the // strides to be passed: one for the luma plane and one for each of the // chroma ones. 
The size of each plane buffer is passed as 'luma_size', // 'u_size' and 'v_size' respectively. // Pointer to the luma plane ('*luma') is returned or NULL if an error occurred // during decoding (or because some buffers were found to be too small). WEBP_EXTERN(uint8_t*) WebPDecodeYUVInto( const uint8_t* data, size_t data_size, uint8_t* luma, size_t luma_size, int luma_stride, uint8_t* u, size_t u_size, int u_stride, uint8_t* v, size_t v_size, int v_stride); //------------------------------------------------------------------------------ // Output colorspaces and buffer // Colorspaces // Note: the naming describes the byte-ordering of packed samples in memory. // For instance, MODE_BGRA relates to samples ordered as B,G,R,A,B,G,R,A,... // Non-capital names (e.g.:MODE_Argb) relates to pre-multiplied RGB channels. // RGBA-4444 and RGB-565 colorspaces are represented by following byte-order: // RGBA-4444: [r3 r2 r1 r0 g3 g2 g1 g0], [b3 b2 b1 b0 a3 a2 a1 a0], ... // RGB-565: [r4 r3 r2 r1 r0 g5 g4 g3], [g2 g1 g0 b4 b3 b2 b1 b0], ... // In the case WEBP_SWAP_16BITS_CSP is defined, the bytes are swapped for // these two modes: // RGBA-4444: [b3 b2 b1 b0 a3 a2 a1 a0], [r3 r2 r1 r0 g3 g2 g1 g0], ... // RGB-565: [g2 g1 g0 b4 b3 b2 b1 b0], [r4 r3 r2 r1 r0 g5 g4 g3], ... typedef enum WEBP_CSP_MODE { MODE_RGB = 0, MODE_RGBA = 1, MODE_BGR = 2, MODE_BGRA = 3, MODE_ARGB = 4, MODE_RGBA_4444 = 5, MODE_RGB_565 = 6, // RGB-premultiplied transparent modes (alpha value is preserved) MODE_rgbA = 7, MODE_bgrA = 8, MODE_Argb = 9, MODE_rgbA_4444 = 10, // YUV modes must come after RGB ones. 
MODE_YUV = 11, MODE_YUVA = 12, // yuv 4:2:0 MODE_LAST = 13 } WEBP_CSP_MODE; // Some useful macros: static WEBP_INLINE int WebPIsPremultipliedMode(WEBP_CSP_MODE mode) { return (mode == MODE_rgbA || mode == MODE_bgrA || mode == MODE_Argb || mode == MODE_rgbA_4444); } static WEBP_INLINE int WebPIsAlphaMode(WEBP_CSP_MODE mode) { return (mode == MODE_RGBA || mode == MODE_BGRA || mode == MODE_ARGB || mode == MODE_RGBA_4444 || mode == MODE_YUVA || WebPIsPremultipliedMode(mode)); } static WEBP_INLINE int WebPIsRGBMode(WEBP_CSP_MODE mode) { return (mode < MODE_YUV); } //------------------------------------------------------------------------------ // WebPDecBuffer: Generic structure for describing the output sample buffer. struct WebPRGBABuffer { // view as RGBA uint8_t* rgba; // pointer to RGBA samples int stride; // stride in bytes from one scanline to the next. size_t size; // total size of the *rgba buffer. }; struct WebPYUVABuffer { // view as YUVA uint8_t* y, *u, *v, *a; // pointer to luma, chroma U/V, alpha samples int y_stride; // luma stride int u_stride, v_stride; // chroma strides int a_stride; // alpha stride size_t y_size; // luma plane size size_t u_size, v_size; // chroma planes size size_t a_size; // alpha-plane size }; // Output buffer struct WebPDecBuffer { WEBP_CSP_MODE colorspace; // Colorspace. int width, height; // Dimensions. int is_external_memory; // If true, 'internal_memory' pointer is not used. union { WebPRGBABuffer RGBA; WebPYUVABuffer YUVA; } u; // Nameless union of buffer parameters. uint32_t pad[4]; // padding for later use uint8_t* private_memory; // Internally allocated memory (only when // is_external_memory is false). Should not be used // externally, but accessed via the buffer union. }; // Internal, version-checked, entry point WEBP_EXTERN(int) WebPInitDecBufferInternal(WebPDecBuffer*, int); // Initialize the structure as empty. Must be called before any other use. 
// Returns false in case of version mismatch static WEBP_INLINE int WebPInitDecBuffer(WebPDecBuffer* buffer) { return WebPInitDecBufferInternal(buffer, WEBP_DECODER_ABI_VERSION); } // Free any memory associated with the buffer. Must always be called last. // Note: doesn't free the 'buffer' structure itself. WEBP_EXTERN(void) WebPFreeDecBuffer(WebPDecBuffer* buffer); //------------------------------------------------------------------------------ // Enumeration of the status codes typedef enum VP8StatusCode { VP8_STATUS_OK = 0, VP8_STATUS_OUT_OF_MEMORY, VP8_STATUS_INVALID_PARAM, VP8_STATUS_BITSTREAM_ERROR, VP8_STATUS_UNSUPPORTED_FEATURE, VP8_STATUS_SUSPENDED, VP8_STATUS_USER_ABORT, VP8_STATUS_NOT_ENOUGH_DATA } VP8StatusCode; //------------------------------------------------------------------------------ // Incremental decoding // // This API allows streamlined decoding of partial data. // Picture can be incrementally decoded as data become available thanks to the // WebPIDecoder object. This object can be left in a SUSPENDED state if the // picture is only partially decoded, pending additional input. // Code example: // // WebPInitDecBuffer(&buffer); // buffer.colorspace = mode; // ... // WebPIDecoder* idec = WebPINewDecoder(&buffer); // while (has_more_data) { // // ... (get additional data) // status = WebPIAppend(idec, new_data, new_data_size); // if (status != VP8_STATUS_SUSPENDED || // break; // } // // // The above call decodes the current available buffer. // // Part of the image can now be refreshed by calling to // // WebPIDecGetRGB()/WebPIDecGetYUVA() etc. // } // WebPIDelete(idec); // Creates a new incremental decoder with the supplied buffer parameter. // This output_buffer can be passed NULL, in which case a default output buffer // is used (with MODE_RGB). Otherwise, an internal reference to 'output_buffer' // is kept, which means that the lifespan of 'output_buffer' must be larger than // that of the returned WebPIDecoder object. 
// The supplied 'output_buffer' content MUST NOT be changed between calls to // WebPIAppend() or WebPIUpdate() unless 'output_buffer.is_external_memory' is // set to 1. In such a case, it is allowed to modify the pointers, size and // stride of output_buffer.u.RGBA or output_buffer.u.YUVA, provided they remain // within valid bounds. // All other fields of WebPDecBuffer MUST remain constant between calls. // Returns NULL if the allocation failed. WEBP_EXTERN(WebPIDecoder*) WebPINewDecoder(WebPDecBuffer* output_buffer); // This function allocates and initializes an incremental-decoder object, which // will output the RGB/A samples specified by 'csp' into a preallocated // buffer 'output_buffer'. The size of this buffer is at least // 'output_buffer_size' and the stride (distance in bytes between two scanlines) // is specified by 'output_stride'. // Additionally, output_buffer can be passed NULL in which case the output // buffer will be allocated automatically when the decoding starts. The // colorspace 'csp' is taken into account for allocating this buffer. All other // parameters are ignored. // Returns NULL if the allocation failed, or if some parameters are invalid. WEBP_EXTERN(WebPIDecoder*) WebPINewRGB( WEBP_CSP_MODE csp, uint8_t* output_buffer, size_t output_buffer_size, int output_stride); // This function allocates and initializes an incremental-decoder object, which // will output the raw luma/chroma samples into a preallocated planes if // supplied. The luma plane is specified by its pointer 'luma', its size // 'luma_size' and its stride 'luma_stride'. Similarly, the chroma-u plane // is specified by the 'u', 'u_size' and 'u_stride' parameters, and the chroma-v // plane by 'v' and 'v_size'. And same for the alpha-plane. The 'a' pointer // can be pass NULL in case one is not interested in the transparency plane. // Conversely, 'luma' can be passed NULL if no preallocated planes are supplied. 
// In this case, the output buffer will be automatically allocated (using // MODE_YUVA) when decoding starts. All parameters are then ignored. // Returns NULL if the allocation failed or if a parameter is invalid. WEBP_EXTERN(WebPIDecoder*) WebPINewYUVA( uint8_t* luma, size_t luma_size, int luma_stride, uint8_t* u, size_t u_size, int u_stride, uint8_t* v, size_t v_size, int v_stride, uint8_t* a, size_t a_size, int a_stride); // Deprecated version of the above, without the alpha plane. // Kept for backward compatibility. WEBP_EXTERN(WebPIDecoder*) WebPINewYUV( uint8_t* luma, size_t luma_size, int luma_stride, uint8_t* u, size_t u_size, int u_stride, uint8_t* v, size_t v_size, int v_stride); // Deletes the WebPIDecoder object and associated memory. Must always be called // if WebPINewDecoder, WebPINewRGB or WebPINewYUV succeeded. WEBP_EXTERN(void) WebPIDelete(WebPIDecoder* idec); // Copies and decodes the next available data. Returns VP8_STATUS_OK when // the image is successfully decoded. Returns VP8_STATUS_SUSPENDED when more // data is expected. Returns error in other cases. WEBP_EXTERN(VP8StatusCode) WebPIAppend( WebPIDecoder* idec, const uint8_t* data, size_t data_size); // A variant of the above function to be used when data buffer contains // partial data from the beginning. In this case data buffer is not copied // to the internal memory. // Note that the value of the 'data' pointer can change between calls to // WebPIUpdate, for instance when the data buffer is resized to fit larger data. WEBP_EXTERN(VP8StatusCode) WebPIUpdate( WebPIDecoder* idec, const uint8_t* data, size_t data_size); // Returns the RGB/A image decoded so far. Returns NULL if output params // are not initialized yet. The RGB/A output type corresponds to the colorspace // specified during call to WebPINewDecoder() or WebPINewRGB(). // *last_y is the index of last decoded row in raster scan order. Some pointers // (*last_y, *width etc.) 
can be NULL if corresponding information is not // needed. WEBP_EXTERN(uint8_t*) WebPIDecGetRGB( const WebPIDecoder* idec, int* last_y, int* width, int* height, int* stride); // Same as above function to get a YUVA image. Returns pointer to the luma // plane or NULL in case of error. If there is no alpha information // the alpha pointer '*a' will be returned NULL. WEBP_EXTERN(uint8_t*) WebPIDecGetYUVA( const WebPIDecoder* idec, int* last_y, uint8_t** u, uint8_t** v, uint8_t** a, int* width, int* height, int* stride, int* uv_stride, int* a_stride); // Deprecated alpha-less version of WebPIDecGetYUVA(): it will ignore the // alpha information (if present). Kept for backward compatibility. static WEBP_INLINE uint8_t* WebPIDecGetYUV( const WebPIDecoder* idec, int* last_y, uint8_t** u, uint8_t** v, int* width, int* height, int* stride, int* uv_stride) { return WebPIDecGetYUVA(idec, last_y, u, v, NULL, width, height, stride, uv_stride, NULL); } // Generic call to retrieve information about the displayable area. // If non NULL, the left/right/width/height pointers are filled with the visible // rectangular area so far. // Returns NULL in case the incremental decoder object is in an invalid state. // Otherwise returns the pointer to the internal representation. This structure // is read-only, tied to WebPIDecoder's lifespan and should not be modified. WEBP_EXTERN(const WebPDecBuffer*) WebPIDecodedArea( const WebPIDecoder* idec, int* left, int* top, int* width, int* height); //------------------------------------------------------------------------------ // Advanced decoding parametrization // // Code sample for using the advanced decoding API /* // A) Init a configuration object WebPDecoderConfig config; CHECK(WebPInitDecoderConfig(&config)); // B) optional: retrieve the bitstream's features. CHECK(WebPGetFeatures(data, data_size, &config.input) == VP8_STATUS_OK); // C) Adjust 'config', if needed config.no_fancy_upsampling = 1; config.output.colorspace = MODE_BGRA; // etc. 
// Note that you can also make config.output point to an externally // supplied memory buffer, provided it's big enough to store the decoded // picture. Otherwise, config.output will just be used to allocate memory // and store the decoded picture. // D) Decode! CHECK(WebPDecode(data, data_size, &config) == VP8_STATUS_OK); // E) Decoded image is now in config.output (and config.output.u.RGBA) // F) Reclaim memory allocated in config's object. It's safe to call // this function even if the memory is external and wasn't allocated // by WebPDecode(). WebPFreeDecBuffer(&config.output); */ // Features gathered from the bitstream struct WebPBitstreamFeatures { int width; // Width in pixels, as read from the bitstream. int height; // Height in pixels, as read from the bitstream. int has_alpha; // True if the bitstream contains an alpha channel. int has_animation; // True if the bitstream is an animation. int format; // 0 = undefined (/mixed), 1 = lossy, 2 = lossless uint32_t pad[5]; // padding for later use }; // Internal, version-checked, entry point WEBP_EXTERN(VP8StatusCode) WebPGetFeaturesInternal( const uint8_t*, size_t, WebPBitstreamFeatures*, int); // Retrieve features from the bitstream. The *features structure is filled // with information gathered from the bitstream. // Returns VP8_STATUS_OK when the features are successfully retrieved. Returns // VP8_STATUS_NOT_ENOUGH_DATA when more data is needed to retrieve the // features from headers. Returns error in other cases. 
static WEBP_INLINE VP8StatusCode WebPGetFeatures( const uint8_t* data, size_t data_size, WebPBitstreamFeatures* features) { return WebPGetFeaturesInternal(data, data_size, features, WEBP_DECODER_ABI_VERSION); } // Decoding options struct WebPDecoderOptions { int bypass_filtering; // if true, skip the in-loop filtering int no_fancy_upsampling; // if true, use faster pointwise upsampler int use_cropping; // if true, cropping is applied _first_ int crop_left, crop_top; // top-left position for cropping. // Will be snapped to even values. int crop_width, crop_height; // dimension of the cropping area int use_scaling; // if true, scaling is applied _afterward_ int scaled_width, scaled_height; // final resolution int use_threads; // if true, use multi-threaded decoding int dithering_strength; // dithering strength (0=Off, 100=full) int flip; // flip output vertically int alpha_dithering_strength; // alpha dithering strength in [0..100] uint32_t pad[5]; // padding for later use }; // Main object storing the configuration for advanced decoding. struct WebPDecoderConfig { WebPBitstreamFeatures input; // Immutable bitstream features (optional) WebPDecBuffer output; // Output buffer (can point to external mem) WebPDecoderOptions options; // Decoding options }; // Internal, version-checked, entry point WEBP_EXTERN(int) WebPInitDecoderConfigInternal(WebPDecoderConfig*, int); // Initialize the configuration as empty. This function must always be // called first, unless WebPGetFeatures() is to be called. // Returns false in case of mismatched version. static WEBP_INLINE int WebPInitDecoderConfig(WebPDecoderConfig* config) { return WebPInitDecoderConfigInternal(config, WEBP_DECODER_ABI_VERSION); } // Instantiate a new incremental decoder object with the requested // configuration. The bitstream can be passed using 'data' and 'data_size' // parameter, in which case the features will be parsed and stored into // config->input. Otherwise, 'data' can be NULL and no parsing will occur. 
// Note that 'config' can be NULL too, in which case a default configuration // is used. // The return WebPIDecoder object must always be deleted calling WebPIDelete(). // Returns NULL in case of error (and config->status will then reflect // the error condition). WEBP_EXTERN(WebPIDecoder*) WebPIDecode(const uint8_t* data, size_t data_size, WebPDecoderConfig* config); // Non-incremental version. This version decodes the full data at once, taking // 'config' into account. Returns decoding status (which should be VP8_STATUS_OK // if the decoding was successful). WEBP_EXTERN(VP8StatusCode) WebPDecode(const uint8_t* data, size_t data_size, WebPDecoderConfig* config); #ifdef __cplusplus } // extern "C" #endif #endif /* WEBP_WEBP_DECODE_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/demux.h ================================================ // Copyright 2012 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // Demux API. // Enables extraction of image and extended format data from WebP files. // Code Example: Demuxing WebP data to extract all the frames, ICC profile // and EXIF/XMP metadata. /* WebPDemuxer* demux = WebPDemux(&webp_data); uint32_t width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH); uint32_t height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT); // ... (Get information about the features present in the WebP file). uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS); // ... (Iterate over all frames). WebPIterator iter; if (WebPDemuxGetFrame(demux, 1, &iter)) { do { // ... 
(Consume 'iter'; e.g. Decode 'iter.fragment' with WebPDecode(), // ... and get other frame properties like width, height, offsets etc. // ... see 'struct WebPIterator' below for more info). } while (WebPDemuxNextFrame(&iter)); WebPDemuxReleaseIterator(&iter); } // ... (Extract metadata). WebPChunkIterator chunk_iter; if (flags & ICCP_FLAG) WebPDemuxGetChunk(demux, "ICCP", 1, &chunk_iter); // ... (Consume the ICC profile in 'chunk_iter.chunk'). WebPDemuxReleaseChunkIterator(&chunk_iter); if (flags & EXIF_FLAG) WebPDemuxGetChunk(demux, "EXIF", 1, &chunk_iter); // ... (Consume the EXIF metadata in 'chunk_iter.chunk'). WebPDemuxReleaseChunkIterator(&chunk_iter); if (flags & XMP_FLAG) WebPDemuxGetChunk(demux, "XMP ", 1, &chunk_iter); // ... (Consume the XMP metadata in 'chunk_iter.chunk'). WebPDemuxReleaseChunkIterator(&chunk_iter); WebPDemuxDelete(demux); */ #ifndef WEBP_WEBP_DEMUX_H_ #define WEBP_WEBP_DEMUX_H_ #include "./decode.h" // for WEBP_CSP_MODE #include "./mux_types.h" #ifdef __cplusplus extern "C" { #endif #define WEBP_DEMUX_ABI_VERSION 0x0107 // MAJOR(8b) + MINOR(8b) // Note: forward declaring enumerations is not allowed in (strict) C and C++, // the types are left here for reference. // typedef enum WebPDemuxState WebPDemuxState; // typedef enum WebPFormatFeature WebPFormatFeature; typedef struct WebPDemuxer WebPDemuxer; typedef struct WebPIterator WebPIterator; typedef struct WebPChunkIterator WebPChunkIterator; typedef struct WebPAnimInfo WebPAnimInfo; typedef struct WebPAnimDecoderOptions WebPAnimDecoderOptions; //------------------------------------------------------------------------------ // Returns the version number of the demux library, packed in hexadecimal using // 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507. 
WEBP_EXTERN(int) WebPGetDemuxVersion(void); //------------------------------------------------------------------------------ // Life of a Demux object typedef enum WebPDemuxState { WEBP_DEMUX_PARSE_ERROR = -1, // An error occurred while parsing. WEBP_DEMUX_PARSING_HEADER = 0, // Not enough data to parse full header. WEBP_DEMUX_PARSED_HEADER = 1, // Header parsing complete, // data may be available. WEBP_DEMUX_DONE = 2 // Entire file has been parsed. } WebPDemuxState; // Internal, version-checked, entry point WEBP_EXTERN(WebPDemuxer*) WebPDemuxInternal( const WebPData*, int, WebPDemuxState*, int); // Parses the full WebP file given by 'data'. For single images the WebP file // header alone or the file header and the chunk header may be absent. // Returns a WebPDemuxer object on successful parse, NULL otherwise. static WEBP_INLINE WebPDemuxer* WebPDemux(const WebPData* data) { return WebPDemuxInternal(data, 0, NULL, WEBP_DEMUX_ABI_VERSION); } // Parses the possibly incomplete WebP file given by 'data'. // If 'state' is non-NULL it will be set to indicate the status of the demuxer. // Returns NULL in case of error or if there isn't enough data to start parsing; // and a WebPDemuxer object on successful parse. // Note that WebPDemuxer keeps internal pointers to 'data' memory segment. // If this data is volatile, the demuxer object should be deleted (by calling // WebPDemuxDelete()) and WebPDemuxPartial() called again on the new data. // This is usually an inexpensive operation. static WEBP_INLINE WebPDemuxer* WebPDemuxPartial( const WebPData* data, WebPDemuxState* state) { return WebPDemuxInternal(data, 1, state, WEBP_DEMUX_ABI_VERSION); } // Frees memory associated with 'dmux'. WEBP_EXTERN(void) WebPDemuxDelete(WebPDemuxer* dmux); //------------------------------------------------------------------------------ // Data/information extraction. typedef enum WebPFormatFeature { WEBP_FF_FORMAT_FLAGS, // Extended format flags present in the 'VP8X' chunk. 
WEBP_FF_CANVAS_WIDTH, WEBP_FF_CANVAS_HEIGHT, WEBP_FF_LOOP_COUNT, WEBP_FF_BACKGROUND_COLOR, WEBP_FF_FRAME_COUNT // Number of frames present in the demux object. // In case of a partial demux, this is the number of // frames seen so far, with the last frame possibly // being partial. } WebPFormatFeature; // Get the 'feature' value from the 'dmux'. // NOTE: values are only valid if WebPDemux() was used or WebPDemuxPartial() // returned a state > WEBP_DEMUX_PARSING_HEADER. WEBP_EXTERN(uint32_t) WebPDemuxGetI( const WebPDemuxer* dmux, WebPFormatFeature feature); //------------------------------------------------------------------------------ // Frame iteration. struct WebPIterator { int frame_num; int num_frames; // equivalent to WEBP_FF_FRAME_COUNT. int x_offset, y_offset; // offset relative to the canvas. int width, height; // dimensions of this frame. int duration; // display duration in milliseconds. WebPMuxAnimDispose dispose_method; // dispose method for the frame. int complete; // true if 'fragment' contains a full frame. partial images // may still be decoded with the WebP incremental decoder. WebPData fragment; // The frame given by 'frame_num'. Note for historical // reasons this is called a fragment. int has_alpha; // True if the frame contains transparency. WebPMuxAnimBlend blend_method; // Blend operation for the frame. uint32_t pad[2]; // padding for later use. void* private_; // for internal use only. }; // Retrieves frame 'frame_number' from 'dmux'. // 'iter->fragment' points to the frame on return from this function. // Setting 'frame_number' equal to 0 will return the last frame of the image. // Returns false if 'dmux' is NULL or frame 'frame_number' is not present. // Call WebPDemuxReleaseIterator() when use of the iterator is complete. // NOTE: 'dmux' must persist for the lifetime of 'iter'. 
WEBP_EXTERN(int) WebPDemuxGetFrame( const WebPDemuxer* dmux, int frame_number, WebPIterator* iter); // Sets 'iter->fragment' to point to the next ('iter->frame_num' + 1) or // previous ('iter->frame_num' - 1) frame. These functions do not loop. // Returns true on success, false otherwise. WEBP_EXTERN(int) WebPDemuxNextFrame(WebPIterator* iter); WEBP_EXTERN(int) WebPDemuxPrevFrame(WebPIterator* iter); // Releases any memory associated with 'iter'. // Must be called before any subsequent calls to WebPDemuxGetChunk() on the same // iter. Also, must be called before destroying the associated WebPDemuxer with // WebPDemuxDelete(). WEBP_EXTERN(void) WebPDemuxReleaseIterator(WebPIterator* iter); //------------------------------------------------------------------------------ // Chunk iteration. struct WebPChunkIterator { // The current and total number of chunks with the fourcc given to // WebPDemuxGetChunk(). int chunk_num; int num_chunks; WebPData chunk; // The payload of the chunk. uint32_t pad[6]; // padding for later use void* private_; }; // Retrieves the 'chunk_number' instance of the chunk with id 'fourcc' from // 'dmux'. // 'fourcc' is a character array containing the fourcc of the chunk to return, // e.g., "ICCP", "XMP ", "EXIF", etc. // Setting 'chunk_number' equal to 0 will return the last chunk in a set. // Returns true if the chunk is found, false otherwise. Image related chunk // payloads are accessed through WebPDemuxGetFrame() and related functions. // Call WebPDemuxReleaseChunkIterator() when use of the iterator is complete. // NOTE: 'dmux' must persist for the lifetime of the iterator. WEBP_EXTERN(int) WebPDemuxGetChunk(const WebPDemuxer* dmux, const char fourcc[4], int chunk_number, WebPChunkIterator* iter); // Sets 'iter->chunk' to point to the next ('iter->chunk_num' + 1) or previous // ('iter->chunk_num' - 1) chunk. These functions do not loop. // Returns true on success, false otherwise. 
WEBP_EXTERN(int) WebPDemuxNextChunk(WebPChunkIterator* iter);
WEBP_EXTERN(int) WebPDemuxPrevChunk(WebPChunkIterator* iter);

// Releases any memory associated with 'iter'.
// Must be called before destroying the associated WebPDemuxer with
// WebPDemuxDelete().
WEBP_EXTERN(void) WebPDemuxReleaseChunkIterator(WebPChunkIterator* iter);

//------------------------------------------------------------------------------
// WebPAnimDecoder API
//
// This API allows decoding (possibly) animated WebP images.
//
// Code Example:
/*
  WebPAnimDecoderOptions dec_options;
  WebPAnimDecoderOptionsInit(&dec_options);
  // Tune 'dec_options' as needed.
  WebPAnimDecoder* dec = WebPAnimDecoderNew(webp_data, &dec_options);
  WebPAnimInfo anim_info;
  WebPAnimDecoderGetInfo(dec, &anim_info);
  for (uint32_t i = 0; i < anim_info.loop_count; ++i) {
    while (WebPAnimDecoderHasMoreFrames(dec)) {
      uint8_t* buf;
      int timestamp;
      WebPAnimDecoderGetNext(dec, &buf, &timestamp);
      // ... (Render 'buf' based on 'timestamp').
      // ... (Do NOT free 'buf', as it is owned by 'dec').
    }
    WebPAnimDecoderReset(dec);
  }
  const WebPDemuxer* demuxer = WebPAnimDecoderGetDemuxer(dec);
  // ... (Do something using 'demuxer'; e.g. get EXIF/XMP/ICC data).
  WebPAnimDecoderDelete(dec);
*/

typedef struct WebPAnimDecoder WebPAnimDecoder;  // Main opaque object.

// Global options.
struct WebPAnimDecoderOptions {
  // Output colorspace. Only the following modes are supported:
  // MODE_RGBA, MODE_BGRA, MODE_rgbA and MODE_bgrA.
  WEBP_CSP_MODE color_mode;
  int use_threads;      // If true, use multi-threaded decoding.
  uint32_t padding[7];  // Padding for later use.
};

// Internal, version-checked, entry point.
WEBP_EXTERN(int) WebPAnimDecoderOptionsInitInternal(
    WebPAnimDecoderOptions*, int);

// Should always be called, to initialize a fresh WebPAnimDecoderOptions
// structure before modification. Returns false in case of version mismatch.
// WebPAnimDecoderOptionsInit() must have succeeded before using the
// 'dec_options' object.
static WEBP_INLINE int WebPAnimDecoderOptionsInit(
    WebPAnimDecoderOptions* dec_options) {
  // Thin inline shim: forwards to the internal entry point with the
  // compile-time ABI version so version mismatches are detected.
  return WebPAnimDecoderOptionsInitInternal(dec_options,
                                            WEBP_DEMUX_ABI_VERSION);
}

// Internal, version-checked, entry point.
WEBP_EXTERN(WebPAnimDecoder*) WebPAnimDecoderNewInternal(
    const WebPData*, const WebPAnimDecoderOptions*, int);

// Creates and initializes a WebPAnimDecoder object.
// Parameters:
//   webp_data - (in) WebP bitstream. This should remain unchanged during the
//                    lifetime of the output WebPAnimDecoder object.
//   dec_options - (in) decoding options. Can be passed NULL to choose
//                      reasonable defaults (in particular, color mode MODE_RGBA
//                      will be picked).
// Returns:
//   A pointer to the newly created WebPAnimDecoder object, or NULL in case of
//   parsing error, invalid option or memory error.
static WEBP_INLINE WebPAnimDecoder* WebPAnimDecoderNew(
    const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options) {
  return WebPAnimDecoderNewInternal(webp_data, dec_options,
                                    WEBP_DEMUX_ABI_VERSION);
}

// Global information about the animation.
struct WebPAnimInfo {
  uint32_t canvas_width;
  uint32_t canvas_height;
  uint32_t loop_count;
  uint32_t bgcolor;
  uint32_t frame_count;
  uint32_t pad[4];  // padding for later use
};

// Get global information about the animation.
// Parameters:
//   dec - (in) decoder instance to get information from.
//   info - (out) global information fetched from the animation.
// Returns:
//   True on success.
WEBP_EXTERN(int) WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec,
                                        WebPAnimInfo* info);

// Fetch the next frame from 'dec' based on options supplied to
// WebPAnimDecoderNew(). This will be a fully reconstructed canvas of size
// 'canvas_width * 4 * canvas_height', and not just the frame sub-rectangle. The
// returned buffer 'buf' is valid only until the next call to
// WebPAnimDecoderGetNext(), WebPAnimDecoderReset() or WebPAnimDecoderDelete().
// Parameters:
//   dec - (in/out) decoder instance from which the next frame is to be fetched.
// buf - (out) decoded frame. // timestamp - (out) timestamp of the frame in milliseconds. // Returns: // False if any of the arguments are NULL, or if there is a parsing or // decoding error, or if there are no more frames. Otherwise, returns true. WEBP_EXTERN(int) WebPAnimDecoderGetNext(WebPAnimDecoder* dec, uint8_t** buf, int* timestamp); // Check if there are more frames left to decode. // Parameters: // dec - (in) decoder instance to be checked. // Returns: // True if 'dec' is not NULL and some frames are yet to be decoded. // Otherwise, returns false. WEBP_EXTERN(int) WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec); // Resets the WebPAnimDecoder object, so that next call to // WebPAnimDecoderGetNext() will restart decoding from 1st frame. This would be // helpful when all frames need to be decoded multiple times (e.g. // info.loop_count times) without destroying and recreating the 'dec' object. // Parameters: // dec - (in/out) decoder instance to be reset WEBP_EXTERN(void) WebPAnimDecoderReset(WebPAnimDecoder* dec); // Grab the internal demuxer object. // Getting the demuxer object can be useful if one wants to use operations only // available through demuxer; e.g. to get XMP/EXIF/ICC metadata. The returned // demuxer object is owned by 'dec' and is valid only until the next call to // WebPAnimDecoderDelete(). // // Parameters: // dec - (in) decoder instance from which the demuxer object is to be fetched. WEBP_EXTERN(const WebPDemuxer*) WebPAnimDecoderGetDemuxer( const WebPAnimDecoder* dec); // Deletes the WebPAnimDecoder object. // Parameters: // dec - (in/out) decoder instance to be deleted WEBP_EXTERN(void) WebPAnimDecoderDelete(WebPAnimDecoder* dec); #ifdef __cplusplus } // extern "C" #endif #endif /* WEBP_WEBP_DEMUX_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/encode.h ================================================ // Copyright 2011 Google Inc. All Rights Reserved. 
// // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // WebP encoder: main interface // // Author: Skal (pascal.massimino@gmail.com) #ifndef WEBP_WEBP_ENCODE_H_ #define WEBP_WEBP_ENCODE_H_ #include "./types.h" #ifdef __cplusplus extern "C" { #endif #define WEBP_ENCODER_ABI_VERSION 0x0209 // MAJOR(8b) + MINOR(8b) // Note: forward declaring enumerations is not allowed in (strict) C and C++, // the types are left here for reference. // typedef enum WebPImageHint WebPImageHint; // typedef enum WebPEncCSP WebPEncCSP; // typedef enum WebPPreset WebPPreset; // typedef enum WebPEncodingError WebPEncodingError; typedef struct WebPConfig WebPConfig; typedef struct WebPPicture WebPPicture; // main structure for I/O typedef struct WebPAuxStats WebPAuxStats; typedef struct WebPMemoryWriter WebPMemoryWriter; // Return the encoder's version number, packed in hexadecimal using 8bits for // each of major/minor/revision. E.g: v2.5.7 is 0x020507. WEBP_EXTERN(int) WebPGetEncoderVersion(void); //------------------------------------------------------------------------------ // One-stop-shop call! No questions asked: // Returns the size of the compressed data (pointed to by *output), or 0 if // an error occurred. The compressed data must be released by the caller // using the call 'WebPFree(*output)'. // These functions compress using the lossy format, and the quality_factor // can go from 0 (smaller output, lower quality) to 100 (best quality, // larger output). 
WEBP_EXTERN(size_t) WebPEncodeRGB(const uint8_t* rgb, int width, int height, int stride, float quality_factor, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeBGR(const uint8_t* bgr, int width, int height, int stride, float quality_factor, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeRGBA(const uint8_t* rgba, int width, int height, int stride, float quality_factor, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeBGRA(const uint8_t* bgra, int width, int height, int stride, float quality_factor, uint8_t** output); // These functions are the equivalent of the above, but compressing in a // lossless manner. Files are usually larger than lossy format, but will // not suffer any compression loss. WEBP_EXTERN(size_t) WebPEncodeLosslessRGB(const uint8_t* rgb, int width, int height, int stride, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeLosslessBGR(const uint8_t* bgr, int width, int height, int stride, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeLosslessRGBA(const uint8_t* rgba, int width, int height, int stride, uint8_t** output); WEBP_EXTERN(size_t) WebPEncodeLosslessBGRA(const uint8_t* bgra, int width, int height, int stride, uint8_t** output); // Releases memory returned by the WebPEncode*() functions above. WEBP_EXTERN(void) WebPFree(void* ptr); //------------------------------------------------------------------------------ // Coding parameters // Image characteristics hint for the underlying encoder. typedef enum WebPImageHint { WEBP_HINT_DEFAULT = 0, // default preset. WEBP_HINT_PICTURE, // digital picture, like portrait, inner shot WEBP_HINT_PHOTO, // outdoor photograph, with natural lighting WEBP_HINT_GRAPH, // Discrete tone image (graph, map-tile etc). WEBP_HINT_LAST } WebPImageHint; // Compression parameters. struct WebPConfig { int lossless; // Lossless encoding (0=lossy(default), 1=lossless). 
float quality; // between 0 (smallest file) and 100 (biggest) int method; // quality/speed trade-off (0=fast, 6=slower-better) WebPImageHint image_hint; // Hint for image type (lossless only for now). // Parameters related to lossy compression only: int target_size; // if non-zero, set the desired target size in bytes. // Takes precedence over the 'compression' parameter. float target_PSNR; // if non-zero, specifies the minimal distortion to // try to achieve. Takes precedence over target_size. int segments; // maximum number of segments to use, in [1..4] int sns_strength; // Spatial Noise Shaping. 0=off, 100=maximum. int filter_strength; // range: [0 = off .. 100 = strongest] int filter_sharpness; // range: [0 = off .. 7 = least sharp] int filter_type; // filtering type: 0 = simple, 1 = strong (only used // if filter_strength > 0 or autofilter > 0) int autofilter; // Auto adjust filter's strength [0 = off, 1 = on] int alpha_compression; // Algorithm for encoding the alpha plane (0 = none, // 1 = compressed with WebP lossless). Default is 1. int alpha_filtering; // Predictive filtering method for alpha plane. // 0: none, 1: fast, 2: best. Default if 1. int alpha_quality; // Between 0 (smallest size) and 100 (lossless). // Default is 100. int pass; // number of entropy-analysis passes (in [1..10]). int show_compressed; // if true, export the compressed picture back. // In-loop filtering is not applied. int preprocessing; // preprocessing filter: // 0=none, 1=segment-smooth, 2=pseudo-random dithering int partitions; // log2(number of token partitions) in [0..3]. Default // is set to 0 for easier progressive decoding. int partition_limit; // quality degradation allowed to fit the 512k limit // on prediction modes coding (0: no degradation, // 100: maximum possible degradation). int emulate_jpeg_size; // If true, compression parameters will be remapped // to better match the expected output size from // JPEG compression. 
Generally, the output size will // be similar but the degradation will be lower. int thread_level; // If non-zero, try and use multi-threaded encoding. int low_memory; // If set, reduce memory usage (but increase CPU use). int near_lossless; // Near lossless encoding [0 = off(default) .. 100]. // This feature is experimental. int exact; // if non-zero, preserve the exact RGB values under // transparent area. Otherwise, discard this invisible // RGB information for better compression. The default // value is 0. #ifdef WEBP_EXPERIMENTAL_FEATURES int delta_palettization; uint32_t pad[2]; // padding for later use #else uint32_t pad[3]; // padding for later use #endif // WEBP_EXPERIMENTAL_FEATURES }; // Enumerate some predefined settings for WebPConfig, depending on the type // of source picture. These presets are used when calling WebPConfigPreset(). typedef enum WebPPreset { WEBP_PRESET_DEFAULT = 0, // default preset. WEBP_PRESET_PICTURE, // digital picture, like portrait, inner shot WEBP_PRESET_PHOTO, // outdoor photograph, with natural lighting WEBP_PRESET_DRAWING, // hand or line drawing, with high-contrast details WEBP_PRESET_ICON, // small-sized colorful images WEBP_PRESET_TEXT // text-like } WebPPreset; // Internal, version-checked, entry point WEBP_EXTERN(int) WebPConfigInitInternal(WebPConfig*, WebPPreset, float, int); // Should always be called, to initialize a fresh WebPConfig structure before // modification. Returns false in case of version mismatch. WebPConfigInit() // must have succeeded before using the 'config' object. // Note that the default values are lossless=0 and quality=75. static WEBP_INLINE int WebPConfigInit(WebPConfig* config) { return WebPConfigInitInternal(config, WEBP_PRESET_DEFAULT, 75.f, WEBP_ENCODER_ABI_VERSION); } // This function will initialize the configuration according to a predefined // set of parameters (referred to by 'preset') and a given quality factor. // This function can be called as a replacement to WebPConfigInit(). 
Will // return false in case of error. static WEBP_INLINE int WebPConfigPreset(WebPConfig* config, WebPPreset preset, float quality) { return WebPConfigInitInternal(config, preset, quality, WEBP_ENCODER_ABI_VERSION); } // Activate the lossless compression mode with the desired efficiency level // between 0 (fastest, lowest compression) and 9 (slower, best compression). // A good default level is '6', providing a fair tradeoff between compression // speed and final compressed size. // This function will overwrite several fields from config: 'method', 'quality' // and 'lossless'. Returns false in case of parameter error. WEBP_EXTERN(int) WebPConfigLosslessPreset(WebPConfig* config, int level); // Returns true if 'config' is non-NULL and all configuration parameters are // within their valid ranges. WEBP_EXTERN(int) WebPValidateConfig(const WebPConfig* config); //------------------------------------------------------------------------------ // Input / Output // Structure for storing auxiliary statistics (mostly for lossy encoding). struct WebPAuxStats { int coded_size; // final size float PSNR[5]; // peak-signal-to-noise ratio for Y/U/V/All/Alpha int block_count[3]; // number of intra4/intra16/skipped macroblocks int header_bytes[2]; // approximate number of bytes spent for header // and mode-partition #0 int residual_bytes[3][4]; // approximate number of bytes spent for // DC/AC/uv coefficients for each (0..3) segments. 
int segment_size[4]; // number of macroblocks in each segments int segment_quant[4]; // quantizer values for each segments int segment_level[4]; // filtering strength for each segments [0..63] int alpha_data_size; // size of the transparency data int layer_data_size; // size of the enhancement layer data // lossless encoder statistics uint32_t lossless_features; // bit0:predictor bit1:cross-color transform // bit2:subtract-green bit3:color indexing int histogram_bits; // number of precision bits of histogram int transform_bits; // precision bits for transform int cache_bits; // number of bits for color cache lookup int palette_size; // number of color in palette, if used int lossless_size; // final lossless size int lossless_hdr_size; // lossless header (transform, huffman etc) size int lossless_data_size; // lossless image data size uint32_t pad[2]; // padding for later use }; // Signature for output function. Should return true if writing was successful. // data/data_size is the segment of data to write, and 'picture' is for // reference (and so one can make use of picture->custom_ptr). typedef int (*WebPWriterFunction)(const uint8_t* data, size_t data_size, const WebPPicture* picture); // WebPMemoryWrite: a special WebPWriterFunction that writes to memory using // the following WebPMemoryWriter object (to be set as a custom_ptr). struct WebPMemoryWriter { uint8_t* mem; // final buffer (of size 'max_size', larger than 'size'). size_t size; // final size size_t max_size; // total capacity uint32_t pad[1]; // padding for later use }; // The following must be called first before any use. WEBP_EXTERN(void) WebPMemoryWriterInit(WebPMemoryWriter* writer); // The following must be called to deallocate writer->mem memory. The 'writer' // object itself is not deallocated. WEBP_EXTERN(void) WebPMemoryWriterClear(WebPMemoryWriter* writer); // The custom writer to be used with WebPMemoryWriter as custom_ptr. 
Upon // completion, writer.mem and writer.size will hold the coded data. // writer.mem must be freed by calling WebPMemoryWriterClear. WEBP_EXTERN(int) WebPMemoryWrite(const uint8_t* data, size_t data_size, const WebPPicture* picture); // Progress hook, called from time to time to report progress. It can return // false to request an abort of the encoding process, or true otherwise if // everything is OK. typedef int (*WebPProgressHook)(int percent, const WebPPicture* picture); // Color spaces. typedef enum WebPEncCSP { // chroma sampling WEBP_YUV420 = 0, // 4:2:0 WEBP_YUV420A = 4, // alpha channel variant WEBP_CSP_UV_MASK = 3, // bit-mask to get the UV sampling factors WEBP_CSP_ALPHA_BIT = 4 // bit that is set if alpha is present } WebPEncCSP; // Encoding error conditions. typedef enum WebPEncodingError { VP8_ENC_OK = 0, VP8_ENC_ERROR_OUT_OF_MEMORY, // memory error allocating objects VP8_ENC_ERROR_BITSTREAM_OUT_OF_MEMORY, // memory error while flushing bits VP8_ENC_ERROR_NULL_PARAMETER, // a pointer parameter is NULL VP8_ENC_ERROR_INVALID_CONFIGURATION, // configuration is invalid VP8_ENC_ERROR_BAD_DIMENSION, // picture has invalid width/height VP8_ENC_ERROR_PARTITION0_OVERFLOW, // partition is bigger than 512k VP8_ENC_ERROR_PARTITION_OVERFLOW, // partition is bigger than 16M VP8_ENC_ERROR_BAD_WRITE, // error while flushing bytes VP8_ENC_ERROR_FILE_TOO_BIG, // file is bigger than 4G VP8_ENC_ERROR_USER_ABORT, // abort request by user VP8_ENC_ERROR_LAST // list terminator. always last. } WebPEncodingError; // maximum width/height allowed (inclusive), in pixels #define WEBP_MAX_DIMENSION 16383 // Main exchange structure (input samples, output bytes, statistics) struct WebPPicture { // INPUT ////////////// // Main flag for encoder selecting between ARGB or YUV input. // It is recommended to use ARGB input (*argb, argb_stride) for lossless // compression, and YUV input (*y, *u, *v, etc.) 
for lossy compression // since these are the respective native colorspace for these formats. int use_argb; // YUV input (mostly used for input to lossy compression) WebPEncCSP colorspace; // colorspace: should be YUV420 for now (=Y'CbCr). int width, height; // dimensions (less or equal to WEBP_MAX_DIMENSION) uint8_t *y, *u, *v; // pointers to luma/chroma planes. int y_stride, uv_stride; // luma/chroma strides. uint8_t* a; // pointer to the alpha plane int a_stride; // stride of the alpha plane uint32_t pad1[2]; // padding for later use // ARGB input (mostly used for input to lossless compression) uint32_t* argb; // Pointer to argb (32 bit) plane. int argb_stride; // This is stride in pixels units, not bytes. uint32_t pad2[3]; // padding for later use // OUTPUT /////////////// // Byte-emission hook, to store compressed bytes as they are ready. WebPWriterFunction writer; // can be NULL void* custom_ptr; // can be used by the writer. // map for extra information (only for lossy compression mode) int extra_info_type; // 1: intra type, 2: segment, 3: quant // 4: intra-16 prediction mode, // 5: chroma prediction mode, // 6: bit cost, 7: distortion uint8_t* extra_info; // if not NULL, points to an array of size // ((width + 15) / 16) * ((height + 15) / 16) that // will be filled with a macroblock map, depending // on extra_info_type. // STATS AND REPORTS /////////////////////////// // Pointer to side statistics (updated only if not NULL) WebPAuxStats* stats; // Error code for the latest error encountered during encoding WebPEncodingError error_code; // If not NULL, report progress during encoding. WebPProgressHook progress_hook; void* user_data; // this field is free to be set to any value and // used during callbacks (like progress-report e.g.). 
uint32_t pad3[3]; // padding for later use // Unused for now uint8_t *pad4, *pad5; uint32_t pad6[8]; // padding for later use // PRIVATE FIELDS //////////////////// void* memory_; // row chunk of memory for yuva planes void* memory_argb_; // and for argb too. void* pad7[2]; // padding for later use }; // Internal, version-checked, entry point WEBP_EXTERN(int) WebPPictureInitInternal(WebPPicture*, int); // Should always be called, to initialize the structure. Returns false in case // of version mismatch. WebPPictureInit() must have succeeded before using the // 'picture' object. // Note that, by default, use_argb is false and colorspace is WEBP_YUV420. static WEBP_INLINE int WebPPictureInit(WebPPicture* picture) { return WebPPictureInitInternal(picture, WEBP_ENCODER_ABI_VERSION); } //------------------------------------------------------------------------------ // WebPPicture utils // Convenience allocation / deallocation based on picture->width/height: // Allocate y/u/v buffers as per colorspace/width/height specification. // Note! This function will free the previous buffer if needed. // Returns false in case of memory error. WEBP_EXTERN(int) WebPPictureAlloc(WebPPicture* picture); // Release the memory allocated by WebPPictureAlloc() or WebPPictureImport*(). // Note that this function does _not_ free the memory used by the 'picture' // object itself. // Besides memory (which is reclaimed) all other fields of 'picture' are // preserved. WEBP_EXTERN(void) WebPPictureFree(WebPPicture* picture); // Copy the pixels of *src into *dst, using WebPPictureAlloc. Upon return, *dst // will fully own the copied pixels (this is not a view). The 'dst' picture need // not be initialized as its content is overwritten. // Returns false in case of memory allocation error. WEBP_EXTERN(int) WebPPictureCopy(const WebPPicture* src, WebPPicture* dst); // Compute PSNR, SSIM or LSIM distortion metric between two pictures. 
Results // are in dB, stored in result[] in the Y/U/V/Alpha/All or B/G/R/A/All order. // Returns false in case of error (src and ref don't have same dimension, ...) // Warning: this function is rather CPU-intensive. WEBP_EXTERN(int) WebPPictureDistortion( const WebPPicture* src, const WebPPicture* ref, int metric_type, // 0 = PSNR, 1 = SSIM, 2 = LSIM float result[5]); // self-crops a picture to the rectangle defined by top/left/width/height. // Returns false in case of memory allocation error, or if the rectangle is // outside of the source picture. // The rectangle for the view is defined by the top-left corner pixel // coordinates (left, top) as well as its width and height. This rectangle // must be fully be comprised inside the 'src' source picture. If the source // picture uses the YUV420 colorspace, the top and left coordinates will be // snapped to even values. WEBP_EXTERN(int) WebPPictureCrop(WebPPicture* picture, int left, int top, int width, int height); // Extracts a view from 'src' picture into 'dst'. The rectangle for the view // is defined by the top-left corner pixel coordinates (left, top) as well // as its width and height. This rectangle must be fully be comprised inside // the 'src' source picture. If the source picture uses the YUV420 colorspace, // the top and left coordinates will be snapped to even values. // Picture 'src' must out-live 'dst' picture. Self-extraction of view is allowed // ('src' equal to 'dst') as a mean of fast-cropping (but note that doing so, // the original dimension will be lost). Picture 'dst' need not be initialized // with WebPPictureInit() if it is different from 'src', since its content will // be overwritten. // Returns false in case of memory allocation error or invalid parameters. WEBP_EXTERN(int) WebPPictureView(const WebPPicture* src, int left, int top, int width, int height, WebPPicture* dst); // Returns true if the 'picture' is actually a view and therefore does // not own the memory for pixels. 
WEBP_EXTERN(int) WebPPictureIsView(const WebPPicture* picture); // Rescale a picture to new dimension width x height. // If either 'width' or 'height' (but not both) is 0 the corresponding // dimension will be calculated preserving the aspect ratio. // No gamma correction is applied. // Returns false in case of error (invalid parameter or insufficient memory). WEBP_EXTERN(int) WebPPictureRescale(WebPPicture* pic, int width, int height); // Colorspace conversion function to import RGB samples. // Previous buffer will be free'd, if any. // *rgb buffer should have a size of at least height * rgb_stride. // Returns false in case of memory error. WEBP_EXTERN(int) WebPPictureImportRGB( WebPPicture* picture, const uint8_t* rgb, int rgb_stride); // Same, but for RGBA buffer. WEBP_EXTERN(int) WebPPictureImportRGBA( WebPPicture* picture, const uint8_t* rgba, int rgba_stride); // Same, but for RGBA buffer. Imports the RGB direct from the 32-bit format // input buffer ignoring the alpha channel. Avoids needing to copy the data // to a temporary 24-bit RGB buffer to import the RGB only. WEBP_EXTERN(int) WebPPictureImportRGBX( WebPPicture* picture, const uint8_t* rgbx, int rgbx_stride); // Variants of the above, but taking BGR(A|X) input. WEBP_EXTERN(int) WebPPictureImportBGR( WebPPicture* picture, const uint8_t* bgr, int bgr_stride); WEBP_EXTERN(int) WebPPictureImportBGRA( WebPPicture* picture, const uint8_t* bgra, int bgra_stride); WEBP_EXTERN(int) WebPPictureImportBGRX( WebPPicture* picture, const uint8_t* bgrx, int bgrx_stride); // Converts picture->argb data to the YUV420A format. The 'colorspace' // parameter is deprecated and should be equal to WEBP_YUV420. // Upon return, picture->use_argb is set to false. The presence of real // non-opaque transparent values is detected, and 'colorspace' will be // adjusted accordingly. Note that this method is lossy. // Returns false in case of error. 
WEBP_EXTERN(int) WebPPictureARGBToYUVA(WebPPicture* picture, WebPEncCSP /*colorspace = WEBP_YUV420*/); // Same as WebPPictureARGBToYUVA(), but the conversion is done using // pseudo-random dithering with a strength 'dithering' between // 0.0 (no dithering) and 1.0 (maximum dithering). This is useful // for photographic picture. WEBP_EXTERN(int) WebPPictureARGBToYUVADithered( WebPPicture* picture, WebPEncCSP colorspace, float dithering); // Performs 'smart' RGBA->YUVA420 downsampling and colorspace conversion. // Downsampling is handled with extra care in case of color clipping. This // method is roughly 2x slower than WebPPictureARGBToYUVA() but produces better // YUV representation. // Returns false in case of error. WEBP_EXTERN(int) WebPPictureSmartARGBToYUVA(WebPPicture* picture); // Converts picture->yuv to picture->argb and sets picture->use_argb to true. // The input format must be YUV_420 or YUV_420A. // Note that the use of this method is discouraged if one has access to the // raw ARGB samples, since using YUV420 is comparatively lossy. Also, the // conversion from YUV420 to ARGB incurs a small loss too. // Returns false in case of error. WEBP_EXTERN(int) WebPPictureYUVAToARGB(WebPPicture* picture); // Helper function: given a width x height plane of RGBA or YUV(A) samples // clean-up the YUV or RGB samples under fully transparent area, to help // compressibility (no guarantee, though). WEBP_EXTERN(void) WebPCleanupTransparentArea(WebPPicture* picture); // Scan the picture 'picture' for the presence of non fully opaque alpha values. // Returns true in such case. Otherwise returns false (indicating that the // alpha plane can be ignored altogether e.g.). WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* picture); // Remove the transparency information (if present) by blending the color with // the background color 'background_rgb' (specified as 24bit RGB triplet). // After this call, all alpha values are reset to 0xff. 
WEBP_EXTERN(void) WebPBlendAlpha(WebPPicture* pic, uint32_t background_rgb); //------------------------------------------------------------------------------ // Main call // Main encoding call, after config and picture have been initialized. // 'picture' must be less than 16384x16384 in dimension (cf WEBP_MAX_DIMENSION), // and the 'config' object must be a valid one. // Returns false in case of error, true otherwise. // In case of error, picture->error_code is updated accordingly. // 'picture' can hold the source samples in both YUV(A) or ARGB input, depending // on the value of 'picture->use_argb'. It is highly recommended to use // the former for lossy encoding, and the latter for lossless encoding // (when config.lossless is true). Automatic conversion from one format to // another is provided but they both incur some loss. WEBP_EXTERN(int) WebPEncode(const WebPConfig* config, WebPPicture* picture); //------------------------------------------------------------------------------ #ifdef __cplusplus } // extern "C" #endif #endif /* WEBP_WEBP_ENCODE_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/extras.h ================================================ // Copyright 2015 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. 
// ----------------------------------------------------------------------------- // #ifndef WEBP_WEBP_EXTRAS_H_ #define WEBP_WEBP_EXTRAS_H_ #include "./types.h" #ifdef __cplusplus extern "C" { #endif #include "./encode.h" #define WEBP_EXTRAS_ABI_VERSION 0x0000 // MAJOR(8b) + MINOR(8b) //------------------------------------------------------------------------------ // Returns the version number of the extras library, packed in hexadecimal using // 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507. WEBP_EXTERN(int) WebPGetExtrasVersion(void); //------------------------------------------------------------------------------ // Ad-hoc colorspace importers. // Import luma sample (gray scale image) into 'picture'. The 'picture' // width and height must be set prior to calling this function. WEBP_EXTERN(int) WebPImportGray(const uint8_t* gray, WebPPicture* picture); // Import rgb sample in RGB565 packed format into 'picture'. The 'picture' // width and height must be set prior to calling this function. WEBP_EXTERN(int) WebPImportRGB565(const uint8_t* rgb565, WebPPicture* pic); // Import rgb sample in RGB4444 packed format into 'picture'. The 'picture' // width and height must be set prior to calling this function. WEBP_EXTERN(int) WebPImportRGB4444(const uint8_t* rgb4444, WebPPicture* pic); //------------------------------------------------------------------------------ #ifdef __cplusplus } // extern "C" #endif #endif /* WEBP_WEBP_EXTRAS_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/format_constants.h ================================================ // Copyright 2012 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. 
All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // Internal header for constants related to WebP file format. // // Author: Urvang (urvang@google.com) #ifndef WEBP_WEBP_FORMAT_CONSTANTS_H_ #define WEBP_WEBP_FORMAT_CONSTANTS_H_ // Create fourcc of the chunk from the chunk tag characters. #define MKFOURCC(a, b, c, d) ((a) | (b) << 8 | (c) << 16 | (uint32_t)(d) << 24) // VP8 related constants. #define VP8_SIGNATURE 0x9d012a // Signature in VP8 data. #define VP8_MAX_PARTITION0_SIZE (1 << 19) // max size of mode partition #define VP8_MAX_PARTITION_SIZE (1 << 24) // max size for token partition #define VP8_FRAME_HEADER_SIZE 10 // Size of the frame header within VP8 data. // VP8L related constants. #define VP8L_SIGNATURE_SIZE 1 // VP8L signature size. #define VP8L_MAGIC_BYTE 0x2f // VP8L signature byte. #define VP8L_IMAGE_SIZE_BITS 14 // Number of bits used to store // width and height. #define VP8L_VERSION_BITS 3 // 3 bits reserved for version. #define VP8L_VERSION 0 // version 0 #define VP8L_FRAME_HEADER_SIZE 5 // Size of the VP8L frame header. #define MAX_PALETTE_SIZE 256 #define MAX_CACHE_BITS 11 #define HUFFMAN_CODES_PER_META_CODE 5 #define ARGB_BLACK 0xff000000 #define DEFAULT_CODE_LENGTH 8 #define MAX_ALLOWED_CODE_LENGTH 15 #define NUM_LITERAL_CODES 256 #define NUM_LENGTH_CODES 24 #define NUM_DISTANCE_CODES 40 #define CODE_LENGTH_CODES 19 #define MIN_HUFFMAN_BITS 2 // min number of Huffman bits #define MAX_HUFFMAN_BITS 9 // max number of Huffman bits #define TRANSFORM_PRESENT 1 // The bit to be written when next data // to be read is a transform. #define NUM_TRANSFORMS 4 // Maximum number of allowed transform // in a bitstream. typedef enum { PREDICTOR_TRANSFORM = 0, CROSS_COLOR_TRANSFORM = 1, SUBTRACT_GREEN = 2, COLOR_INDEXING_TRANSFORM = 3 } VP8LImageTransformType; // Alpha related constants. 
#define ALPHA_HEADER_LEN 1 #define ALPHA_NO_COMPRESSION 0 #define ALPHA_LOSSLESS_COMPRESSION 1 #define ALPHA_PREPROCESSED_LEVELS 1 // Mux related constants. #define TAG_SIZE 4 // Size of a chunk tag (e.g. "VP8L"). #define CHUNK_SIZE_BYTES 4 // Size needed to store chunk's size. #define CHUNK_HEADER_SIZE 8 // Size of a chunk header. #define RIFF_HEADER_SIZE 12 // Size of the RIFF header ("RIFFnnnnWEBP"). #define ANMF_CHUNK_SIZE 16 // Size of an ANMF chunk. #define ANIM_CHUNK_SIZE 6 // Size of an ANIM chunk. #define FRGM_CHUNK_SIZE 6 // Size of a FRGM chunk. #define VP8X_CHUNK_SIZE 10 // Size of a VP8X chunk. #define MAX_CANVAS_SIZE (1 << 24) // 24-bit max for VP8X width/height. #define MAX_IMAGE_AREA (1ULL << 32) // 32-bit max for width x height. #define MAX_LOOP_COUNT (1 << 16) // maximum value for loop-count #define MAX_DURATION (1 << 24) // maximum duration #define MAX_POSITION_OFFSET (1 << 24) // maximum frame/fragment x/y offset // Maximum chunk payload is such that adding the header and padding won't // overflow a uint32_t. #define MAX_CHUNK_PAYLOAD (~0U - CHUNK_HEADER_SIZE - 1) #endif /* WEBP_WEBP_FORMAT_CONSTANTS_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/mux.h ================================================ // Copyright 2011 Google Inc. All Rights Reserved. // // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // RIFF container manipulation and encoding for WebP images. 
// // Authors: Urvang (urvang@google.com) // Vikas (vikasa@google.com) #ifndef WEBP_WEBP_MUX_H_ #define WEBP_WEBP_MUX_H_ #include "./mux_types.h" #ifdef __cplusplus extern "C" { #endif #define WEBP_MUX_ABI_VERSION 0x0106 // MAJOR(8b) + MINOR(8b) //------------------------------------------------------------------------------ // Mux API // // This API allows manipulation of WebP container images containing features // like color profile, metadata, animation and fragmented images. // // Code Example#1: Create a WebPMux object with image data, color profile and // XMP metadata. /* int copy_data = 0; WebPMux* mux = WebPMuxNew(); // ... (Prepare image data). WebPMuxSetImage(mux, &image, copy_data); // ... (Prepare ICCP color profile data). WebPMuxSetChunk(mux, "ICCP", &icc_profile, copy_data); // ... (Prepare XMP metadata). WebPMuxSetChunk(mux, "XMP ", &xmp, copy_data); // Get data from mux in WebP RIFF format. WebPMuxAssemble(mux, &output_data); WebPMuxDelete(mux); // ... (Consume output_data; e.g. write output_data.bytes to file). WebPDataClear(&output_data); */ // Code Example#2: Get image and color profile data from a WebP file. /* int copy_data = 0; // ... (Read data from file). WebPMux* mux = WebPMuxCreate(&data, copy_data); WebPMuxGetFrame(mux, 1, &image); // ... (Consume image; e.g. call WebPDecode() to decode the data). WebPMuxGetChunk(mux, "ICCP", &icc_profile); // ... (Consume icc_data). WebPMuxDelete(mux); free(data); */ // Note: forward declaring enumerations is not allowed in (strict) C and C++, // the types are left here for reference. // typedef enum WebPMuxError WebPMuxError; // typedef enum WebPChunkId WebPChunkId; typedef struct WebPMux WebPMux; // main opaque object. 
typedef struct WebPMuxFrameInfo WebPMuxFrameInfo; typedef struct WebPMuxAnimParams WebPMuxAnimParams; typedef struct WebPAnimEncoderOptions WebPAnimEncoderOptions; // Error codes typedef enum WebPMuxError { WEBP_MUX_OK = 1, WEBP_MUX_NOT_FOUND = 0, WEBP_MUX_INVALID_ARGUMENT = -1, WEBP_MUX_BAD_DATA = -2, WEBP_MUX_MEMORY_ERROR = -3, WEBP_MUX_NOT_ENOUGH_DATA = -4 } WebPMuxError; // IDs for different types of chunks. typedef enum WebPChunkId { WEBP_CHUNK_VP8X, // VP8X WEBP_CHUNK_ICCP, // ICCP WEBP_CHUNK_ANIM, // ANIM WEBP_CHUNK_ANMF, // ANMF WEBP_CHUNK_FRGM, // FRGM WEBP_CHUNK_ALPHA, // ALPH WEBP_CHUNK_IMAGE, // VP8/VP8L WEBP_CHUNK_EXIF, // EXIF WEBP_CHUNK_XMP, // XMP WEBP_CHUNK_UNKNOWN, // Other chunks. WEBP_CHUNK_NIL } WebPChunkId; //------------------------------------------------------------------------------ // Returns the version number of the mux library, packed in hexadecimal using // 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507. WEBP_EXTERN(int) WebPGetMuxVersion(void); //------------------------------------------------------------------------------ // Life of a Mux object // Internal, version-checked, entry point WEBP_EXTERN(WebPMux*) WebPNewInternal(int); // Creates an empty mux object. // Returns: // A pointer to the newly created empty mux object. // Or NULL in case of memory error. static WEBP_INLINE WebPMux* WebPMuxNew(void) { return WebPNewInternal(WEBP_MUX_ABI_VERSION); } // Deletes the mux object. // Parameters: // mux - (in/out) object to be deleted WEBP_EXTERN(void) WebPMuxDelete(WebPMux* mux); //------------------------------------------------------------------------------ // Mux creation. // Internal, version-checked, entry point WEBP_EXTERN(WebPMux*) WebPMuxCreateInternal(const WebPData*, int, int); // Creates a mux object from raw data given in WebP RIFF format. 
// Parameters: // bitstream - (in) the bitstream data in WebP RIFF format // copy_data - (in) value 1 indicates given data WILL be copied to the mux // object and value 0 indicates data will NOT be copied. // Returns: // A pointer to the mux object created from given data - on success. // NULL - In case of invalid data or memory error. static WEBP_INLINE WebPMux* WebPMuxCreate(const WebPData* bitstream, int copy_data) { return WebPMuxCreateInternal(bitstream, copy_data, WEBP_MUX_ABI_VERSION); } //------------------------------------------------------------------------------ // Non-image chunks. // Note: Only non-image related chunks should be managed through chunk APIs. // (Image related chunks are: "ANMF", "FRGM", "VP8 ", "VP8L" and "ALPH"). // To add, get and delete images, use WebPMuxSetImage(), WebPMuxPushFrame(), // WebPMuxGetFrame() and WebPMuxDeleteFrame(). // Adds a chunk with id 'fourcc' and data 'chunk_data' in the mux object. // Any existing chunk(s) with the same id will be removed. // Parameters: // mux - (in/out) object to which the chunk is to be added // fourcc - (in) a character array containing the fourcc of the given chunk; // e.g., "ICCP", "XMP ", "EXIF" etc. // chunk_data - (in) the chunk data to be added // copy_data - (in) value 1 indicates given data WILL be copied to the mux // object and value 0 indicates data will NOT be copied. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL // or if fourcc corresponds to an image chunk. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxSetChunk( WebPMux* mux, const char fourcc[4], const WebPData* chunk_data, int copy_data); // Gets a reference to the data of the chunk with id 'fourcc' in the mux object. // The caller should NOT free the returned data. 
// Parameters: // mux - (in) object from which the chunk data is to be fetched // fourcc - (in) a character array containing the fourcc of the chunk; // e.g., "ICCP", "XMP ", "EXIF" etc. // chunk_data - (out) returned chunk data // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL // or if fourcc corresponds to an image chunk. // WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given id. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxGetChunk( const WebPMux* mux, const char fourcc[4], WebPData* chunk_data); // Deletes the chunk with the given 'fourcc' from the mux object. // Parameters: // mux - (in/out) object from which the chunk is to be deleted // fourcc - (in) a character array containing the fourcc of the chunk; // e.g., "ICCP", "XMP ", "EXIF" etc. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or fourcc is NULL // or if fourcc corresponds to an image chunk. // WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given fourcc. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxDeleteChunk( WebPMux* mux, const char fourcc[4]); //------------------------------------------------------------------------------ // Images. // Encapsulates data about a single frame/fragment. struct WebPMuxFrameInfo { WebPData bitstream; // image data: can be a raw VP8/VP8L bitstream // or a single-image WebP file. int x_offset; // x-offset of the frame. int y_offset; // y-offset of the frame. int duration; // duration of the frame (in milliseconds). WebPChunkId id; // frame type: should be one of WEBP_CHUNK_ANMF, // WEBP_CHUNK_FRGM or WEBP_CHUNK_IMAGE WebPMuxAnimDispose dispose_method; // Disposal method for the frame. WebPMuxAnimBlend blend_method; // Blend operation for the frame. uint32_t pad[1]; // padding for later use }; // Sets the (non-animated and non-fragmented) image in the mux object. // Note: Any existing images (including frames/fragments) will be removed. 
// Parameters: // mux - (in/out) object in which the image is to be set // bitstream - (in) can be a raw VP8/VP8L bitstream or a single-image // WebP file (non-animated and non-fragmented) // copy_data - (in) value 1 indicates given data WILL be copied to the mux // object and value 0 indicates data will NOT be copied. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux is NULL or bitstream is NULL. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxSetImage( WebPMux* mux, const WebPData* bitstream, int copy_data); // Adds a frame at the end of the mux object. // Notes: (1) frame.id should be one of WEBP_CHUNK_ANMF or WEBP_CHUNK_FRGM // (2) For setting a non-animated non-fragmented image, use // WebPMuxSetImage() instead. // (3) Type of frame being pushed must be same as the frames in mux. // (4) As WebP only supports even offsets, any odd offset will be snapped // to an even location using: offset &= ~1 // Parameters: // mux - (in/out) object to which the frame is to be added // frame - (in) frame data. // copy_data - (in) value 1 indicates given data WILL be copied to the mux // object and value 0 indicates data will NOT be copied. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL // or if content of 'frame' is invalid. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxPushFrame( WebPMux* mux, const WebPMuxFrameInfo* frame, int copy_data); // Gets the nth frame from the mux object. // The content of 'frame->bitstream' is allocated using malloc(), and NOT // owned by the 'mux' object. It MUST be deallocated by the caller by calling // WebPDataClear(). // nth=0 has a special meaning - last position. 
// Parameters: // mux - (in) object from which the info is to be fetched // nth - (in) index of the frame in the mux object // frame - (out) data of the returned frame // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL. // WEBP_MUX_NOT_FOUND - if there are less than nth frames in the mux object. // WEBP_MUX_BAD_DATA - if nth frame chunk in mux is invalid. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxGetFrame( const WebPMux* mux, uint32_t nth, WebPMuxFrameInfo* frame); // Deletes a frame from the mux object. // nth=0 has a special meaning - last position. // Parameters: // mux - (in/out) object from which a frame is to be deleted // nth - (in) The position from which the frame is to be deleted // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux is NULL. // WEBP_MUX_NOT_FOUND - If there are less than nth frames in the mux object // before deletion. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxDeleteFrame(WebPMux* mux, uint32_t nth); //------------------------------------------------------------------------------ // Animation. // Animation parameters. struct WebPMuxAnimParams { uint32_t bgcolor; // Background color of the canvas stored (in MSB order) as: // Bits 00 to 07: Alpha. // Bits 08 to 15: Red. // Bits 16 to 23: Green. // Bits 24 to 31: Blue. int loop_count; // Number of times to repeat the animation [0 = infinite]. }; // Sets the animation parameters in the mux object. Any existing ANIM chunks // will be removed. // Parameters: // mux - (in/out) object in which ANIM chunk is to be set/added // params - (in) animation parameters. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxSetAnimationParams( WebPMux* mux, const WebPMuxAnimParams* params); // Gets the animation parameters from the mux object. 
// Parameters: // mux - (in) object from which the animation parameters to be fetched // params - (out) animation parameters extracted from the ANIM chunk // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL. // WEBP_MUX_NOT_FOUND - if ANIM chunk is not present in mux object. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxGetAnimationParams( const WebPMux* mux, WebPMuxAnimParams* params); //------------------------------------------------------------------------------ // Misc Utilities. // Sets the canvas size for the mux object. The width and height can be // specified explicitly or left as zero (0, 0). // * When width and height are specified explicitly, then this frame bound is // enforced during subsequent calls to WebPMuxAssemble() and an error is // reported if any animated frame does not completely fit within the canvas. // * When unspecified (0, 0), the constructed canvas will get the frame bounds // from the bounding-box over all frames after calling WebPMuxAssemble(). // Parameters: // mux - (in) object to which the canvas size is to be set // width - (in) canvas width // height - (in) canvas height // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux is NULL; or // width or height are invalid or out of bounds // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxSetCanvasSize(WebPMux* mux, int width, int height); // Gets the canvas size from the mux object. // Note: This method assumes that the VP8X chunk, if present, is up-to-date. // That is, the mux object hasn't been modified since the last call to // WebPMuxAssemble() or WebPMuxCreate(). // Parameters: // mux - (in) object from which the canvas size is to be fetched // width - (out) canvas width // height - (out) canvas height // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux, width or height is NULL. // WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid. // WEBP_MUX_OK - on success. 
WEBP_EXTERN(WebPMuxError) WebPMuxGetCanvasSize(const WebPMux* mux, int* width, int* height); // Gets the feature flags from the mux object. // Note: This method assumes that the VP8X chunk, if present, is up-to-date. // That is, the mux object hasn't been modified since the last call to // WebPMuxAssemble() or WebPMuxCreate(). // Parameters: // mux - (in) object from which the features are to be fetched // flags - (out) the flags specifying which features are present in the // mux object. This will be an OR of various flag values. // Enum 'WebPFeatureFlags' can be used to test individual flag values. // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux or flags is NULL. // WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxGetFeatures(const WebPMux* mux, uint32_t* flags); // Gets number of chunks with the given 'id' in the mux object. // Parameters: // mux - (in) object from which the info is to be fetched // id - (in) chunk id specifying the type of chunk // num_elements - (out) number of chunks with the given chunk id // Returns: // WEBP_MUX_INVALID_ARGUMENT - if mux, or num_elements is NULL. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxNumChunks(const WebPMux* mux, WebPChunkId id, int* num_elements); // Assembles all chunks in WebP RIFF format and returns in 'assembled_data'. // This function also validates the mux object. // Note: The content of 'assembled_data' will be ignored and overwritten. // Also, the content of 'assembled_data' is allocated using malloc(), and NOT // owned by the 'mux' object. It MUST be deallocated by the caller by calling // WebPDataClear(). It's always safe to call WebPDataClear() upon return, // even in case of error. // Parameters: // mux - (in/out) object whose chunks are to be assembled // assembled_data - (out) assembled WebP data // Returns: // WEBP_MUX_BAD_DATA - if mux object is invalid. 
// WEBP_MUX_INVALID_ARGUMENT - if mux or assembled_data is NULL. // WEBP_MUX_MEMORY_ERROR - on memory allocation error. // WEBP_MUX_OK - on success. WEBP_EXTERN(WebPMuxError) WebPMuxAssemble(WebPMux* mux, WebPData* assembled_data); //------------------------------------------------------------------------------ // WebPAnimEncoder API // // This API allows encoding (possibly) animated WebP images. // // Code Example: /* WebPAnimEncoderOptions enc_options; WebPAnimEncoderOptionsInit(&enc_options); // Tune 'enc_options' as needed. WebPAnimEncoder* enc = WebPAnimEncoderNew(width, height, &enc_options); while() { WebPConfig config; WebPConfigInit(&config); // Tune 'config' as needed. WebPAnimEncoderAdd(enc, frame, timestamp_ms, &config); } WebPAnimEncoderAdd(enc, NULL, timestamp_ms, NULL); WebPAnimEncoderAssemble(enc, webp_data); WebPAnimEncoderDelete(enc); // Write the 'webp_data' to a file, or re-mux it further. */ typedef struct WebPAnimEncoder WebPAnimEncoder; // Main opaque object. // Forward declarations. Defined in encode.h. struct WebPPicture; struct WebPConfig; // Global options. struct WebPAnimEncoderOptions { WebPMuxAnimParams anim_params; // Animation parameters. int minimize_size; // If true, minimize the output size (slow). Implicitly // disables key-frame insertion. int kmin; int kmax; // Minimum and maximum distance between consecutive key // frames in the output. The library may insert some key // frames as needed to satisfy this criteria. // Note that these conditions should hold: kmax > kmin // and kmin >= kmax / 2 + 1. Also, if kmin == 0, then // key-frame insertion is disabled; and if kmax == 0, // then all frames will be key-frames. int allow_mixed; // If true, use mixed compression mode; may choose // either lossy and lossless for each frame. int verbose; // If true, print info and warning messages to stderr. uint32_t padding[4]; // Padding for later use. }; // Internal, version-checked, entry point. 
WEBP_EXTERN(int) WebPAnimEncoderOptionsInitInternal(
    WebPAnimEncoderOptions*, int);

// Should always be called, to initialize a fresh WebPAnimEncoderOptions
// structure before modification. Returns false in case of version mismatch.
// WebPAnimEncoderOptionsInit() must have succeeded before using the
// 'enc_options' object.
static WEBP_INLINE int WebPAnimEncoderOptionsInit(
    WebPAnimEncoderOptions* enc_options) {
  // ABI-version-checked wrapper around the internal initializer.
  return WebPAnimEncoderOptionsInitInternal(enc_options, WEBP_MUX_ABI_VERSION);
}

// Internal, version-checked, entry point.
WEBP_EXTERN(WebPAnimEncoder*) WebPAnimEncoderNewInternal(
    int, int, const WebPAnimEncoderOptions*, int);

// Creates and initializes a WebPAnimEncoder object.
// Parameters:
//   width/height - (in) canvas width and height of the animation.
//   enc_options - (in) encoding options; can be passed NULL to pick
//                 reasonable defaults.
// Returns:
//   A pointer to the newly created WebPAnimEncoder object.
//   Or NULL in case of memory error.
static WEBP_INLINE WebPAnimEncoder* WebPAnimEncoderNew(
    int width, int height, const WebPAnimEncoderOptions* enc_options) {
  return WebPAnimEncoderNewInternal(width, height, enc_options,
                                    WEBP_MUX_ABI_VERSION);
}

// Optimize the given frame for WebP, encode it and add it to the
// WebPAnimEncoder object.
// The last call to 'WebPAnimEncoderAdd' should be with frame = NULL, which
// indicates that no more frames are to be added. This call is also used to
// determine the duration of the last frame.
// Parameters:
//   enc - (in/out) object to which the frame is to be added.
//   frame - (in/out) frame data in ARGB or YUV(A) format. If it is in YUV(A)
//           format, it will be converted to ARGB, which incurs a small loss.
//   timestamp_ms - (in) timestamp of this frame in milliseconds.
//                  Duration of a frame would be calculated as
//                  "timestamp of next frame - timestamp of this frame".
//                  Hence, timestamps should be in non-decreasing order.
// config - (in) encoding options; can be passed NULL to pick // reasonable defaults. // Returns: // On error, returns false and frame->error_code is set appropriately. // Otherwise, returns true. WEBP_EXTERN(int) WebPAnimEncoderAdd( WebPAnimEncoder* enc, struct WebPPicture* frame, int timestamp_ms, const struct WebPConfig* config); // Assemble all frames added so far into a WebP bitstream. // This call should be preceded by a call to 'WebPAnimEncoderAdd' with // frame = NULL; if not, the duration of the last frame will be internally // estimated. // Parameters: // enc - (in/out) object from which the frames are to be assembled. // webp_data - (out) generated WebP bitstream. // Returns: // True on success. WEBP_EXTERN(int) WebPAnimEncoderAssemble(WebPAnimEncoder* enc, WebPData* webp_data); // Get error string corresponding to the most recent call using 'enc'. The // returned string is owned by 'enc' and is valid only until the next call to // WebPAnimEncoderAdd() or WebPAnimEncoderAssemble() or WebPAnimEncoderDelete(). // Parameters: // enc - (in/out) object from which the error string is to be fetched. // Returns: // NULL if 'enc' is NULL. Otherwise, returns the error string if the last call // to 'enc' had an error, or an empty string if the last call was a success. WEBP_EXTERN(const char*) WebPAnimEncoderGetError(WebPAnimEncoder* enc); // Deletes the WebPAnimEncoder object. // Parameters: // enc - (in/out) object to be deleted WEBP_EXTERN(void) WebPAnimEncoderDelete(WebPAnimEncoder* enc); //------------------------------------------------------------------------------ #ifdef __cplusplus } // extern "C" #endif #endif /* WEBP_WEBP_MUX_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/mux_types.h ================================================ // Copyright 2012 Google Inc. All Rights Reserved. 
// // Use of this source code is governed by a BSD-style license // that can be found in the COPYING file in the root of the source // tree. An additional intellectual property rights grant can be found // in the file PATENTS. All contributing project authors may // be found in the AUTHORS file in the root of the source tree. // ----------------------------------------------------------------------------- // // Data-types common to the mux and demux libraries. // // Author: Urvang (urvang@google.com) #ifndef WEBP_WEBP_MUX_TYPES_H_ #define WEBP_WEBP_MUX_TYPES_H_ #include // free() #include // memset() #include "./types.h" #ifdef __cplusplus extern "C" { #endif // Note: forward declaring enumerations is not allowed in (strict) C and C++, // the types are left here for reference. // typedef enum WebPFeatureFlags WebPFeatureFlags; // typedef enum WebPMuxAnimDispose WebPMuxAnimDispose; // typedef enum WebPMuxAnimBlend WebPMuxAnimBlend; typedef struct WebPData WebPData; // VP8X Feature Flags. typedef enum WebPFeatureFlags { FRAGMENTS_FLAG = 0x00000001, ANIMATION_FLAG = 0x00000002, XMP_FLAG = 0x00000004, EXIF_FLAG = 0x00000008, ALPHA_FLAG = 0x00000010, ICCP_FLAG = 0x00000020 } WebPFeatureFlags; // Dispose method (animation only). Indicates how the area used by the current // frame is to be treated before rendering the next frame on the canvas. typedef enum WebPMuxAnimDispose { WEBP_MUX_DISPOSE_NONE, // Do not dispose. WEBP_MUX_DISPOSE_BACKGROUND // Dispose to background color. } WebPMuxAnimDispose; // Blend operation (animation only). Indicates how transparent pixels of the // current frame are blended with those of the previous canvas. typedef enum WebPMuxAnimBlend { WEBP_MUX_BLEND, // Blend. WEBP_MUX_NO_BLEND // Do not blend. } WebPMuxAnimBlend; // Data type used to describe 'raw' data, e.g., chunk data // (ICC profile, metadata) and WebP compressed image data. 
// Raw byte buffer plus length, used for chunk payloads and compressed image
// data throughout the mux/demux API (see the comment ending the previous
// extracted line).
struct WebPData {
  const uint8_t* bytes;
  size_t size;
};

// Initializes the contents of the 'webp_data' object with default values.
static WEBP_INLINE void WebPDataInit(WebPData* webp_data) {
  if (webp_data != NULL) {
    memset(webp_data, 0, sizeof(*webp_data));
  }
}

// Clears the contents of the 'webp_data' object by calling free(). Does not
// deallocate the object itself.
static WEBP_INLINE void WebPDataClear(WebPData* webp_data) {
  if (webp_data != NULL) {
    // Cast drops the 'const' so the buffer can be released; assumes 'bytes'
    // is either NULL or was obtained from malloc (as WebPDataCopy does).
    free((void*)webp_data->bytes);
    // Reset to a zeroed state so double-clear is harmless.
    WebPDataInit(webp_data);
  }
}

// Allocates necessary storage for 'dst' and copies the contents of 'src'.
// Returns true on success.
static WEBP_INLINE int WebPDataCopy(const WebPData* src, WebPData* dst) {
  if (src == NULL || dst == NULL) return 0;
  WebPDataInit(dst);
  if (src->bytes != NULL && src->size != 0) {
    dst->bytes = (uint8_t*)malloc(src->size);
    if (dst->bytes == NULL) return 0;  // 'dst' remains zeroed on failure.
    memcpy((void*)dst->bytes, src->bytes, src->size);
    dst->size = src->size;
  }
  return 1;  // An empty 'src' yields an empty (zeroed) 'dst'.
}

#ifdef __cplusplus
}    // extern "C"
#endif

#endif  /* WEBP_WEBP_MUX_TYPES_H_ */

================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/Vendor/WebP.framework/Headers/types.h
================================================
// Copyright 2010 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// ----------------------------------------------------------------------------- // // Common types // // Author: Skal (pascal.massimino@gmail.com) #ifndef WEBP_WEBP_TYPES_H_ #define WEBP_WEBP_TYPES_H_ #include // for size_t #ifndef _MSC_VER #include #if defined(__cplusplus) || !defined(__STRICT_ANSI__) || \ (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) #define WEBP_INLINE inline #else #define WEBP_INLINE #endif #else typedef signed char int8_t; typedef unsigned char uint8_t; typedef signed short int16_t; typedef unsigned short uint16_t; typedef signed int int32_t; typedef unsigned int uint32_t; typedef unsigned long long int uint64_t; typedef long long int int64_t; #define WEBP_INLINE __forceinline #endif /* _MSC_VER */ #ifndef WEBP_EXTERN // This explicitly marks library functions and allows for changing the // signature for e.g., Windows DLL builds. # if defined(__GNUC__) && __GNUC__ >= 4 # define WEBP_EXTERN(type) extern __attribute__ ((visibility ("default"))) type # else # define WEBP_EXTERN(type) extern type # endif /* __GNUC__ >= 4 */ #endif /* WEBP_EXTERN */ // Macro to check ABI compatibility (same major revision number) #define WEBP_ABI_IS_INCOMPATIBLE(a, b) (((a) >> 8) != ((b) >> 8)) #endif /* WEBP_WEBP_TYPES_H_ */ ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYAnimatedImageView.h ================================================ // // YYAnimatedImageView.h // YYImage // // Created by ibireme on 14/10/19. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import NS_ASSUME_NONNULL_BEGIN /** An image view for displaying animated image. @discussion It is a fully compatible `UIImageView` subclass. If the `image` or `highlightedImage` property adopt to the `YYAnimatedImage` protocol, then it can be used to play the multi-frame animation. 
The animation can also be controlled with the UIImageView methods `-startAnimating`, `-stopAnimating` and `-isAnimating`. This view request the frame data just in time. When the device has enough free memory, this view may cache some or all future frames in an inner buffer for lower CPU cost. Buffer size is dynamically adjusted based on the current state of the device memory. Sample Code: // ani@3x.gif YYImage *image = [YYImage imageNamed:@"ani"]; YYAnimatedImageView *imageView = [YYAnimatedImageView alloc] initWithImage:image]; [view addSubView:imageView]; */ @interface YYAnimatedImageView : UIImageView /** If the image has more than one frame, set this value to `YES` will automatically play/stop the animation when the view become visible/invisible. The default value is `YES`. */ @property (nonatomic) BOOL autoPlayAnimatedImage; /** Index of the currently displayed frame (index from 0). Set a new value to this property will cause to display the new frame immediately. If the new value is invalid, this method has no effect. You can add an observer to this property to observe the playing status. */ @property (nonatomic) NSUInteger currentAnimatedImageIndex; /** Whether the image view is playing animation currently. You can add an observer to this property to observe the playing status. */ @property (nonatomic, readonly) BOOL currentIsPlayingAnimation; /** The animation timer's runloop mode, default is `NSRunLoopCommonModes`. Set this property to `NSDefaultRunLoopMode` will make the animation pause during UIScrollView scrolling. */ @property (nonatomic, copy) NSString *runloopMode; /** The max size (in bytes) for inner frame buffer size, default is 0 (dynamically). When the device has enough free memory, this view will request and decode some or all future frame image into an inner buffer. If this property's value is 0, then the max buffer size will be dynamically adjusted based on the current state of the device free memory. 
Otherwise, the buffer size will be limited by this value. When receive memory warning or app enter background, the buffer will be released immediately, and may grow back at the right time. */ @property (nonatomic) NSUInteger maxBufferSize; @end /** The YYAnimatedImage protocol declares the required methods for animated image display with YYAnimatedImageView. Subclass a UIImage and implement this protocol, so that instances of that class can be set to YYAnimatedImageView.image or YYAnimatedImageView.highlightedImage to display animation. See `YYImage` and `YYFrameImage` for example. */ @protocol YYAnimatedImage @required /// Total animated frame count. /// It the frame count is less than 1, then the methods below will be ignored. - (NSUInteger)animatedImageFrameCount; /// Animation loop count, 0 means infinite looping. - (NSUInteger)animatedImageLoopCount; /// Bytes per frame (in memory). It may used to optimize memory buffer size. - (NSUInteger)animatedImageBytesPerFrame; /// Returns the frame image from a specified index. /// This method may be called on background thread. /// @param index Frame index (zero based). - (nullable UIImage *)animatedImageFrameAtIndex:(NSUInteger)index; /// Returns the frames's duration from a specified index. /// @param index Frame index (zero based). - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index; @optional /// A rectangle in image coordinates defining the subrectangle of the image that /// will be displayed. The rectangle should not outside the image's bounds. /// It may used to display sprite animation with a single image (sprite sheet). - (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYAnimatedImageView.m ================================================ // // YYAnimatedImageView.m // YYImage // // Created by ibireme on 14/10/19. // Copyright (c) 2015 ibireme. 
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYAnimatedImageView.h"
#import "YYImageCoder.h"
// NOTE(review): the extraction pass stripped the angle-bracket arguments of two
// `#import` directives. `pthread_main_np()` and the mach VM statistics calls
// below require these two system headers:
#import <pthread.h>
#import <mach/mach.h>

#define BUFFER_SIZE (10 * 1024 * 1024) // 10MB (minimum memory buffer size)

// Serialize access to self->_buffer around the wrapped statements.
#define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(self->_lock);

// Same as LOCK(), but for a local `view` variable (used by the fetch operation).
#define LOCK_VIEW(...) dispatch_semaphore_wait(view->_lock, DISPATCH_TIME_FOREVER); \
__VA_ARGS__; \
dispatch_semaphore_signal(view->_lock);


/// Total physical memory in bytes, or -1 when unavailable.
static int64_t _YYDeviceMemoryTotal() {
    int64_t mem = [[NSProcessInfo processInfo] physicalMemory];
    if (mem < -1) mem = -1;
    return mem;
}

/// Free physical memory in bytes, or -1 when the mach call fails.
static int64_t _YYDeviceMemoryFree() {
    mach_port_t host_port = mach_host_self();
    mach_msg_type_number_t host_size = sizeof(vm_statistics_data_t) / sizeof(integer_t);
    vm_size_t page_size;
    vm_statistics_data_t vm_stat;
    kern_return_t kern;
    
    kern = host_page_size(host_port, &page_size);
    if (kern != KERN_SUCCESS) return -1;
    kern = host_statistics(host_port, HOST_VM_INFO, (host_info_t)&vm_stat, &host_size);
    if (kern != KERN_SUCCESS) return -1;
    return vm_stat.free_count * page_size;
}


/**
 A proxy used to hold a weak object.
 It can be used to avoid retain cycles, such as the target in NSTimer or CADisplayLink.
 */
@interface _YYImageWeakProxy : NSProxy
@property (nonatomic, weak, readonly) id target;
- (instancetype)initWithTarget:(id)target;
+ (instancetype)proxyWithTarget:(id)target;
@end

@implementation _YYImageWeakProxy

- (instancetype)initWithTarget:(id)target {
    _target = target;
    return self;
}

+ (instancetype)proxyWithTarget:(id)target {
    return [[_YYImageWeakProxy alloc] initWithTarget:target];
}

- (id)forwardingTargetForSelector:(SEL)selector {
    return _target;
}

- (void)forwardInvocation:(NSInvocation *)invocation {
    // Target is gone: return nil/zero for any message instead of crashing.
    void *null = NULL;
    [invocation setReturnValue:&null];
}

- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
    return [NSObject instanceMethodSignatureForSelector:@selector(init)];
}

- (BOOL)respondsToSelector:(SEL)aSelector {
    return [_target respondsToSelector:aSelector];
}

- (BOOL)isEqual:(id)object {
    return [_target isEqual:object];
}

- (NSUInteger)hash {
    return [_target hash];
}

- (Class)superclass {
    return [_target superclass];
}

- (Class)class {
    return [_target class];
}

- (BOOL)isKindOfClass:(Class)aClass {
    return [_target isKindOfClass:aClass];
}

- (BOOL)isMemberOfClass:(Class)aClass {
    return [_target isMemberOfClass:aClass];
}

- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
    return [_target conformsToProtocol:aProtocol];
}

- (BOOL)isProxy {
    return YES;
}

- (NSString *)description {
    return [_target description];
}

- (NSString *)debugDescription {
    return [_target debugDescription];
}

@end


typedef NS_ENUM(NSUInteger, YYAnimatedImageType) {
    YYAnimatedImageTypeNone = 0,
    YYAnimatedImageTypeImage,
    YYAnimatedImageTypeHighlightedImage,
    YYAnimatedImageTypeImages,
    YYAnimatedImageTypeHighlightedImages,
};

@interface YYAnimatedImageView() {
    @package
    // NOTE(review): the `<YYAnimatedImage>` protocol qualifiers below were
    // stripped by the extraction pass; they are restored here — the code calls
    // protocol methods (animatedImageLoopCount etc.) on these values.
    UIImage <YYAnimatedImage> *_curAnimatedImage;
    
    dispatch_once_t _onceToken;
    dispatch_semaphore_t _lock; ///< lock for _buffer
    NSOperationQueue *_requestQueue; ///< image request queue, serial
    
    CADisplayLink *_link; ///< ticker for change frame
    NSTimeInterval _time; ///< time after last frame
    
    UIImage *_curFrame; ///< current frame to display
    NSUInteger _curIndex; ///< current frame index (from 0)
    NSUInteger _totalFrameCount; ///< total frame count
    
    BOOL _loopEnd; ///< whether the loop is end.
    NSUInteger _curLoop; ///< current loop count (from 0)
    NSUInteger _totalLoop; ///< total loop count, 0 means infinity
    
    NSMutableDictionary *_buffer; ///< frame buffer
    BOOL _bufferMiss; ///< whether miss frame on last opportunity
    NSUInteger _maxBufferCount; ///< maximum buffer count
    NSInteger _incrBufferCount; ///< current allowed buffer count (will increase by step)
    
    CGRect _curContentsRect;
    BOOL _curImageHasContentsRect; ///< image has implementated "animatedImageContentsRectAtIndex:"
}
@property (nonatomic, readwrite) BOOL currentIsPlayingAnimation;
- (void)calcMaxBufferCount;
@end


/// An operation for image fetch
@interface _YYAnimatedImageViewFetchOperation : NSOperation
@property (nonatomic, weak) YYAnimatedImageView *view;
@property (nonatomic, assign) NSUInteger nextIndex;
@property (nonatomic, strong) UIImage <YYAnimatedImage> *curImage;
@end

@implementation _YYAnimatedImageViewFetchOperation

// Decode up to `_incrBufferCount` future frames (starting at nextIndex,
// wrapping around) into the view's buffer on the background queue.
- (void)main {
    __strong YYAnimatedImageView *view = _view;
    if (!view) return;
    if ([self isCancelled]) return;
    view->_incrBufferCount++;
    if (view->_incrBufferCount == 0) [view calcMaxBufferCount];
    if (view->_incrBufferCount > (NSInteger)view->_maxBufferCount) {
        view->_incrBufferCount = view->_maxBufferCount;
    }
    NSUInteger idx = _nextIndex;
    NSUInteger max = view->_incrBufferCount < 1 ? 1 : view->_incrBufferCount;
    NSUInteger total = view->_totalFrameCount;
    view = nil;
    
    for (int i = 0; i < max; i++, idx++) {
        @autoreleasepool {
            if (idx >= total) idx = 0;
            if ([self isCancelled]) break;
            __strong YYAnimatedImageView *view = _view;
            if (!view) break;
            LOCK_VIEW(BOOL miss = (view->_buffer[@(idx)] == nil));
            
            if (miss) {
                UIImage *img = [_curImage animatedImageFrameAtIndex:idx];
                img = img.yy_imageByDecoded;
                if ([self isCancelled]) break;
                // NSNull marks "fetched but nil" so the miss check above
                // does not retry a permanently-unavailable frame.
                LOCK_VIEW(view->_buffer[@(idx)] = img ? img : [NSNull null]);
                view = nil;
            }
        }
    }
}

@end


@implementation YYAnimatedImageView

- (instancetype)init {
    self = [super init];
    _runloopMode = NSRunLoopCommonModes;
    _autoPlayAnimatedImage = YES;
    return self;
}

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    _runloopMode = NSRunLoopCommonModes;
    _autoPlayAnimatedImage = YES;
    return self;
}

- (instancetype)initWithImage:(UIImage *)image {
    self = [super init];
    _runloopMode = NSRunLoopCommonModes;
    _autoPlayAnimatedImage = YES;
    self.frame = (CGRect) {CGPointZero, image.size };
    self.image = image;
    return self;
}

- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage {
    self = [super init];
    _runloopMode = NSRunLoopCommonModes;
    _autoPlayAnimatedImage = YES;
    CGSize size = image ? image.size : highlightedImage.size;
    self.frame = (CGRect) {CGPointZero, size };
    self.image = image;
    self.highlightedImage = highlightedImage;
    return self;
}

// init the animated params.
- (void)resetAnimated {
    dispatch_once(&_onceToken, ^{
        _lock = dispatch_semaphore_create(1);
        _buffer = [NSMutableDictionary new];
        _requestQueue = [[NSOperationQueue alloc] init];
        _requestQueue.maxConcurrentOperationCount = 1;
        // Weak proxy breaks the CADisplayLink -> target retain cycle.
        _link = [CADisplayLink displayLinkWithTarget:[_YYImageWeakProxy proxyWithTarget:self] selector:@selector(step:)];
        if (_runloopMode) {
            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
        }
        _link.paused = YES;
        
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
    });
    
    [_requestQueue cancelAllOperations];
    LOCK(
         if (_buffer.count) {
             NSMutableDictionary *holder = _buffer;
             _buffer = [NSMutableDictionary new];
             dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
                 // Capture the dictionary to global queue,
                 // release these images in background to avoid blocking UI thread.
                 [holder class];
             });
         }
    );
    _link.paused = YES;
    _time = 0;
    if (_curIndex != 0) {
        [self willChangeValueForKey:@"currentAnimatedImageIndex"];
        _curIndex = 0;
        [self didChangeValueForKey:@"currentAnimatedImageIndex"];
    }
    _curAnimatedImage = nil;
    _curFrame = nil;
    _curLoop = 0;
    _totalLoop = 0;
    _totalFrameCount = 1;
    _loopEnd = NO;
    _bufferMiss = NO;
    _incrBufferCount = 0;
}

- (void)setImage:(UIImage *)image {
    if (self.image == image) return;
    [self setImage:image withType:YYAnimatedImageTypeImage];
}

- (void)setHighlightedImage:(UIImage *)highlightedImage {
    if (self.highlightedImage == highlightedImage) return;
    [self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
}

- (void)setAnimationImages:(NSArray *)animationImages {
    if (self.animationImages == animationImages) return;
    [self setImage:animationImages withType:YYAnimatedImageTypeImages];
}

- (void)setHighlightedAnimationImages:(NSArray *)highlightedAnimationImages {
    if (self.highlightedAnimationImages == highlightedAnimationImages) return;
    [self setImage:highlightedAnimationImages withType:YYAnimatedImageTypeHighlightedImages];
}

- (void)setHighlighted:(BOOL)highlighted {
    [super setHighlighted:highlighted];
    if (_link) [self resetAnimated];
    [self imageChanged];
}

/// Returns the image/images currently stored for the given slot type.
- (id)imageForType:(YYAnimatedImageType)type {
    switch (type) {
        case YYAnimatedImageTypeNone: return nil;
        case YYAnimatedImageTypeImage: return self.image;
        case YYAnimatedImageTypeHighlightedImage: return self.highlightedImage;
        case YYAnimatedImageTypeImages: return self.animationImages;
        case YYAnimatedImageTypeHighlightedImages: return self.highlightedAnimationImages;
    }
    return nil;
}

/// The slot type that is currently visible (highlighted slots win when highlighted).
- (YYAnimatedImageType)currentImageType {
    YYAnimatedImageType curType = YYAnimatedImageTypeNone;
    if (self.highlighted) {
        if (self.highlightedAnimationImages.count) curType = YYAnimatedImageTypeHighlightedImages;
        else if (self.highlightedImage) curType = YYAnimatedImageTypeHighlightedImage;
    }
    if (curType == YYAnimatedImageTypeNone) {
        if (self.animationImages.count) curType = YYAnimatedImageTypeImages;
        else if (self.image) curType = YYAnimatedImageTypeImage;
    }
    return curType;
}

- (void)setImage:(id)image withType:(YYAnimatedImageType)type {
    [self stopAnimating];
    if (_link) [self resetAnimated];
    _curFrame = nil;
    switch (type) {
        case YYAnimatedImageTypeNone: break;
        case YYAnimatedImageTypeImage: super.image = image; break;
        case YYAnimatedImageTypeHighlightedImage: super.highlightedImage = image; break;
        case YYAnimatedImageTypeImages: super.animationImages = image; break;
        case YYAnimatedImageTypeHighlightedImages: super.highlightedAnimationImages = image; break;
    }
    [self imageChanged];
}

// Re-evaluate the visible image: reset contentsRect, and (re)start the
// frame machinery when the new image is a multi-frame YYAnimatedImage.
- (void)imageChanged {
    YYAnimatedImageType newType = [self currentImageType];
    id newVisibleImage = [self imageForType:newType];
    NSUInteger newImageFrameCount = 0;
    BOOL hasContentsRect = NO;
    if ([newVisibleImage isKindOfClass:[UIImage class]] &&
        [newVisibleImage conformsToProtocol:@protocol(YYAnimatedImage)]) {
        newImageFrameCount = ((UIImage <YYAnimatedImage> *) newVisibleImage).animatedImageFrameCount;
        if (newImageFrameCount > 1) {
            hasContentsRect = [((UIImage <YYAnimatedImage> *) newVisibleImage) respondsToSelector:@selector(animatedImageContentsRectAtIndex:)];
        }
    }
    
    if (!hasContentsRect && _curImageHasContentsRect) {
        if (!CGRectEqualToRect(self.layer.contentsRect, CGRectMake(0, 0, 1, 1)) ) {
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            self.layer.contentsRect = CGRectMake(0, 0, 1, 1);
            [CATransaction commit];
        }
    }
    _curImageHasContentsRect = hasContentsRect;
    if (hasContentsRect) {
        CGRect rect = [((UIImage <YYAnimatedImage> *) newVisibleImage) animatedImageContentsRectAtIndex:0];
        [self setContentsRect:rect forImage:newVisibleImage];
    }
    
    if (newImageFrameCount > 1) {
        [self resetAnimated];
        _curAnimatedImage = newVisibleImage;
        _curFrame = newVisibleImage;
        _totalLoop = _curAnimatedImage.animatedImageLoopCount;
        _totalFrameCount = _curAnimatedImage.animatedImageFrameCount;
        [self calcMaxBufferCount];
    }
    [self setNeedsDisplay];
    [self didMoved];
}

// dynamically adjust buffer size for current memory.
- (void)calcMaxBufferCount {
    int64_t bytes = (int64_t)_curAnimatedImage.animatedImageBytesPerFrame;
    if (bytes == 0) bytes = 1024;
    
    int64_t total = _YYDeviceMemoryTotal();
    int64_t free = _YYDeviceMemoryFree();
    int64_t max = MIN(total * 0.2, free * 0.6);
    max = MAX(max, BUFFER_SIZE);
    if (_maxBufferSize) max = max > _maxBufferSize ? _maxBufferSize : max;
    double maxBufferCount = (double)max / (double)bytes;
    if (maxBufferCount < 1) maxBufferCount = 1;
    else if (maxBufferCount > 512) maxBufferCount = 512;
    _maxBufferCount = maxBufferCount;
}

- (void)dealloc {
    [_requestQueue cancelAllOperations];
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
    [_link invalidate];
}

- (BOOL)isAnimating {
    return self.currentIsPlayingAnimation;
}

- (void)stopAnimating {
    [super stopAnimating];
    [_requestQueue cancelAllOperations];
    _link.paused = YES;
    self.currentIsPlayingAnimation = NO;
}

- (void)startAnimating {
    YYAnimatedImageType type = [self currentImageType];
    if (type == YYAnimatedImageTypeImages || type == YYAnimatedImageTypeHighlightedImages) {
        // Plain image arrays use UIImageView's built-in animation.
        NSArray *images = [self imageForType:type];
        if (images.count > 0) {
            [super startAnimating];
            self.currentIsPlayingAnimation = YES;
        }
    } else {
        if (_curAnimatedImage && _link.paused) {
            _curLoop = 0;
            _loopEnd = NO;
            _link.paused = NO;
            self.currentIsPlayingAnimation = YES;
        }
    }
}

- (void)didReceiveMemoryWarning:(NSNotification *)notification {
    [_requestQueue cancelAllOperations];
    [_requestQueue addOperationWithBlock: ^{
        _incrBufferCount = -60 - (int)(arc4random() % 120); // about 1~3 seconds to grow back..
        NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
        LOCK(
             NSArray * keys = _buffer.allKeys;
             for (NSNumber * key in keys) {
                 if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
                     [_buffer removeObjectForKey:key];
                 }
             }
        )//LOCK
    }];
}

- (void)didEnterBackground:(NSNotification *)notification {
    [_requestQueue cancelAllOperations];
    NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
    LOCK(
         NSArray * keys = _buffer.allKeys;
         for (NSNumber * key in keys) {
             if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
                 [_buffer removeObjectForKey:key];
             }
         }
    )//LOCK
}

// CADisplayLink tick: accumulate elapsed time, advance to the next frame when
// its delay has elapsed, and schedule a fetch operation for frames not yet buffered.
- (void)step:(CADisplayLink *)link {
    UIImage <YYAnimatedImage> *image = _curAnimatedImage;
    NSMutableDictionary *buffer = _buffer;
    UIImage *bufferedImage = nil;
    NSUInteger nextIndex = (_curIndex + 1) % _totalFrameCount;
    BOOL bufferIsFull = NO;
    
    if (!image) return;
    if (_loopEnd) { // view will keep in last frame
        [self stopAnimating];
        return;
    }
    
    NSTimeInterval delay = 0;
    if (!_bufferMiss) {
        _time += link.duration;
        delay = [image animatedImageDurationAtIndex:_curIndex];
        if (_time < delay) return;
        _time -= delay;
        if (nextIndex == 0) {
            _curLoop++;
            if (_curLoop >= _totalLoop && _totalLoop != 0) {
                _loopEnd = YES;
                [self stopAnimating];
                [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
                return; // stop at last frame
            }
        }
        delay = [image animatedImageDurationAtIndex:nextIndex];
        if (_time > delay) _time = delay; // do not jump over frame
    }
    LOCK(
         bufferedImage = buffer[@(nextIndex)];
         if (bufferedImage) {
             if ((int)_incrBufferCount < _totalFrameCount) {
                 [buffer removeObjectForKey:@(nextIndex)];
             }
             [self willChangeValueForKey:@"currentAnimatedImageIndex"];
             _curIndex = nextIndex;
             [self didChangeValueForKey:@"currentAnimatedImageIndex"];
             _curFrame = bufferedImage == (id)[NSNull null] ? nil : bufferedImage;
             if (_curImageHasContentsRect) {
                 _curContentsRect = [image animatedImageContentsRectAtIndex:_curIndex];
                 [self setContentsRect:_curContentsRect forImage:_curFrame];
             }
             nextIndex = (_curIndex + 1) % _totalFrameCount;
             _bufferMiss = NO;
             if (buffer.count == _totalFrameCount) {
                 bufferIsFull = YES;
             }
         } else {
             _bufferMiss = YES;
         }
    )//LOCK
    
    if (!_bufferMiss) {
        [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
    }
    
    if (!bufferIsFull && _requestQueue.operationCount == 0) { // if some work not finished, wait for next opportunity
        _YYAnimatedImageViewFetchOperation *operation = [_YYAnimatedImageViewFetchOperation new];
        operation.view = self;
        operation.nextIndex = nextIndex;
        operation.curImage = image;
        [_requestQueue addOperation:operation];
    }
}

- (void)displayLayer:(CALayer *)layer {
    if (_curFrame) {
        layer.contents = (__bridge id)_curFrame.CGImage;
    }
}

// Map a sprite-sheet sub-rect (image coordinates) to the layer's unit-space contentsRect.
- (void)setContentsRect:(CGRect)rect forImage:(UIImage *)image{
    CGRect layerRect = CGRectMake(0, 0, 1, 1);
    if (image) {
        CGSize imageSize = image.size;
        if (imageSize.width > 0.01 && imageSize.height > 0.01) {
            layerRect.origin.x = rect.origin.x / imageSize.width;
            layerRect.origin.y = rect.origin.y / imageSize.height;
            layerRect.size.width = rect.size.width / imageSize.width;
            layerRect.size.height = rect.size.height / imageSize.height;
            layerRect = CGRectIntersection(layerRect, CGRectMake(0, 0, 1, 1));
            if (CGRectIsNull(layerRect) || CGRectIsEmpty(layerRect)) {
                layerRect = CGRectMake(0, 0, 1, 1);
            }
        }
    }
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    self.layer.contentsRect = layerRect;
    [CATransaction commit];
}

// Auto play/stop when the view enters or leaves the visible hierarchy.
- (void)didMoved {
    if (self.autoPlayAnimatedImage) {
        if(self.superview && self.window) {
            [self startAnimating];
        } else {
            [self stopAnimating];
        }
    }
}

- (void)didMoveToWindow {
    [super didMoveToWindow];
    [self didMoved];
}

- (void)didMoveToSuperview {
    [super didMoveToSuperview];
    [self didMoved];
}

- (void)setCurrentAnimatedImageIndex:(NSUInteger)currentAnimatedImageIndex {
    if (!_curAnimatedImage) return;
    if (currentAnimatedImageIndex >= _curAnimatedImage.animatedImageFrameCount) return;
    if (_curIndex == currentAnimatedImageIndex) return;
    
    void (^block)() = ^{
        LOCK(
             [_requestQueue cancelAllOperations];
             [_buffer removeAllObjects];
             [self willChangeValueForKey:@"currentAnimatedImageIndex"];
             _curIndex = currentAnimatedImageIndex;
             [self didChangeValueForKey:@"currentAnimatedImageIndex"];
             _curFrame = [_curAnimatedImage animatedImageFrameAtIndex:_curIndex];
             if (_curImageHasContentsRect) {
                 _curContentsRect = [_curAnimatedImage animatedImageContentsRectAtIndex:_curIndex];
             }
             _time = 0;
             _loopEnd = NO;
             _bufferMiss = NO;
             [self.layer setNeedsDisplay];
        )//LOCK
    };
    
    if (pthread_main_np()) {
        block();
    } else {
        dispatch_async(dispatch_get_main_queue(), block);
    }
}

- (NSUInteger)currentAnimatedImageIndex {
    return _curIndex;
}

- (void)setRunloopMode:(NSString *)runloopMode {
    if ([_runloopMode isEqual:runloopMode]) return;
    if (_link) {
        if (_runloopMode) {
            [_link removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
        }
        if (runloopMode.length) {
            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:runloopMode];
        }
    }
    _runloopMode = runloopMode.copy;
}

#pragma mark - Override NSObject(NSKeyValueObservingCustomization)

+ (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key {
    // currentAnimatedImageIndex sends manual will/didChange notifications.
    if ([key isEqualToString:@"currentAnimatedImageIndex"]) {
        return NO;
    }
    return [super automaticallyNotifiesObserversForKey:key];
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    _runloopMode = [aDecoder decodeObjectForKey:@"runloopMode"];
    if (_runloopMode.length == 0) _runloopMode = NSRunLoopCommonModes;
    if ([aDecoder containsValueForKey:@"autoPlayAnimatedImage"]) {
        _autoPlayAnimatedImage = [aDecoder decodeBoolForKey:@"autoPlayAnimatedImage"];
    } else {
        _autoPlayAnimatedImage = YES;
    }
    
    UIImage *image = [aDecoder decodeObjectForKey:@"YYAnimatedImage"];
    UIImage *highlightedImage = [aDecoder decodeObjectForKey:@"YYHighlightedAnimatedImage"];
    if (image) {
        self.image = image;
        [self setImage:image withType:YYAnimatedImageTypeImage];
    }
    if (highlightedImage) {
        self.highlightedImage = highlightedImage;
        [self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder {
    [super encodeWithCoder:aCoder];
    [aCoder encodeObject:_runloopMode forKey:@"runloopMode"];
    [aCoder encodeBool:_autoPlayAnimatedImage forKey:@"autoPlayAnimatedImage"];
    
    // Only archive images that are actually multi-frame animated images.
    BOOL ani, multi;
    ani = [self.image conformsToProtocol:@protocol(YYAnimatedImage)];
    multi = (ani && ((UIImage <YYAnimatedImage> *)self.image).animatedImageFrameCount > 1);
    if (multi) [aCoder encodeObject:self.image forKey:@"YYAnimatedImage"];
    
    ani = [self.highlightedImage conformsToProtocol:@protocol(YYAnimatedImage)];
    multi = (ani && ((UIImage <YYAnimatedImage> *)self.highlightedImage).animatedImageFrameCount > 1);
    if (multi) [aCoder encodeObject:self.highlightedImage forKey:@"YYHighlightedAnimatedImage"];
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYFrameImage.h
================================================
//
//  YYFrameImage.h
//  YYImage
//
//  Created by ibireme on 14/12/9.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

// NOTE(review): the angle-bracket import arguments below were stripped by the
// extraction pass and are restored to the upstream YYImage form.
#import <UIKit/UIKit.h>
#if __has_include(<YYImage/YYImage.h>)
#import <YYImage/YYAnimatedImageView.h>
#elif __has_include(<YYWebImage/YYImage.h>)
#import <YYWebImage/YYAnimatedImageView.h>
#else
#import "YYAnimatedImageView.h"
#endif

NS_ASSUME_NONNULL_BEGIN

/**
 An image to display frame-based animation.
 
 @discussion It is a fully compatible `UIImage` subclass.
 It only support system image format such as png and jpeg.
 The animation can be played by YYAnimatedImageView.
Sample Code: NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"]; NSArray *times = @[@0.1, @0.2, @0.1]; YYFrameImage *image = [YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES]; YYAnimatedImageView *imageView = [YYAnimatedImageView alloc] initWithImage:image]; [view addSubView:imageView]; */ @interface YYFrameImage : UIImage /** Create a frame animated image from files. @param paths An array of NSString objects, contains the full or partial path to each image file. e.g. @[@"/ani/1.png",@"/ani/2.png",@"/ani/3.png"] @param oneFrameDuration The duration (in seconds) per frame. @param loopCount The animation loop count, 0 means infinite. @return An initialized YYFrameImage object, or nil when an error occurs. */ - (nullable instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount; /** Create a frame animated image from files. @param paths An array of NSString objects, contains the full or partial path to each image file. e.g. @[@"/ani/frame1.png",@"/ani/frame2.png",@"/ani/frame3.png"] @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame. e.g. @[@0.1, @0.2, @0.3]; @param loopCount The animation loop count, 0 means infinite. @return An initialized YYFrameImage object, or nil when an error occurs. */ - (nullable instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount; /** Create a frame animated image from an array of data. @param dataArray An array of NSData objects. @param oneFrameDuration The duration (in seconds) per frame. @param loopCount The animation loop count, 0 means infinite. @return An initialized YYFrameImage object, or nil when an error occurs. 
*/ - (nullable instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount; /** Create a frame animated image from an array of data. @param dataArray An array of NSData objects. @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame. e.g. @[@0.1, @0.2, @0.3]; @param loopCount The animation loop count, 0 means infinite. @return An initialized YYFrameImage object, or nil when an error occurs. */ - (nullable instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYFrameImage.m ================================================ // // YYFrameImage.m // YYImage // // Created by ibireme on 14/12/9. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "YYFrameImage.h" #import "YYImageCoder.h" /** Return the path scale. e.g.
Path Scale
"icon.png" 1
"icon@2x.png" 2
"icon@2.5x.png" 2.5
"icon@2x" 1
"icon@2x..png" 1
"icon@2x.png/" 1
*/ static CGFloat _NSStringPathScale(NSString *string) { if (string.length == 0 || [string hasSuffix:@"/"]) return 1; NSString *name = string.stringByDeletingPathExtension; __block CGFloat scale = 1; NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil]; [pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) { if (result.range.location >= 3) { scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue; } }]; return scale; } @implementation YYFrameImage { NSUInteger _loopCount; NSUInteger _oneFrameBytes; NSArray *_imagePaths; NSArray *_imageDatas; NSArray *_frameDurations; } - (instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount { NSMutableArray *durations = [NSMutableArray new]; for (int i = 0, max = (int)paths.count; i < max; i++) { [durations addObject:@(oneFrameDuration)]; } return [self initWithImagePaths:paths frameDurations:durations loopCount:loopCount]; } - (instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount { if (paths.count == 0) return nil; if (paths.count != frameDurations.count) return nil; NSString *firstPath = paths[0]; NSData *firstData = [NSData dataWithContentsOfFile:firstPath]; CGFloat scale = _NSStringPathScale(firstPath); UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded]; self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp]; if (!self) return nil; long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage); _oneFrameBytes = (NSUInteger)frameByte; _imagePaths = paths.copy; _frameDurations = frameDurations.copy; _loopCount = 
loopCount; return self; } - (instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount { NSMutableArray *durations = [NSMutableArray new]; for (int i = 0, max = (int)dataArray.count; i < max; i++) { [durations addObject:@(oneFrameDuration)]; } return [self initWithImageDataArray:dataArray frameDurations:durations loopCount:loopCount]; } - (instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount { if (dataArray.count == 0) return nil; if (dataArray.count != frameDurations.count) return nil; NSData *firstData = dataArray[0]; CGFloat scale = [UIScreen mainScreen].scale; UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded]; self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp]; if (!self) return nil; long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage); _oneFrameBytes = (NSUInteger)frameByte; _imageDatas = dataArray.copy; _frameDurations = frameDurations.copy; _loopCount = loopCount; return self; } #pragma mark - YYAnimtedImage - (NSUInteger)animatedImageFrameCount { if (_imagePaths) { return _imagePaths.count; } else if (_imageDatas) { return _imageDatas.count; } else { return 1; } } - (NSUInteger)animatedImageLoopCount { return _loopCount; } - (NSUInteger)animatedImageBytesPerFrame { return _oneFrameBytes; } - (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index { if (_imagePaths) { if (index >= _imagePaths.count) return nil; NSString *path = _imagePaths[index]; CGFloat scale = _NSStringPathScale(path); NSData *data = [NSData dataWithContentsOfFile:path]; return [[UIImage imageWithData:data scale:scale] yy_imageByDecoded]; } else if (_imageDatas) { if (index >= _imageDatas.count) return nil; NSData *data = _imageDatas[index]; return [[UIImage imageWithData:data scale:[UIScreen mainScreen].scale] 
yy_imageByDecoded]; } else { return index == 0 ? self : nil; } } - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index { if (index >= _frameDurations.count) return 0; NSNumber *num = _frameDurations[index]; return [num doubleValue]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYImage.h ================================================ // // YYImage.h // YYImage // // Created by ibireme on 14/10/20. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #if __has_include() FOUNDATION_EXPORT double YYImageVersionNumber; FOUNDATION_EXPORT const unsigned char YYImageVersionString[]; #import #import #import #import #elif __has_include() #import #import #import #import #else #import "YYFrameImage.h" #import "YYSpriteSheetImage.h" #import "YYImageCoder.h" #import "YYAnimatedImageView.h" #endif NS_ASSUME_NONNULL_BEGIN /** A YYImage object is a high-level way to display animated image data. @discussion It is a fully compatible `UIImage` subclass. It extends the UIImage to support animated WebP, APNG and GIF format image data decoding. It also support NSCoding protocol to archive and unarchive multi-frame image data. If the image is created from multi-frame image data, and you want to play the animation, try replace UIImageView with `YYAnimatedImageView`. Sample Code: // animation@3x.webp YYImage *image = [YYImage imageNamed:@"animation.webp"]; YYAnimatedImageView *imageView = [YYAnimatedImageView alloc] initWithImage:image]; [view addSubView:imageView]; */ @interface YYImage : UIImage + (nullable YYImage *)imageNamed:(NSString *)name; // no cache! 
+ (nullable YYImage *)imageWithContentsOfFile:(NSString *)path; + (nullable YYImage *)imageWithData:(NSData *)data; + (nullable YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale; /** If the image is created from data or file, then the value indicates the data type. */ @property (nonatomic, readonly) YYImageType animatedImageType; /** If the image is created from animated image data (multi-frame GIF/APNG/WebP), this property stores the original image data. */ @property (nullable, nonatomic, readonly) NSData *animatedImageData; /** The total memory usage (in bytes) if all frame images was loaded into memory. The value is 0 if the image is not created from a multi-frame image data. */ @property (nonatomic, readonly) NSUInteger animatedImageMemorySize; /** Preload all frame image to memory. @discussion Set this property to `YES` will block the calling thread to decode all animation frame image to memory, set to `NO` will release the preloaded frames. If the image is shared by lots of image views (such as emoticon), preload all frames will reduce the CPU cost. See `animatedImageMemorySize` for memory cost. */ @property (nonatomic) BOOL preloadAllAnimatedImageFrames; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYImage.m ================================================ // // YYImage.m // YYImage // // Created by ibireme on 14/10/20. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "YYImage.h" /** An array of NSNumber objects, shows the best order for path scale search. e.g. 
iPhone3GS:@[@1,@2,@3] iPhone5:@[@2,@3,@1] iPhone6 Plus:@[@3,@2,@1] */ static NSArray *_NSBundlePreferredScales() { static NSArray *scales; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ CGFloat screenScale = [UIScreen mainScreen].scale; if (screenScale <= 1) { scales = @[@1,@2,@3]; } else if (screenScale <= 2) { scales = @[@2,@3,@1]; } else { scales = @[@3,@2,@1]; } }); return scales; } /** Add scale modifier to the file name (without path extension), From @"name" to @"name@2x". e.g.
Before After(scale:2)
"icon" "icon@2x"
"icon " "icon @2x"
"icon.top" "icon.top@2x"
"/p/name" "/p/name@2x"
"/path/" "/path/"
@param scale  Resource scale.
 @return String with the scale modifier appended, or the string itself (copied)
 when no modifier is applicable.
 */
static NSString *_NSStringByAppendingNameScale(NSString *string, CGFloat scale) {
    if (!string) return nil;
    // Scale 1 needs no modifier; empty strings and directory paths ("…/") are returned unchanged.
    BOOL isScaleOne = fabs(scale - 1) <= __FLT_EPSILON__;
    if (isScaleOne || string.length == 0 || [string hasSuffix:@"/"]) {
        return string.copy;
    }
    return [string stringByAppendingFormat:@"@%@x", @(scale)];
}

/**
 Return the path scale.
 e.g.
Path Scale
"icon.png" 1
"icon@2x.png" 2
"icon@2.5x.png" 2.5
"icon@2x" 1
"icon@2x..png" 1
"icon@2x.png/" 1
*/
static CGFloat _NSStringPathScale(NSString *string) {
    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
    NSString *name = string.stringByDeletingPathExtension;
    __block CGFloat scale = 1;
    // Match a trailing "@<number>x" in the extension-less name, e.g. "icon@2x" -> 2.
    NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
    [pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
        // Requires at least 3 characters before the "@" (e.g. "a@2x" is not treated as scaled).
        if (result.range.location >= 3) {
            // `name` is a prefix of `string`, so the match range is valid in `string` too.
            scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
        }
    }];
    return scale;
}

@implementation YYImage {
    YYImageDecoder *_decoder;            // retained only for multi-frame images
    NSArray *_preloadedFrames;           // UIImage entries (NSNull placeholder on decode failure); guarded by _preloadedLock
    dispatch_semaphore_t _preloadedLock; // binary semaphore protecting _preloadedFrames
    NSUInteger _bytesPerFrame;           // decoded bitmap bytes of frame 0, used as the per-frame memory cost
}

/// Bundle lookup like +[UIImage imageNamed:] (scale-aware name/extension probing),
/// but decoded through YYImageDecoder so animated GIF/APNG/WebP data is preserved.
+ (YYImage *)imageNamed:(NSString *)name {
    if (name.length == 0) return nil;
    if ([name hasSuffix:@"/"]) return nil;
    NSString *res = name.stringByDeletingPathExtension;
    NSString *ext = name.pathExtension;
    NSString *path = nil;
    CGFloat scale = 1;
    // If no extension, guess by system supported (same as UIImage).
    NSArray *exts = ext.length > 0 ? @[ext] : @[@"", @"png", @"jpeg", @"jpg", @"gif", @"webp", @"apng"];
    NSArray *scales = _NSBundlePreferredScales();
    for (int s = 0; s < scales.count; s++) {
        scale = ((NSNumber *)scales[s]).floatValue;
        NSString *scaledName = _NSStringByAppendingNameScale(res, scale);
        for (NSString *e in exts) {
            path = [[NSBundle mainBundle] pathForResource:scaledName ofType:e];
            if (path) break;
        }
        if (path) break;
    }
    if (path.length == 0) return nil;
    NSData *data = [NSData dataWithContentsOfFile:path];
    if (data.length == 0) return nil;
    return [[self alloc] initWithData:data scale:scale];
}

+ (YYImage *)imageWithContentsOfFile:(NSString *)path {
    return [[self alloc] initWithContentsOfFile:path];
}

+ (YYImage *)imageWithData:(NSData *)data {
    return [[self alloc] initWithData:data];
}

+ (YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale {
    return [[self alloc] initWithData:data scale:scale];
}

- (instancetype)initWithContentsOfFile:(NSString *)path {
    NSData *data = [NSData dataWithContentsOfFile:path];
    // Derive the scale from an "@2x"-style suffix in the path.
    return [self initWithData:data scale:_NSStringPathScale(path)];
}

- (instancetype)initWithData:(NSData *)data {
    return [self initWithData:data scale:1];
}

/// Core initializer: decodes frame 0 eagerly; keeps the decoder alive only when
/// the data holds more than one frame (i.e. the image is animated).
- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (data.length == 0) return nil;
    if (scale <= 0) scale = [UIScreen mainScreen].scale;
    _preloadedLock = dispatch_semaphore_create(1);
    @autoreleasepool {
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
        YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
        UIImage *image = frame.image;
        if (!image) return nil;
        self = [self initWithCGImage:image.CGImage scale:decoder.scale orientation:image.imageOrientation];
        if (!self) return nil;
        _animatedImageType = decoder.type;
        if (decoder.frameCount > 1) {
            _decoder = decoder;
            _bytesPerFrame = CGImageGetBytesPerRow(image.CGImage) * CGImageGetHeight(image.CGImage);
            _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
        }
        self.yy_isDecodedForDisplay = YES;
    }
    return self;
}

- (NSData *)animatedImageData {
    return _decoder.data;
}

- (void)setPreloadAllAnimatedImageFrames:(BOOL)preloadAllAnimatedImageFrames {
    if (_preloadAllAnimatedImageFrames != preloadAllAnimatedImageFrames) {
        // FIX: record the new value. Previously the auto-synthesized ivar was never
        // written, so the getter always returned NO and the `!=` guard above compared
        // against a stale value — setting the property back to NO after a preload could
        // never release the frames, violating the contract documented in YYImage.h
        // ("set to `NO` will release the preloaded frames").
        _preloadAllAnimatedImageFrames = preloadAllAnimatedImageFrames;
        if (preloadAllAnimatedImageFrames && _decoder.frameCount > 0) {
            NSMutableArray *frames = [NSMutableArray new];
            for (NSUInteger i = 0, max = _decoder.frameCount; i < max; i++) {
                UIImage *img = [self animatedImageFrameAtIndex:i];
                if (img) {
                    [frames addObject:img];
                } else {
                    [frames addObject:[NSNull null]]; // placeholder keeps indexes aligned
                }
            }
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = frames;
            dispatch_semaphore_signal(_preloadedLock);
        } else {
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = nil;
            dispatch_semaphore_signal(_preloadedLock);
        }
    }
}

#pragma mark - protocol NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    NSNumber *scale = [aDecoder decodeObjectForKey:@"YYImageScale"];
    NSData *data = [aDecoder decodeObjectForKey:@"YYImageData"];
    if (data.length) {
        self = [self initWithData:data scale:scale.doubleValue];
    } else {
        self = [super initWithCoder:aDecoder];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder {
    if (_decoder.data.length) {
        // Archive the original (possibly animated) data so animation survives coding.
        [aCoder encodeObject:@(self.scale) forKey:@"YYImageScale"];
        [aCoder encodeObject:_decoder.data forKey:@"YYImageData"];
    } else {
        [super encodeWithCoder:aCoder]; // Apple uses UIImagePNGRepresentation() to encode UIImage.
    }
}

#pragma mark - protocol YYAnimatedImage

- (NSUInteger)animatedImageFrameCount {
    return _decoder.frameCount;
}

- (NSUInteger)animatedImageLoopCount {
    return _decoder.loopCount;
}

- (NSUInteger)animatedImageBytesPerFrame {
    return _bytesPerFrame;
}

- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    if (index >= _decoder.frameCount) return nil;
    dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
    // Subscripting a nil array messages nil and yields nil, so no preload check is needed.
    UIImage *image = _preloadedFrames[index];
    dispatch_semaphore_signal(_preloadedLock);
    // NSNull marks a frame that failed to decode during preload; report it as nil.
    if (image) return image == (id)[NSNull null] ? nil : image;
    return [_decoder frameAtIndex:index decodeForDisplay:YES].image;
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    NSTimeInterval duration = [_decoder frameDurationAtIndex:index];
    /*
     http://opensource.apple.com/source/WebCore/WebCore-7600.1.25/platform/graphics/cg/ImageSourceCG.cpp
     Many annoying ads specify a 0 duration to make an image flash as quickly as
     possible. We follow Safari and Firefox's behavior and use a duration of 100 ms
     for any frames that specify a duration of <= 10 ms.
     See also: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser.
     */
    if (duration < 0.011f) return 0.100f;
    return duration;
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYImageCoder.h
================================================
//
//  YYImageCoder.h
//  YYImage
//
//  Created by ibireme on 15/5/13.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import <UIKit/UIKit.h> // NOTE(review): include target was stripped by text extraction; restored per upstream YYImageCoder.h — verify against the original pod.

NS_ASSUME_NONNULL_BEGIN

/**
 Image file type.
 */
typedef NS_ENUM(NSUInteger, YYImageType) {
    YYImageTypeUnknown = 0, ///< unknown
    YYImageTypeJPEG,        ///< jpeg, jpg
    YYImageTypeJPEG2000,    ///< jp2
    YYImageTypeTIFF,        ///< tiff, tif
    YYImageTypeBMP,         ///< bmp
    YYImageTypeICO,         ///< ico
    YYImageTypeICNS,        ///< icns
    YYImageTypeGIF,         ///< gif
    YYImageTypePNG,         ///< png
    YYImageTypeWebP,        ///< webp
    YYImageTypeOther,       ///< other image format
};

/**
 Dispose method specifies how the area used by the current frame is to be
 treated before rendering the next frame on the canvas.
 */
typedef NS_ENUM(NSUInteger, YYImageDisposeMethod) {
    /**
     No disposal is done on this frame before rendering the next; the contents
     of the canvas are left as is.
     */
    YYImageDisposeNone = 0,
    /**
     The frame's region of the canvas is to be cleared to fully transparent black
     before rendering the next frame.
*/
    YYImageDisposeBackground,
    /**
     The frame's region of the canvas is to be reverted to the previous contents
     before rendering the next frame.
     */
    YYImageDisposePrevious,
};

/**
 Blend operation specifies how transparent pixels of the current frame are
 blended with those of the previous canvas.
 */
typedef NS_ENUM(NSUInteger, YYImageBlendOperation) {
    /**
     All color components of the frame, including alpha, overwrite the current
     contents of the frame's canvas region.
     */
    YYImageBlendNone = 0,
    /**
     The frame should be composited onto the output buffer based on its alpha.
     */
    YYImageBlendOver,
};

/**
 An image frame object.
 */
@interface YYImageFrame : NSObject
@property (nonatomic) NSUInteger index;    ///< Frame index (zero based)
@property (nonatomic) NSUInteger width;    ///< Frame width
@property (nonatomic) NSUInteger height;   ///< Frame height
@property (nonatomic) NSUInteger offsetX;  ///< Frame origin.x in canvas (left-bottom based)
@property (nonatomic) NSUInteger offsetY;  ///< Frame origin.y in canvas (left-bottom based)
@property (nonatomic) NSTimeInterval duration;          ///< Frame duration in seconds
@property (nonatomic) YYImageDisposeMethod dispose;     ///< Frame dispose method.
@property (nonatomic) YYImageBlendOperation blend;      ///< Frame blend operation.
@property (nullable, nonatomic, strong) UIImage *image; ///< The image.
+ (instancetype)frameWithImage:(UIImage *)image;
@end


#pragma mark - Decoder

/**
 An image decoder to decode image data.
 
 @discussion This class supports decoding animated WebP, APNG, GIF and system
 image format such as PNG, JPG, JP2, BMP, TIFF, PIC, ICNS and ICO. It can be
 used to decode complete image data, or to decode incremental image data during
 image download. This class is thread-safe.
 
 Example:
 
    // Decode single image:
    NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
 
    // Decode image during download:
    NSMutableData *data = [NSMutableData new];
    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
    while(newDataArrived) {
        [data appendData:newData];
        [decoder updateData:data final:NO];
        if (decoder.frameCount > 0) {
            UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
            // progressive display...
        }
    }
    [decoder updateData:data final:YES];
    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
    // final display...
 */
@interface YYImageDecoder : NSObject

@property (nullable, nonatomic, readonly) NSData *data; ///< Image data.
@property (nonatomic, readonly) YYImageType type;       ///< Image data type.
@property (nonatomic, readonly) CGFloat scale;          ///< Image scale.
@property (nonatomic, readonly) NSUInteger frameCount;  ///< Image frame count.
@property (nonatomic, readonly) NSUInteger loopCount;   ///< Image loop count, 0 means infinite.
@property (nonatomic, readonly) NSUInteger width;       ///< Image canvas width.
@property (nonatomic, readonly) NSUInteger height;      ///< Image canvas height.
@property (nonatomic, readonly, getter=isFinalized) BOOL finalized; ///< Whether the data has been marked final.

/**
 Creates an image decoder.
 
 @param scale  Image's scale.
 @return An image decoder.
 */
- (instancetype)initWithScale:(CGFloat)scale NS_DESIGNATED_INITIALIZER;

/**
 Updates the incremental image with new data.
 
 @discussion You can use this method to decode progressive/interlaced/baseline
 image when you do not have the complete image data. The `data` is retained by
 the decoder; you should not modify the data in another thread during decoding.
 
 @param data  The data to add to the image decoder. Each time you call this
 function, the 'data' parameter must contain all of the image file data
 accumulated so far.
 
 @param final  A value that specifies whether the data is the final set. Pass
 YES if it is, NO otherwise. When the data is already finalized, you can not
 update the data anymore.
 
 @return Whether succeed.
 */
- (BOOL)updateData:(nullable NSData *)data final:(BOOL)final;

/**
 Convenience method to create a decoder with specified data.
 @param data   Image data.
 @param scale  Image's scale.
 @return A new decoder, or nil if an error occurs.
 */
+ (nullable instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale;

/**
 Decodes and returns a frame from a specified index.
 @param index  Frame image index (zero-based).
 @param decodeForDisplay  Whether to decode the image to a memory bitmap for
 display. If NO, it will try to return the original frame data without blend.
 @return A new frame with image, or nil if an error occurs.
 */
- (nullable YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay;

/**
 Returns the frame duration from a specified index.
 @param index  Frame image index (zero-based).
 @return Duration in seconds.
 */
- (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index;

/**
 Returns the frame's properties. See "CGImageProperties.h" in ImageIO.framework
 for more information.
 @param index  Frame image index (zero-based).
 @return The ImageIO frame property.
 */
- (nullable NSDictionary *)framePropertiesAtIndex:(NSUInteger)index;

/**
 Returns the image's properties. See "CGImageProperties.h" in ImageIO.framework
 for more information.
 */
- (nullable NSDictionary *)imageProperties;

@end


#pragma mark - Encoder

/**
 An image encoder to encode image to data.
 
 @discussion It supports encoding single frame image with the type defined in
 YYImageType. It also supports encoding multi-frame image with GIF, APNG and WebP.
 
 Example:
 
    YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
    jpegEncoder.quality = 0.9;
    [jpegEncoder addImage:image duration:0];
    NSData *jpegData = [jpegEncoder encode];
 
    YYImageEncoder *gifEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
    gifEncoder.loopCount = 5;
    [gifEncoder addImage:image0 duration:0.1];
    [gifEncoder addImage:image1 duration:0.15];
    [gifEncoder addImage:image2 duration:0.2];
    NSData *gifData = [gifEncoder encode];
 
 @warning It just packs the images together when encoding a multi-frame image.
 If you want to reduce the image file size, try imagemagick/ffmpeg for GIF and
 WebP, and apngasm for APNG.
 */
@interface YYImageEncoder : NSObject

@property (nonatomic, readonly) YYImageType type; ///< Image type.
@property (nonatomic) NSUInteger loopCount;       ///< Loop count, 0 means infinite, only available for GIF/APNG/WebP.
@property (nonatomic) BOOL lossless;              ///< Lossless, only available for WebP.
@property (nonatomic) CGFloat quality;            ///< Compress quality, 0.0~1.0, only available for JPG/JP2/WebP.

- (instancetype)init UNAVAILABLE_ATTRIBUTE;
+ (instancetype)new UNAVAILABLE_ATTRIBUTE;

/**
 Create an image encoder with a specified type.
 @param type  Image type.
 @return A new encoder, or nil if an error occurs.
 */
- (nullable instancetype)initWithType:(YYImageType)type NS_DESIGNATED_INITIALIZER;

/**
 Add an image to encoder.
 @param image     Image.
 @param duration  Image duration for animation. Pass 0 to ignore this parameter.
 */
- (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration;

/**
 Add an image with image data to encoder.
 @param data      Image data.
 @param duration  Image duration for animation. Pass 0 to ignore this parameter.
 */
- (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration;

/**
 Add an image from a file path to encoder.
 @param path      Image file path.
 @param duration  Image duration for animation. Pass 0 to ignore this parameter.
 */
- (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration;

/**
 Encodes the image and returns the image data.
 @return The image data, or nil if an error occurs.
 */
- (nullable NSData *)encode;

/**
 Encodes the image to a file.
 @param path  The file path (overwrite if exist).
 @return Whether succeed.
 */
- (BOOL)encodeToFile:(NSString *)path;

/**
 Convenience method to encode single frame image.
 @param image    The image.
 @param type     The destination image type.
 @param quality  Image quality, 0.0~1.0.
 @return The image data, or nil if an error occurs.
 */
+ (nullable NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality;

/**
 Convenience method to encode image from a decoder.
 @param decoder  The image decoder.
 @param type     The destination image type.
 @param quality  Image quality, 0.0~1.0.
 @return The image data, or nil if an error occurs.
 */
+ (nullable NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality;

@end


#pragma mark - UIImage

@interface UIImage (YYImageCoder)

/**
 Decompress this image to bitmap, so when the image is displayed on screen, the
 main thread won't be blocked by additional decode. If the image has already
 been decoded or is unable to decode, it just returns itself.
 
 @return An image decoded, or just self if no decoding is needed.
 @see yy_isDecodedForDisplay
 */
- (instancetype)yy_imageByDecoded;

/**
 Whether the image can be displayed on screen without additional decoding.
 @warning It is just a hint for your code; changing it has no other effect.
 */
@property (nonatomic) BOOL yy_isDecodedForDisplay;

/**
 Saves this image to iOS Photos Album.
 
 @discussion  This method attempts to save the original data to the album if
 the image is created from an animated GIF/APNG; otherwise, it will save the
 image as JPEG or PNG (based on the alpha information).
 
 @param completionBlock The block invoked (in main thread) after the save
 operation completes.
    assetURL: A URL that identifies the saved image file. If the image is not
    saved, assetURL is nil.
    error: If the image is not saved, an error object that describes the reason
    for failure, otherwise nil.
 */
- (void)yy_saveToAlbumWithCompletionBlock:(nullable void(^)(NSURL * _Nullable assetURL, NSError * _Nullable error))completionBlock;

/**
 Return a 'best' data representation for this image.
 
 @discussion The conversion is based on these rules:
 1. If the image is created from an animated GIF/APNG/WebP, it returns the
    original data.
 2. It returns PNG or JPEG(0.9) representation based on the alpha information.
 
 @return Image data, or nil if an error occurs.
 */
- (nullable NSData *)yy_imageDataRepresentation;

@end


#pragma mark - Helper

/// Detect a data's image type by reading the data's header 16 bytes (very fast).
CG_EXTERN YYImageType YYImageDetectType(CFDataRef data);

/// Convert YYImageType to UTI (such as kUTTypeJPEG).
CG_EXTERN CFStringRef _Nullable YYImageTypeToUTType(YYImageType type);

/// Convert UTI (such as kUTTypeJPEG) to YYImageType.
CG_EXTERN YYImageType YYImageTypeFromUTType(CFStringRef uti);

/// Get image type's file extension (such as @"jpg").
CG_EXTERN NSString *_Nullable YYImageTypeGetExtension(YYImageType type);

/// Returns the shared DeviceRGB color space.
CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceRGB();

/// Returns the shared DeviceGray color space.
CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceGray();

/// Returns whether a color space is DeviceRGB.
CG_EXTERN BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space);

/// Returns whether a color space is DeviceGray.
CG_EXTERN BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space);

/// Convert EXIF orientation value to UIImageOrientation.
CG_EXTERN UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value);

/// Convert UIImageOrientation to EXIF orientation value.
CG_EXTERN NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation);

/**
 Create a decoded image.
 
 @discussion If the source image is created from compressed image data (such as
 PNG or JPEG), you can use this method to decode the image. After decoding, you
 can access the decoded bytes with CGImageGetDataProvider() and
 CGDataProviderCopyData() without an additional decode process. If the image has
 already been decoded, this method just copies the decoded bytes to the new image.
 
 @param imageRef          The source image.
 @param decodeForDisplay  If YES, this method will decode the image and convert
 it to BGRA8888 (premultiplied) or BGRX8888 format for CALayer display.
 
 @return A decoded image, or NULL if an error occurs.
 */
CG_EXTERN CGImageRef _Nullable YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay);

/**
 Create an image copy with an orientation.
 
 @param imageRef        Source image
 @param orientation     Image orientation which will be applied to the image.
 @param destBitmapInfo  Destination image bitmap; only 32bit formats are
 supported (such as ARGB8888).
 @return A new image, or NULL if an error occurs.
 */
CG_EXTERN CGImageRef _Nullable YYCGImageCreateCopyWithOrientation(CGImageRef imageRef, UIImageOrientation orientation, CGBitmapInfo destBitmapInfo);

/**
 Create an image copy with CGAffineTransform.
 
 @param imageRef        Source image.
 @param transform       Transform applied to image (left-bottom based coordinate system).
 @param destSize        Destination image size
 @param destBitmapInfo  Destination image bitmap; only 32bit formats are
 supported (such as ARGB8888).
 @return A new image, or NULL if an error occurs.
 */
CG_EXTERN CGImageRef _Nullable YYCGImageCreateAffineTransformCopy(CGImageRef imageRef, CGAffineTransform transform, CGSize destSize, CGBitmapInfo destBitmapInfo);

/**
 Encode an image to data with CGImageDestination.
 
 @param imageRef  The image.
 @param type      The image destination data type.
 @param quality   The quality (0.0~1.0)
 @return A new image data, or nil if an error occurs.
 */
CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality);

/**
 Whether WebP is available in YYImage.
 */
CG_EXTERN BOOL YYImageWebPAvailable();

/**
 Get a webp image frame count.
 
 @param webpData  WebP data.
 @return Image frame count, or 0 if an error occurs.
 */
CG_EXTERN NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData);

/**
 Decode an image from WebP data, returns NULL if an error occurs.
 
 @param webpData           The WebP data.
 @param decodeForDisplay   If YES, this method will decode the image and convert
                           it to BGRA8888 (premultiplied) format for CALayer display.
 @param useThreads         YES to enable multi-thread decode. (speed up, but cost more CPU)
 @param bypassFiltering    YES to skip the in-loop filtering. (speed up, but may lose some smooth)
 @param noFancyUpsampling  YES to use faster pointwise upsampler. (speed down, and may lose some details).
 @return The decoded image, or NULL if an error occurs.
 */
CG_EXTERN CGImageRef _Nullable YYCGImageCreateWithWebPData(CFDataRef webpData, BOOL decodeForDisplay, BOOL useThreads, BOOL bypassFiltering, BOOL noFancyUpsampling);

/// WebP encoder presets tuned for different kinds of source content.
typedef NS_ENUM(NSUInteger, YYImagePreset) {
    YYImagePresetDefault = 0, ///< default preset.
    YYImagePresetPicture,     ///< digital picture, like portrait, inner shot
    YYImagePresetPhoto,       ///< outdoor photograph, with natural lighting
    YYImagePresetDrawing,     ///< hand or line drawing, with high-contrast details
    YYImagePresetIcon,        ///< small-sized colorful images
    YYImagePresetText         ///< text-like
};

/**
 Encode a CGImage to WebP data
 
 @param imageRef       image
 @param lossless       YES=lossless (similar to PNG), NO=lossy (similar to JPEG)
 @param quality        0.0~1.0 (0=smallest file, 1.0=biggest file)
                       For lossless image, try the value near 1.0; for lossy,
                       try the value near 0.8.
 @param compressLevel  0~6 (0=fast, 6=slower-better). Default is 4.
 @param preset         Preset for different image type, default is YYImagePresetDefault.
 @return WebP data, or nil if an error occurs.
*/
CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset);

NS_ASSUME_NONNULL_END


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYImageCoder.m
================================================
//
//  YYImageCoder.m
//  YYImage
//
//  Created by ibireme on 15/5/13.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYImageCoder.h"
#import "YYImage.h"
// NOTE(review): the nine system-framework import targets below were stripped by
// the text extraction (angle-bracket contents removed); upstream YYImageCoder.m
// imports CoreFoundation, ImageIO, Accelerate, QuartzCore, MobileCoreServices,
// AssetsLibrary, objc/runtime.h, pthread.h and zlib.h — verify against the pod.
#import #import #import #import #import #import #import #import #import

#ifndef YYIMAGE_WEBP_ENABLED
// NOTE(review): the __has_include() arguments were also stripped by extraction
// (upstream checks the <webp/...> module headers); left as found.
#if __has_include() && __has_include() && \
__has_include() && __has_include()
#define YYIMAGE_WEBP_ENABLED 1
#import #import #import #import
#elif __has_include("webp/decode.h") && __has_include("webp/encode.h") && \
__has_include("webp/demux.h") && __has_include("webp/mux.h")
#define YYIMAGE_WEBP_ENABLED 1
#import "webp/decode.h"
#import "webp/encode.h"
#import "webp/demux.h"
#import "webp/mux.h"
#else
#define YYIMAGE_WEBP_ENABLED 0
#endif
#endif


////////////////////////////////////////////////////////////////////////////////
#pragma mark - Utility (for little endian platform)

// Pack 4 (or 2) byte values into a little-endian integer chunk code,
// so the packed value compares equal to the raw bytes read from a file.
#define YY_FOUR_CC(c1,c2,c3,c4) ((uint32_t)(((c4) << 24) | ((c3) << 16) | ((c2) << 8) | (c1)))
#define YY_TWO_CC(c1,c2) ((uint16_t)(((c2) << 8) | (c1)))

// Byte-swap a 16-bit value (big endian <-> little endian).
static inline uint16_t yy_swap_endian_uint16(uint16_t value) {
    return
    (uint16_t) ((value & 0x00FF) << 8) |
    (uint16_t) ((value & 0xFF00) >> 8) ;
}

// Byte-swap a 32-bit value (big endian <-> little endian).
static inline uint32_t yy_swap_endian_uint32(uint32_t value) {
    return
    (uint32_t)((value & 0x000000FFU) << 24) |
    (uint32_t)((value & 0x0000FF00U) <<  8) |
    (uint32_t)((value & 0x00FF0000U) >>  8) |
    (uint32_t)((value & 0xFF000000U) >> 24) ;
}



////////////////////////////////////////////////////////////////////////////////
#pragma mark - APNG

/*
 PNG spec:
http://www.libpng.org/pub/png/spec/1.2/PNG-Structure.html APNG spec: https://wiki.mozilla.org/APNG_Specification =============================================================================== PNG format: header (8): 89 50 4e 47 0d 0a 1a 0a chunk, chunk, chunk, ... =============================================================================== chunk format: length (4): uint32_t big endian fourcc (4): chunk type code data (length): data crc32 (4): uint32_t big endian crc32(fourcc + data) =============================================================================== PNG chunk define: IHDR (Image Header) required, must appear first, 13 bytes width (4) pixel count, should not be zero height (4) pixel count, should not be zero bit depth (1) expected: 1, 2, 4, 8, 16 color type (1) 1<<0 (palette used), 1<<1 (color used), 1<<2 (alpha channel used) compression method (1) 0 (deflate/inflate) filter method (1) 0 (adaptive filtering with five basic filter types) interlace method (1) 0 (no interlace) or 1 (Adam7 interlace) IDAT (Image Data) required, must appear consecutively if there's multiple 'IDAT' chunk IEND (End) required, must appear last, 0 bytes =============================================================================== APNG chunk define: acTL (Animation Control) required, must appear before 'IDAT', 8 bytes num frames (4) number of frames num plays (4) number of times to loop, 0 indicates infinite looping fcTL (Frame Control) required, must appear before the 'IDAT' or 'fdAT' chunks of the frame to which it applies, 26 bytes sequence number (4) sequence number of the animation chunk, starting from 0 width (4) width of the following frame height (4) height of the following frame x offset (4) x position at which to render the following frame y offset (4) y position at which to render the following frame delay num (2) frame delay fraction numerator delay den (2) frame delay fraction denominator dispose op (1) type of frame area disposal to be done after rendering this 
frame (0:none, 1:background 2:previous) blend op (1) type of frame area rendering for this frame (0:source, 1:over) fdAT (Frame Data) required sequence number (4) sequence number of the animation chunk frame data (x) frame data for this frame (same as 'IDAT') =============================================================================== `dispose_op` specifies how the output buffer should be changed at the end of the delay (before rendering the next frame). * NONE: no disposal is done on this frame before rendering the next; the contents of the output buffer are left as is. * BACKGROUND: the frame's region of the output buffer is to be cleared to fully transparent black before rendering the next frame. * PREVIOUS: the frame's region of the output buffer is to be reverted to the previous contents before rendering the next frame. `blend_op` specifies whether the frame is to be alpha blended into the current output buffer content, or whether it should completely replace its region in the output buffer. * SOURCE: all color components of the frame, including alpha, overwrite the current contents of the frame's output buffer region. 
* OVER: the frame should be composited onto the output buffer based on its alpha, using a simple OVER operation as described in the "Alpha Channel Processing" section of the PNG specification */ typedef enum { YY_PNG_ALPHA_TYPE_PALEETE = 1 << 0, YY_PNG_ALPHA_TYPE_COLOR = 1 << 1, YY_PNG_ALPHA_TYPE_ALPHA = 1 << 2, } yy_png_alpha_type; typedef enum { YY_PNG_DISPOSE_OP_NONE = 0, YY_PNG_DISPOSE_OP_BACKGROUND = 1, YY_PNG_DISPOSE_OP_PREVIOUS = 2, } yy_png_dispose_op; typedef enum { YY_PNG_BLEND_OP_SOURCE = 0, YY_PNG_BLEND_OP_OVER = 1, } yy_png_blend_op; typedef struct { uint32_t width; ///< pixel count, should not be zero uint32_t height; ///< pixel count, should not be zero uint8_t bit_depth; ///< expected: 1, 2, 4, 8, 16 uint8_t color_type; ///< see yy_png_alpha_type uint8_t compression_method; ///< 0 (deflate/inflate) uint8_t filter_method; ///< 0 (adaptive filtering with five basic filter types) uint8_t interlace_method; ///< 0 (no interlace) or 1 (Adam7 interlace) } yy_png_chunk_IHDR; typedef struct { uint32_t sequence_number; ///< sequence number of the animation chunk, starting from 0 uint32_t width; ///< width of the following frame uint32_t height; ///< height of the following frame uint32_t x_offset; ///< x position at which to render the following frame uint32_t y_offset; ///< y position at which to render the following frame uint16_t delay_num; ///< frame delay fraction numerator uint16_t delay_den; ///< frame delay fraction denominator uint8_t dispose_op; ///< see yy_png_dispose_op uint8_t blend_op; ///< see yy_png_blend_op } yy_png_chunk_fcTL; typedef struct { uint32_t offset; ///< chunk offset in PNG data uint32_t fourcc; ///< chunk fourcc uint32_t length; ///< chunk data length uint32_t crc32; ///< chunk crc32 } yy_png_chunk_info; typedef struct { uint32_t chunk_index; ///< the first `fdAT`/`IDAT` chunk index uint32_t chunk_num; ///< the `fdAT`/`IDAT` chunk count uint32_t chunk_size; ///< the `fdAT`/`IDAT` chunk bytes yy_png_chunk_fcTL frame_control; } 
yy_png_frame_info; typedef struct { yy_png_chunk_IHDR header; ///< png header yy_png_chunk_info *chunks; ///< chunks uint32_t chunk_num; ///< count of chunks yy_png_frame_info *apng_frames; ///< frame info, NULL if not apng uint32_t apng_frame_num; ///< 0 if not apng uint32_t apng_loop_num; ///< 0 indicates infinite looping uint32_t *apng_shared_chunk_indexs; ///< shared chunk index uint32_t apng_shared_chunk_num; ///< shared chunk count uint32_t apng_shared_chunk_size; ///< shared chunk bytes uint32_t apng_shared_insert_index; ///< shared chunk insert index bool apng_first_frame_is_cover; ///< the first frame is same as png (cover) } yy_png_info; static void yy_png_chunk_IHDR_read(yy_png_chunk_IHDR *IHDR, const uint8_t *data) { IHDR->width = yy_swap_endian_uint32(*((uint32_t *)(data))); IHDR->height = yy_swap_endian_uint32(*((uint32_t *)(data + 4))); IHDR->bit_depth = data[8]; IHDR->color_type = data[9]; IHDR->compression_method = data[10]; IHDR->filter_method = data[11]; IHDR->interlace_method = data[12]; } static void yy_png_chunk_IHDR_write(yy_png_chunk_IHDR *IHDR, uint8_t *data) { *((uint32_t *)(data)) = yy_swap_endian_uint32(IHDR->width); *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(IHDR->height); data[8] = IHDR->bit_depth; data[9] = IHDR->color_type; data[10] = IHDR->compression_method; data[11] = IHDR->filter_method; data[12] = IHDR->interlace_method; } static void yy_png_chunk_fcTL_read(yy_png_chunk_fcTL *fcTL, const uint8_t *data) { fcTL->sequence_number = yy_swap_endian_uint32(*((uint32_t *)(data))); fcTL->width = yy_swap_endian_uint32(*((uint32_t *)(data + 4))); fcTL->height = yy_swap_endian_uint32(*((uint32_t *)(data + 8))); fcTL->x_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 12))); fcTL->y_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 16))); fcTL->delay_num = yy_swap_endian_uint16(*((uint16_t *)(data + 20))); fcTL->delay_den = yy_swap_endian_uint16(*((uint16_t *)(data + 22))); fcTL->dispose_op = data[24]; fcTL->blend_op = 
data[25]; } static void yy_png_chunk_fcTL_write(yy_png_chunk_fcTL *fcTL, uint8_t *data) { *((uint32_t *)(data)) = yy_swap_endian_uint32(fcTL->sequence_number); *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(fcTL->width); *((uint32_t *)(data + 8)) = yy_swap_endian_uint32(fcTL->height); *((uint32_t *)(data + 12)) = yy_swap_endian_uint32(fcTL->x_offset); *((uint32_t *)(data + 16)) = yy_swap_endian_uint32(fcTL->y_offset); *((uint16_t *)(data + 20)) = yy_swap_endian_uint16(fcTL->delay_num); *((uint16_t *)(data + 22)) = yy_swap_endian_uint16(fcTL->delay_den); data[24] = fcTL->dispose_op; data[25] = fcTL->blend_op; } // convert double value to fraction static void yy_png_delay_to_fraction(double duration, uint16_t *num, uint16_t *den) { if (duration >= 0xFF) { *num = 0xFF; *den = 1; } else if (duration <= 1.0 / (double)0xFF) { *num = 1; *den = 0xFF; } else { // Use continued fraction to calculate the num and den. long MAX = 10; double eps = (0.5 / (double)0xFF); long p[MAX], q[MAX], a[MAX], i, numl = 0, denl = 0; // The first two convergents are 0/1 and 1/0 p[0] = 0; q[0] = 1; p[1] = 1; q[1] = 0; // The rest of the convergents (and continued fraction) for (i = 2; i < MAX; i++) { a[i] = lrint(floor(duration)); p[i] = a[i] * p[i - 1] + p[i - 2]; q[i] = a[i] * q[i - 1] + q[i - 2]; if (p[i] <= 0xFF && q[i] <= 0xFF) { // uint16_t numl = p[i]; denl = q[i]; } else break; if (fabs(duration - a[i]) < eps) break; duration = 1.0 / (duration - a[i]); } if (numl != 0 && denl != 0) { *num = numl; *den = denl; } else { *num = 1; *den = 100; } } } // convert fraction to double value static double yy_png_delay_to_seconds(uint16_t num, uint16_t den) { if (den == 0) { return num / 100.0; } else { return (double)num / (double)den; } } static bool yy_png_validate_animation_chunk_order(yy_png_chunk_info *chunks, /* input */ uint32_t chunk_num, /* input */ uint32_t *first_idat_index, /* output */ bool *first_frame_is_cover /* output */) { /* PNG at least contains 3 chunks: IHDR, IDAT, IEND. 
     `IHDR` must appear first. `IDAT` must appear consecutively. `IEND` must appear end.
     APNG must contains one `acTL` and at least one 'fcTL' and `fdAT`.
     `fdAT` must appear consecutively. `fcTL` must appear before `IDAT` or `fdAT`. */
    if (chunk_num <= 2) return false;
    if (chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R')) return false;
    if ((chunks + chunk_num - 1)->fourcc != YY_FOUR_CC('I', 'E', 'N', 'D')) return false;

    uint32_t prev_fourcc = 0;
    uint32_t IHDR_num = 0;
    uint32_t IDAT_num = 0;
    uint32_t acTL_num = 0;
    uint32_t fcTL_num = 0;
    uint32_t first_IDAT = 0;
    bool first_frame_cover = false;
    for (uint32_t i = 0; i < chunk_num; i++) {
        yy_png_chunk_info *chunk = chunks + i;
        switch (chunk->fourcc) {
            case YY_FOUR_CC('I', 'H', 'D', 'R'): { // png header
                if (i != 0) return false;
                if (IHDR_num > 0) return false;
                IHDR_num++;
            } break;
            case YY_FOUR_CC('I', 'D', 'A', 'T'): { // png data
                if (prev_fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
                    // IDAT chunks must form a single contiguous run.
                    if (IDAT_num == 0) first_IDAT = i;
                    else return false;
                }
                IDAT_num++;
            } break;
            case YY_FOUR_CC('a', 'c', 'T', 'L'): { // apng control
                if (acTL_num > 0) return false; // at most one acTL
                acTL_num++;
            } break;
            case YY_FOUR_CC('f', 'c', 'T', 'L'): { // apng frame control
                if (i + 1 == chunk_num) return false;
                // fcTL must be immediately followed by its frame data.
                if ((chunk + 1)->fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') &&
                    (chunk + 1)->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
                    return false;
                }
                if (fcTL_num == 0) {
                    // First fcTL followed by IDAT means frame 0 is the cover image.
                    if ((chunk + 1)->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
                        first_frame_cover = true;
                    }
                }
                fcTL_num++;
            } break;
            case YY_FOUR_CC('f', 'd', 'A', 'T'): { // apng data
                // fdAT must follow either another fdAT or an fcTL.
                if (prev_fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') &&
                    prev_fourcc != YY_FOUR_CC('f', 'c', 'T', 'L')) {
                    return false;
                }
            } break;
        }
        prev_fourcc = chunk->fourcc;
    }
    if (IHDR_num != 1) return false;
    if (IDAT_num == 0) return false;
    if (acTL_num != 1) return false;
    if (fcTL_num < acTL_num) return false;
    *first_idat_index = first_IDAT;
    *first_frame_is_cover = first_frame_cover;
    return true;
}

/// Frees a yy_png_info and all buffers it owns; safe to call with NULL.
static void yy_png_info_release(yy_png_info *info) {
    if (info) {
        if
(info->chunks) free(info->chunks);
        if (info->apng_frames) free(info->apng_frames);
        if (info->apng_shared_chunk_indexs) free(info->apng_shared_chunk_indexs);
        free(info);
    }
}

/**
 Create a png info from a png file. See struct png_info for more information.
 
 @param data   png/apng file data.
 @param length the data's length in bytes.
 @return A png info object, you may call yy_png_info_release() to release it.
 Returns NULL if an error occurs.
 */
static yy_png_info *yy_png_info_create(const uint8_t *data, uint32_t length) {
    // 8-byte signature + IHDR (25) is the minimum plausible size; also verify
    // the fixed PNG file signature bytes.
    if (length < 32) return NULL;
    if (*((uint32_t *)data) != YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47)) return NULL;
    if (*((uint32_t *)(data + 4)) != YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A)) return NULL;

    uint32_t chunk_realloc_num = 16;
    yy_png_chunk_info *chunks = malloc(sizeof(yy_png_chunk_info) * chunk_realloc_num);
    if (!chunks) return NULL;

    // parse png chunks
    uint32_t offset = 8;
    uint32_t chunk_num = 0;
    uint32_t chunk_capacity = chunk_realloc_num;
    uint32_t apng_loop_num = 0;
    int32_t apng_sequence_index = -1;
    int32_t apng_frame_index = 0;
    int32_t apng_frame_number = -1;
    bool apng_chunk_error = false;
    do {
        // Grow the chunk array geometrically by fixed increments.
        if (chunk_num >= chunk_capacity) {
            yy_png_chunk_info *new_chunks = realloc(chunks, sizeof(yy_png_chunk_info) * (chunk_capacity + chunk_realloc_num));
            if (!new_chunks) {
                free(chunks);
                return NULL;
            }
            chunks = new_chunks;
            chunk_capacity += chunk_realloc_num;
        }
        yy_png_chunk_info *chunk = chunks + chunk_num;
        const uint8_t *chunk_data = data + offset;
        chunk->offset = offset;
        chunk->length = yy_swap_endian_uint32(*((uint32_t *)chunk_data));
        // Bounds checks are done in 64-bit to avoid uint32 overflow on
        // malicious length fields.
        if ((uint64_t)chunk->offset + (uint64_t)chunk->length + 12 > length) {
            free(chunks);
            return NULL;
        }
        chunk->fourcc = *((uint32_t *)(chunk_data + 4));
        if ((uint64_t)chunk->offset + 4 + chunk->length + 4 > (uint64_t)length) break;
        chunk->crc32 = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8 + chunk->length)));
        chunk_num++;
        offset += 12 + chunk->length;

        switch (chunk->fourcc) {
            case YY_FOUR_CC('a', 'c', 'T', 'L') : {
                // acTL payload: frame count + loop count (8 bytes).
                if (chunk->length == 8) {
                    apng_frame_number = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
                    apng_loop_num = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 12)));
                } else {
                    apng_chunk_error = true;
                }
            } break;
            case YY_FOUR_CC('f', 'c', 'T', 'L') :
            case YY_FOUR_CC('f', 'd', 'A', 'T') : {
                if (chunk->fourcc == YY_FOUR_CC('f', 'c', 'T', 'L')) {
                    if (chunk->length != 26) {
                        apng_chunk_error = true;
                    } else {
                        apng_frame_index++;
                    }
                }
                if (chunk->length > 4) {
                    // Both fcTL and fdAT start with a sequence number that must
                    // increase by exactly one across the whole file.
                    uint32_t sequence = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
                    if (apng_sequence_index + 1 == sequence) {
                        apng_sequence_index++;
                    } else {
                        apng_chunk_error = true;
                    }
                } else {
                    apng_chunk_error = true;
                }
            } break;
            case YY_FOUR_CC('I', 'E', 'N', 'D') : {
                offset = length; // end, break do-while loop
            } break;
        }
    } while (offset + 12 <= length);

    if (chunk_num < 3 ||
        chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R') ||
        chunks->length != 13) {
        free(chunks);
        return NULL;
    }

    // png info
    yy_png_info *info = calloc(1, sizeof(yy_png_info));
    if (!info) {
        free(chunks);
        return NULL;
    }
    info->chunks = chunks;
    info->chunk_num = chunk_num;
    yy_png_chunk_IHDR_read(&info->header, data + chunks->offset + 8);

    // apng info: only filled in when every APNG chunk was consistent.
    if (!apng_chunk_error &&
        apng_frame_number == apng_frame_index &&
        apng_frame_number >= 1) {
        bool first_frame_is_cover = false;
        uint32_t first_IDAT_index = 0;
        if (!yy_png_validate_animation_chunk_order(info->chunks, info->chunk_num, &first_IDAT_index, &first_frame_is_cover)) {
            return info; // ignore apng chunk
        }
        info->apng_loop_num = apng_loop_num;
        info->apng_frame_num = apng_frame_number;
        info->apng_first_frame_is_cover = first_frame_is_cover;
        info->apng_shared_insert_index = first_IDAT_index;
        info->apng_frames = calloc(apng_frame_number, sizeof(yy_png_frame_info));
        if (!info->apng_frames) {
            yy_png_info_release(info);
            return NULL;
        }
        info->apng_shared_chunk_indexs = calloc(info->chunk_num, sizeof(uint32_t));
        if (!info->apng_shared_chunk_indexs) {
            yy_png_info_release(info);
            return NULL;
        }

        // Partition the chunks into per-frame data and shared chunks
        // (everything that every remuxed single-frame PNG needs).
        int32_t frame_index = -1;
        uint32_t *shared_chunk_index = info->apng_shared_chunk_indexs;
        for (int32_t i = 0; i < info->chunk_num; i++) {
            yy_png_chunk_info *chunk = info->chunks + i;
            switch (chunk->fourcc) {
                case YY_FOUR_CC('I', 'D', 'A', 'T'): {
                    if (info->apng_shared_insert_index == 0) {
                        info->apng_shared_insert_index = i;
                    }
                    if (first_frame_is_cover) {
                        // Frame 0 reuses the cover image's IDAT chunks.
                        yy_png_frame_info *frame = info->apng_frames + frame_index;
                        frame->chunk_num++;
                        frame->chunk_size += chunk->length + 12;
                    }
                } break;
                case YY_FOUR_CC('a', 'c', 'T', 'L'): {
                    // Dropped: a remuxed single frame is a plain PNG.
                } break;
                case YY_FOUR_CC('f', 'c', 'T', 'L'): {
                    frame_index++;
                    yy_png_frame_info *frame = info->apng_frames + frame_index;
                    frame->chunk_index = i + 1;
                    yy_png_chunk_fcTL_read(&frame->frame_control, data + chunk->offset + 8);
                } break;
                case YY_FOUR_CC('f', 'd', 'A', 'T'): {
                    yy_png_frame_info *frame = info->apng_frames + frame_index;
                    frame->chunk_num++;
                    frame->chunk_size += chunk->length + 12;
                } break;
                default: {
                    *shared_chunk_index = i;
                    shared_chunk_index++;
                    info->apng_shared_chunk_size += chunk->length + 12;
                    info->apng_shared_chunk_num++;
                } break;
            }
        }
    }
    return info;
}

/**
 Copy a png frame data from an apng file.
 
 @param data  apng file data
 @param info  png info
 @param index frame index (zero-based)
 @param size  output, the size of the frame data
 @return A frame data (single-frame png file), call free() to release the data.
 Returns NULL if an error occurs.
 */
static uint8_t *yy_png_copy_frame_data_at_index(const uint8_t *data,
                                                const yy_png_info *info,
                                                const uint32_t index,
                                                uint32_t *size) {
    if (index >= info->apng_frame_num) return NULL;

    yy_png_frame_info *frame_info = info->apng_frames + index;
    uint32_t frame_remux_size = 8 /* PNG Header */ + info->apng_shared_chunk_size + frame_info->chunk_size;
    if (!(info->apng_first_frame_is_cover && index == 0)) {
        frame_remux_size -= frame_info->chunk_num * 4; // remove fdAT sequence number
    }
    uint8_t *frame_data = malloc(frame_remux_size);
    if (!frame_data) return NULL;
    *size = frame_remux_size;

    uint32_t data_offset = 0;
    bool inserted = false;
    memcpy(frame_data, data, 8); // PNG File Header
    data_offset += 8;
    for (uint32_t i = 0; i < info->apng_shared_chunk_num; i++) {
        uint32_t shared_chunk_index = info->apng_shared_chunk_indexs[i];
        yy_png_chunk_info *shared_chunk_info = info->chunks + shared_chunk_index;

        if (shared_chunk_index >= info->apng_shared_insert_index && !inserted) { // replace IDAT with fdAT
            inserted = true;
            for (uint32_t c = 0; c < frame_info->chunk_num; c++) {
                yy_png_chunk_info *insert_chunk_info = info->chunks + frame_info->chunk_index + c;
                if (insert_chunk_info->fourcc == YY_FOUR_CC('f', 'd', 'A', 'T')) {
                    // Rewrite fdAT -> IDAT: drop the 4-byte sequence number,
                    // re-tag the chunk, and recompute the CRC over tag+payload.
                    *((uint32_t *)(frame_data + data_offset)) = yy_swap_endian_uint32(insert_chunk_info->length - 4);
                    *((uint32_t *)(frame_data + data_offset + 4)) = YY_FOUR_CC('I', 'D', 'A', 'T');
                    memcpy(frame_data + data_offset + 8, data + insert_chunk_info->offset + 12, insert_chunk_info->length - 4);
                    uint32_t crc = (uint32_t)crc32(0, frame_data + data_offset + 4, insert_chunk_info->length);
                    *((uint32_t *)(frame_data + data_offset + insert_chunk_info->length + 4)) = yy_swap_endian_uint32(crc);
                    data_offset += insert_chunk_info->length + 8;
                } else { // IDAT
                    memcpy(frame_data + data_offset, data + insert_chunk_info->offset, insert_chunk_info->length + 12);
                    data_offset += insert_chunk_info->length + 12;
                }
            }
        }

        if (shared_chunk_info->fourcc == YY_FOUR_CC('I', 'H', 'D', 'R')) {
            // Patch the frame's own width/height into a copy of IHDR and
            // recompute its CRC (tag + 13-byte payload = 17 bytes).
            uint8_t tmp[25] = {0};
            memcpy(tmp, data + shared_chunk_info->offset, 25);
            yy_png_chunk_IHDR IHDR = info->header;
            IHDR.width = frame_info->frame_control.width;
            IHDR.height = frame_info->frame_control.height;
            yy_png_chunk_IHDR_write(&IHDR, tmp + 8);
            *((uint32_t *)(tmp + 21)) = yy_swap_endian_uint32((uint32_t)crc32(0, tmp + 4, 17));
            memcpy(frame_data + data_offset, tmp, 25);
            data_offset += 25;
        } else {
            memcpy(frame_data + data_offset, data + shared_chunk_info->offset, shared_chunk_info->length + 12);
            data_offset += shared_chunk_info->length + 12;
        }
    }
    return frame_data;
}

////////////////////////////////////////////////////////////////////////////////

#pragma mark - Helper

/// Returns byte-aligned size.
static inline size_t YYImageByteAlign(size_t size, size_t alignment) {
    return ((size + (alignment - 1)) / alignment) * alignment;
}

/// Convert degree to radians
static inline CGFloat YYImageDegreesToRadians(CGFloat degrees) {
    return degrees * M_PI / 180;
}

/// Shared device-RGB color space, created once and intentionally never
/// released (CGColorSpaceRef is immutable and reused for the process lifetime).
CGColorSpaceRef YYCGColorSpaceGetDeviceRGB() {
    static CGColorSpaceRef space;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        space = CGColorSpaceCreateDeviceRGB();
    });
    return space;
}

/// Shared device-gray color space, created once (see note above).
CGColorSpaceRef YYCGColorSpaceGetDeviceGray() {
    static CGColorSpaceRef space;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        space = CGColorSpaceCreateDeviceGray();
    });
    return space;
}

/// Whether `space` equals the shared device-RGB color space (NO for NULL).
BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space) {
    return space && CFEqual(space, YYCGColorSpaceGetDeviceRGB());
}

/// Whether `space` equals the shared device-gray color space (NO for NULL).
BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space) {
    return space && CFEqual(space, YYCGColorSpaceGetDeviceGray());
}

/**
 A callback used in CGDataProviderCreateWithData() to release data.
Example: void *data = malloc(size); CGDataProviderRef provider = CGDataProviderCreateWithData(data, data, size, YYCGDataProviderReleaseDataCallback); */ static void YYCGDataProviderReleaseDataCallback(void *info, const void *data, size_t size) { if (info) free(info); } /** Decode an image to bitmap buffer with the specified format. @param srcImage Source image. @param dest Destination buffer. It should be zero before call this method. If decode succeed, you should release the dest->data using free(). @param destFormat Destination bitmap format. @return Whether succeed. @warning This method support iOS7.0 and later. If call it on iOS6, it just returns NO. CG_AVAILABLE_STARTING(__MAC_10_9, __IPHONE_7_0) */ static BOOL YYCGImageDecodeToBitmapBufferWithAnyFormat(CGImageRef srcImage, vImage_Buffer *dest, vImage_CGImageFormat *destFormat) { if (!srcImage || (((long)vImageConvert_AnyToAny) + 1 == 1) || !destFormat || !dest) return NO; size_t width = CGImageGetWidth(srcImage); size_t height = CGImageGetHeight(srcImage); if (width == 0 || height == 0) return NO; dest->data = NULL; vImage_Error error = kvImageNoError; CFDataRef srcData = NULL; vImageConverterRef convertor = NULL; vImage_CGImageFormat srcFormat = {0}; srcFormat.bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(srcImage); srcFormat.bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(srcImage); srcFormat.colorSpace = CGImageGetColorSpace(srcImage); srcFormat.bitmapInfo = CGImageGetBitmapInfo(srcImage) | CGImageGetAlphaInfo(srcImage); convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, destFormat, NULL, kvImageNoFlags, NULL); if (!convertor) goto fail; CGDataProviderRef srcProvider = CGImageGetDataProvider(srcImage); srcData = srcProvider ? CGDataProviderCopyData(srcProvider) : NULL; // decode size_t srcLength = srcData ? CFDataGetLength(srcData) : 0; const void *srcBytes = srcData ? 
CFDataGetBytePtr(srcData) : NULL; if (srcLength == 0 || !srcBytes) goto fail; vImage_Buffer src = {0}; src.data = (void *)srcBytes; src.width = width; src.height = height; src.rowBytes = CGImageGetBytesPerRow(srcImage); error = vImageBuffer_Init(dest, height, width, 32, kvImageNoFlags); if (error != kvImageNoError) goto fail; error = vImageConvert_AnyToAny(convertor, &src, dest, NULL, kvImageNoFlags); // convert if (error != kvImageNoError) goto fail; CFRelease(convertor); CFRelease(srcData); return YES; fail: if (convertor) CFRelease(convertor); if (srcData) CFRelease(srcData); if (dest->data) free(dest->data); dest->data = NULL; return NO; } /** Decode an image to bitmap buffer with the 32bit format (such as ARGB8888). @param srcImage Source image. @param dest Destination buffer. It should be zero before call this method. If decode succeed, you should release the dest->data using free(). @param bitmapInfo Destination bitmap format. @return Whether succeed. */ static BOOL YYCGImageDecodeToBitmapBufferWith32BitFormat(CGImageRef srcImage, vImage_Buffer *dest, CGBitmapInfo bitmapInfo) { if (!srcImage || !dest) return NO; size_t width = CGImageGetWidth(srcImage); size_t height = CGImageGetHeight(srcImage); if (width == 0 || height == 0) return NO; BOOL hasAlpha = NO; BOOL alphaFirst = NO; BOOL alphaPremultiplied = NO; BOOL byteOrderNormal = NO; switch (bitmapInfo & kCGBitmapAlphaInfoMask) { case kCGImageAlphaPremultipliedLast: { hasAlpha = YES; alphaPremultiplied = YES; } break; case kCGImageAlphaPremultipliedFirst: { hasAlpha = YES; alphaPremultiplied = YES; alphaFirst = YES; } break; case kCGImageAlphaLast: { hasAlpha = YES; } break; case kCGImageAlphaFirst: { hasAlpha = YES; alphaFirst = YES; } break; case kCGImageAlphaNoneSkipLast: { } break; case kCGImageAlphaNoneSkipFirst: { alphaFirst = YES; } break; default: { return NO; } break; } switch (bitmapInfo & kCGBitmapByteOrderMask) { case kCGBitmapByteOrderDefault: { byteOrderNormal = YES; } break; case 
kCGBitmapByteOrder32Little: { } break; case kCGBitmapByteOrder32Big: { byteOrderNormal = YES; } break; default: { return NO; } break; } /* Try convert with vImageConvert_AnyToAny() (avaliable since iOS 7.0). If fail, try decode with CGContextDrawImage(). CGBitmapContext use a premultiplied alpha format, unpremultiply may lose precision. */ vImage_CGImageFormat destFormat = {0}; destFormat.bitsPerComponent = 8; destFormat.bitsPerPixel = 32; destFormat.colorSpace = YYCGColorSpaceGetDeviceRGB(); destFormat.bitmapInfo = bitmapInfo; dest->data = NULL; if (YYCGImageDecodeToBitmapBufferWithAnyFormat(srcImage, dest, &destFormat)) return YES; CGBitmapInfo contextBitmapInfo = bitmapInfo & kCGBitmapByteOrderMask; if (!hasAlpha || alphaPremultiplied) { contextBitmapInfo |= (bitmapInfo & kCGBitmapAlphaInfoMask); } else { contextBitmapInfo |= alphaFirst ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaPremultipliedLast; } CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), contextBitmapInfo); if (!context) goto fail; CGContextDrawImage(context, CGRectMake(0, 0, width, height), srcImage); // decode and convert size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context); size_t length = height * bytesPerRow; void *data = CGBitmapContextGetData(context); if (length == 0 || !data) goto fail; dest->data = malloc(length); dest->width = width; dest->height = height; dest->rowBytes = bytesPerRow; if (!dest->data) goto fail; if (hasAlpha && !alphaPremultiplied) { vImage_Buffer tmpSrc = {0}; tmpSrc.data = data; tmpSrc.width = width; tmpSrc.height = height; tmpSrc.rowBytes = bytesPerRow; vImage_Error error; if (alphaFirst && byteOrderNormal) { error = vImageUnpremultiplyData_ARGB8888(&tmpSrc, dest, kvImageNoFlags); } else { error = vImageUnpremultiplyData_RGBA8888(&tmpSrc, dest, kvImageNoFlags); } if (error != kvImageNoError) goto fail; } else { memcpy(dest->data, data, length); } CFRelease(context); return YES; fail: if (context) 
CFRelease(context); if (dest->data) free(dest->data); dest->data = NULL; return NO; return NO; } CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) { if (!imageRef) return NULL; size_t width = CGImageGetWidth(imageRef); size_t height = CGImageGetHeight(imageRef); if (width == 0 || height == 0) return NULL; if (decodeForDisplay) { //decode with redraw (may lose some precision) CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask; BOOL hasAlpha = NO; if (alphaInfo == kCGImageAlphaPremultipliedLast || alphaInfo == kCGImageAlphaPremultipliedFirst || alphaInfo == kCGImageAlphaLast || alphaInfo == kCGImageAlphaFirst) { hasAlpha = YES; } // BGRA8888 (premultiplied) or BGRX8888 // same as UIGraphicsBeginImageContext() and -[UIView drawRect:] CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host; bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst; CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), bitmapInfo); if (!context) return NULL; CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode CGImageRef newImage = CGBitmapContextCreateImage(context); CFRelease(context); return newImage; } else { CGColorSpaceRef space = CGImageGetColorSpace(imageRef); size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef); size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef); size_t bytesPerRow = CGImageGetBytesPerRow(imageRef); CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef); if (bytesPerRow == 0 || width == 0 || height == 0) return NULL; CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef); if (!dataProvider) return NULL; CFDataRef data = CGDataProviderCopyData(dataProvider); // decode if (!data) return NULL; CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data); CFRelease(data); if (!newProvider) return NULL; CGImageRef newImage = CGImageCreate(width, height, 
bitsPerComponent, bitsPerPixel, bytesPerRow,
                                            space, bitmapInfo, newProvider, NULL,
                                            false, kCGRenderingIntentDefault);
        CFRelease(newProvider);
        return newImage;
    }
}

/// Returns a new image produced by applying `transform` to `imageRef` into a
/// canvas of `destSize`, encoded with `destBitmapInfo`. Decodes to ARGB8888,
/// warps with vImage, then converts to the requested format if it differs.
CGImageRef YYCGImageCreateAffineTransformCopy(CGImageRef imageRef, CGAffineTransform transform, CGSize destSize, CGBitmapInfo destBitmapInfo) {
    if (!imageRef) return NULL;
    size_t srcWidth = CGImageGetWidth(imageRef);
    size_t srcHeight = CGImageGetHeight(imageRef);
    size_t destWidth = round(destSize.width);
    size_t destHeight = round(destSize.height);
    if (srcWidth == 0 || srcHeight == 0 || destWidth == 0 || destHeight == 0) return NULL;

    CGDataProviderRef tmpProvider = NULL, destProvider = NULL;
    CGImageRef tmpImage = NULL, destImage = NULL;
    vImage_Buffer src = {0}, tmp = {0}, dest = {0};
    if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &src, kCGImageAlphaFirst | kCGBitmapByteOrderDefault)) return NULL;

    size_t destBytesPerRow = YYImageByteAlign(destWidth * 4, 32);
    tmp.data = malloc(destHeight * destBytesPerRow);
    if (!tmp.data) goto fail;

    tmp.width = destWidth;
    tmp.height = destHeight;
    tmp.rowBytes = destBytesPerRow;
    // vImage_CGAffineTransform has the same layout as CGAffineTransform.
    vImage_CGAffineTransform vTransform = *((vImage_CGAffineTransform *)&transform);
    uint8_t backColor[4] = {0};
    vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &tmp, NULL, &vTransform, backColor, kvImageBackgroundColorFill);
    if (error != kvImageNoError) goto fail;
    free(src.data);
    src.data = NULL;

    tmpProvider = CGDataProviderCreateWithData(tmp.data, tmp.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
    if (!tmpProvider) goto fail;
    tmp.data = NULL; // hold by provider
    tmpImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), kCGImageAlphaFirst | kCGBitmapByteOrderDefault, tmpProvider, NULL, false, kCGRenderingIntentDefault);
    if (!tmpImage) goto fail;
    CFRelease(tmpProvider);
    tmpProvider = NULL;

    // If the intermediate image already matches the requested format,
    // return it directly without a second conversion pass.
    if ((destBitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaFirst &&
        (destBitmapInfo & kCGBitmapByteOrderMask) != kCGBitmapByteOrder32Little) {
        return tmpImage;
    }

    if (!YYCGImageDecodeToBitmapBufferWith32BitFormat(tmpImage, &dest, destBitmapInfo)) goto fail;
    CFRelease(tmpImage);
    tmpImage = NULL;

    destProvider = CGDataProviderCreateWithData(dest.data, dest.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
    if (!destProvider) goto fail;
    dest.data = NULL; // hold by provider
    destImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), destBitmapInfo, destProvider, NULL, false, kCGRenderingIntentDefault);
    if (!destImage) goto fail;
    CFRelease(destProvider);
    destProvider = NULL;

    return destImage;

fail:
    if (src.data) free(src.data);
    if (tmp.data) free(tmp.data);
    if (dest.data) free(dest.data);
    if (tmpProvider) CFRelease(tmpProvider);
    if (tmpImage) CFRelease(tmpImage);
    if (destProvider) CFRelease(destProvider);
    return NULL;
}

/// Maps an EXIF/TIFF orientation value (1-8) to UIImageOrientation;
/// unknown values fall back to "up".
UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value) {
    switch (value) {
        case kCGImagePropertyOrientationUp: return UIImageOrientationUp;
        case kCGImagePropertyOrientationDown: return UIImageOrientationDown;
        case kCGImagePropertyOrientationLeft: return UIImageOrientationLeft;
        case kCGImagePropertyOrientationRight: return UIImageOrientationRight;
        case kCGImagePropertyOrientationUpMirrored: return UIImageOrientationUpMirrored;
        case kCGImagePropertyOrientationDownMirrored: return UIImageOrientationDownMirrored;
        case kCGImagePropertyOrientationLeftMirrored: return UIImageOrientationLeftMirrored;
        case kCGImagePropertyOrientationRightMirrored: return UIImageOrientationRightMirrored;
        default: return UIImageOrientationUp;
    }
}

/// Maps a UIImageOrientation back to its EXIF/TIFF orientation value (1-8);
/// unknown values fall back to "up" (1).
NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation) {
    switch (orientation) {
        case UIImageOrientationUp: return kCGImagePropertyOrientationUp;
        case UIImageOrientationDown: return kCGImagePropertyOrientationDown;
        case UIImageOrientationLeft: return kCGImagePropertyOrientationLeft;
        case UIImageOrientationRight: return kCGImagePropertyOrientationRight;
case UIImageOrientationUpMirrored: return kCGImagePropertyOrientationUpMirrored;
        case UIImageOrientationDownMirrored: return kCGImagePropertyOrientationDownMirrored;
        case UIImageOrientationLeftMirrored: return kCGImagePropertyOrientationLeftMirrored;
        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
        default: return kCGImagePropertyOrientationUp;
    }
}

/// Returns a copy of `imageRef` with its pixels physically rotated/flipped so
/// the result displays correctly as UIImageOrientationUp. Returns a retained
/// reference to the original when no transform is needed.
CGImageRef YYCGImageCreateCopyWithOrientation(CGImageRef imageRef, UIImageOrientation orientation, CGBitmapInfo destBitmapInfo) {
    if (!imageRef) return NULL;
    if (orientation == UIImageOrientationUp) return (CGImageRef)CFRetain(imageRef);

    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);

    // Build the affine transform that undoes this orientation; 90-degree
    // rotations swap the output canvas dimensions.
    CGAffineTransform transform = CGAffineTransformIdentity;
    BOOL swapWidthAndHeight = NO;
    switch (orientation) {
        case UIImageOrientationDown: {
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(180));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
        } break;
        case UIImageOrientationLeft: {
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)0, -(CGFloat)height);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationRight: {
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, (CGFloat)0);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationUpMirrored: {
            transform = CGAffineTransformTranslate(transform, (CGFloat)width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
        } break;
        case UIImageOrientationDownMirrored: {
            transform = CGAffineTransformTranslate(transform, 0, (CGFloat)height);
            transform = CGAffineTransformScale(transform, 1, -1);
        } break;
        case UIImageOrientationLeftMirrored: {
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
            transform = CGAffineTransformScale(transform, 1, -1);
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationRightMirrored: {
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
            transform = CGAffineTransformScale(transform, 1, -1);
            swapWidthAndHeight = YES;
        } break;
        default: break;
    }
    if (CGAffineTransformIsIdentity(transform)) return (CGImageRef)CFRetain(imageRef);

    CGSize destSize = {width, height};
    if (swapWidthAndHeight) {
        destSize.width = height;
        destSize.height = width;
    }

    return YYCGImageCreateAffineTransformCopy(imageRef, transform, destSize, destBitmapInfo);
}

/// Sniffs the image container format from the leading magic bytes of `data`;
/// returns YYImageTypeUnknown when not recognized or shorter than 16 bytes.
YYImageType YYImageDetectType(CFDataRef data) {
    if (!data) return YYImageTypeUnknown;
    uint64_t length = CFDataGetLength(data);
    if (length < 16) return YYImageTypeUnknown;

    const char *bytes = (char *)CFDataGetBytePtr(data);

    uint32_t magic4 = *((uint32_t *)bytes);
    switch (magic4) {
        case YY_FOUR_CC(0x4D, 0x4D, 0x00, 0x2A): { // big endian TIFF
            return YYImageTypeTIFF;
        } break;
        case YY_FOUR_CC(0x49, 0x49, 0x2A, 0x00): { // little endian TIFF
            return YYImageTypeTIFF;
        } break;
        case YY_FOUR_CC(0x00, 0x00, 0x01, 0x00): { // ICO
            return YYImageTypeICO;
        } break;
        case YY_FOUR_CC(0x00, 0x00, 0x02, 0x00): { // CUR
            return YYImageTypeICO;
        } break;
        case YY_FOUR_CC('i', 'c', 'n', 's'): { // ICNS
            return YYImageTypeICNS;
        } break;
        case YY_FOUR_CC('G', 'I', 'F', '8'): { // GIF
            return YYImageTypeGIF;
        } break;
        case YY_FOUR_CC(0x89, 'P', 'N', 'G'): { // PNG
            uint32_t tmp = *((uint32_t *)(bytes + 4));
            if (tmp == YY_FOUR_CC('\r', '\n', 0x1A, '\n')) {
                return YYImageTypePNG;
            }
        } break;
        case YY_FOUR_CC('R', 'I', 'F', 'F'): { // WebP
            uint32_t tmp = *((uint32_t *)(bytes + 8));
            if (tmp == YY_FOUR_CC('W', 'E', 'B', 'P')) {
                return YYImageTypeWebP;
            }
        } break;
        /* case YY_FOUR_CC('B', 'P', 'G', 0xFB): { // BPG
            return YYImageTypeBPG;
        } break; */
    }

    uint16_t magic2 = *((uint16_t *)bytes);
    switch (magic2) {
        case YY_TWO_CC('B', 'A'):
        case YY_TWO_CC('B', 'M'):
        case YY_TWO_CC('I', 'C'):
        case YY_TWO_CC('P',
'I'):
        case YY_TWO_CC('C', 'I'):
        case YY_TWO_CC('C', 'P'): { // BMP
            return YYImageTypeBMP;
        }
        case YY_TWO_CC(0xFF, 0x4F): { // JPEG2000
            return YYImageTypeJPEG2000;
        }
    }

    // JPG FF D8 FF
    if (memcmp(bytes,"\377\330\377",3) == 0) return YYImageTypeJPEG;

    // JP2
    if (memcmp(bytes + 4, "\152\120\040\040\015", 5) == 0) return YYImageTypeJPEG2000;

    return YYImageTypeUnknown;
}

/// Maps a YYImageType to its UTType constant, or NULL when unsupported.
CFStringRef YYImageTypeToUTType(YYImageType type) {
    switch (type) {
        case YYImageTypeJPEG: return kUTTypeJPEG;
        case YYImageTypeJPEG2000: return kUTTypeJPEG2000;
        case YYImageTypeTIFF: return kUTTypeTIFF;
        case YYImageTypeBMP: return kUTTypeBMP;
        case YYImageTypeICO: return kUTTypeICO;
        case YYImageTypeICNS: return kUTTypeAppleICNS;
        case YYImageTypeGIF: return kUTTypeGIF;
        case YYImageTypePNG: return kUTTypePNG;
        default: return NULL;
    }
}

/// Maps a UTType string back to a YYImageType using a lazily-built lookup
/// table; returns YYImageTypeUnknown for nil or unrecognized UTIs.
YYImageType YYImageTypeFromUTType(CFStringRef uti) {
    static NSDictionary *dic;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        dic = @{(id)kUTTypeJPEG : @(YYImageTypeJPEG),
                (id)kUTTypeJPEG2000 : @(YYImageTypeJPEG2000),
                (id)kUTTypeTIFF : @(YYImageTypeTIFF),
                (id)kUTTypeBMP : @(YYImageTypeBMP),
                (id)kUTTypeICO : @(YYImageTypeICO),
                (id)kUTTypeAppleICNS : @(YYImageTypeICNS),
                (id)kUTTypeGIF : @(YYImageTypeGIF),
                (id)kUTTypePNG : @(YYImageTypePNG)};
    });
    if (!uti) return YYImageTypeUnknown;
    NSNumber *num = dic[(__bridge __strong id)(uti)];
    return num.unsignedIntegerValue;
}

/// Returns the conventional file extension for a YYImageType, or nil.
NSString *YYImageTypeGetExtension(YYImageType type) {
    switch (type) {
        case YYImageTypeJPEG: return @"jpg";
        case YYImageTypeJPEG2000: return @"jp2";
        case YYImageTypeTIFF: return @"tiff";
        case YYImageTypeBMP: return @"bmp";
        case YYImageTypeICO: return @"ico";
        case YYImageTypeICNS: return @"icns";
        case YYImageTypeGIF: return @"gif";
        case YYImageTypePNG: return @"png";
        case YYImageTypeWebP: return @"webp";
        default: return nil;
    }
}

/// Encodes a CGImage into the requested container format via ImageIO (or the
/// WebP encoder when built with YYIMAGE_WEBP_ENABLED). `quality` is clamped
/// to [0, 1]. Returns NULL on failure; the caller releases the result.
CFDataRef YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality) {
    if (!imageRef) return nil;
    quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;

    if (type == YYImageTypeWebP) {
#if YYIMAGE_WEBP_ENABLED
        if (quality == 1) {
            return YYCGImageCreateEncodedWebPData(imageRef, YES, quality, 4, YYImagePresetDefault);
        } else {
            return YYCGImageCreateEncodedWebPData(imageRef, NO, quality, 4, YYImagePresetDefault);
        }
#else
        return NULL;
#endif
    }

    CFStringRef uti = YYImageTypeToUTType(type);
    if (!uti) return nil;
    CFMutableDataRef data = CFDataCreateMutable(CFAllocatorGetDefault(), 0);
    if (!data) return NULL;
    CGImageDestinationRef dest = CGImageDestinationCreateWithData(data, uti, 1, NULL);
    if (!dest) {
        CFRelease(data);
        return NULL;
    }
    NSDictionary *options = @{(id)kCGImageDestinationLossyCompressionQuality : @(quality) };
    CGImageDestinationAddImage(dest, imageRef, (CFDictionaryRef)options);
    if (!CGImageDestinationFinalize(dest)) {
        CFRelease(data);
        CFRelease(dest);
        return nil;
    }
    CFRelease(dest);
    if (CFDataGetLength(data) == 0) {
        CFRelease(data);
        return NULL;
    }
    return data;
}

#if YYIMAGE_WEBP_ENABLED

BOOL YYImageWebPAvailable() {
    return YES;
}

/// Encodes a CGImage to WebP via libwebp. `quality` is clamped to [0, 1] and
/// `compressLevel` to [0, 6]; returns NULL on failure. Caller releases.
CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
    if (!imageRef) return nil;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || width > WEBP_MAX_DIMENSION) return nil;
    if (height == 0 || height > WEBP_MAX_DIMENSION) return nil;

    vImage_Buffer buffer = {0};
    if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &buffer, kCGImageAlphaLast | kCGBitmapByteOrderDefault)) return nil;

    WebPConfig config = {0};
    WebPPicture picture = {0};
    WebPMemoryWriter writer = {0};
    CFDataRef webpData = NULL;
    BOOL pictureNeedFree = NO;

    quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
    preset = preset > YYImagePresetText ? YYImagePresetDefault : preset;
    compressLevel = compressLevel < 0 ? 0 : compressLevel > 6 ? 6 : compressLevel;
    if (!WebPConfigPreset(&config, (WebPPreset)preset, quality)) goto fail;

    config.quality = round(quality * 100.0);
    config.lossless = lossless;
    config.method = compressLevel;
    switch ((WebPPreset)preset) {
        case WEBP_PRESET_DEFAULT: {
            config.image_hint = WEBP_HINT_DEFAULT;
        } break;
        case WEBP_PRESET_PICTURE: {
            config.image_hint = WEBP_HINT_PICTURE;
        } break;
        case WEBP_PRESET_PHOTO: {
            config.image_hint = WEBP_HINT_PHOTO;
        } break;
        case WEBP_PRESET_DRAWING:
        case WEBP_PRESET_ICON:
        case WEBP_PRESET_TEXT: {
            config.image_hint = WEBP_HINT_GRAPH;
        } break;
    }
    if (!WebPValidateConfig(&config)) goto fail;

    if (!WebPPictureInit(&picture)) goto fail;
    pictureNeedFree = YES;
    picture.width = (int)buffer.width;
    picture.height = (int)buffer.height;
    picture.use_argb = lossless;
    if(!WebPPictureImportRGBA(&picture, buffer.data, (int)buffer.rowBytes)) goto fail;

    WebPMemoryWriterInit(&writer);
    picture.writer = WebPMemoryWrite;
    picture.custom_ptr = &writer;
    if(!WebPEncode(&config, &picture)) goto fail;

    webpData = CFDataCreate(CFAllocatorGetDefault(), writer.mem, writer.size);
    free(writer.mem);
    WebPPictureFree(&picture);
    free(buffer.data);
    return webpData;

fail:
    if (buffer.data) free(buffer.data);
    if (pictureNeedFree) WebPPictureFree(&picture);
    return nil;
}

/// Returns the number of frames in a WebP container, or 0 on error.
NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
    if (!webpData || CFDataGetLength(webpData) == 0) return 0;

    WebPData data = {CFDataGetBytePtr(webpData), CFDataGetLength(webpData)};
    WebPDemuxer *demuxer = WebPDemux(&data);
    if (!demuxer) return 0;
    NSUInteger webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
    WebPDemuxDelete(demuxer);
    return webpFrameCount;
}

CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
                                       BOOL decodeForDisplay,
                                       BOOL useThreads,
                                       BOOL bypassFiltering,
                                       BOOL noFancyUpsampling) {
    /*
     Call WebPDecode() on a multi-frame webp data will get an error (VP8_STATUS_UNSUPPORTED_FEATURE).
     Use WebPDemuxer to unpack it first.
*/ WebPData data = {0}; WebPDemuxer *demuxer = NULL; int frameCount = 0, canvasWidth = 0, canvasHeight = 0; WebPIterator iter = {0}; BOOL iterInited = NO; const uint8_t *payload = NULL; size_t payloadSize = 0; WebPDecoderConfig config = {0}; BOOL hasAlpha = NO; size_t bitsPerComponent = 0, bitsPerPixel = 0, bytesPerRow = 0, destLength = 0; CGBitmapInfo bitmapInfo = 0; WEBP_CSP_MODE colorspace = 0; void *destBytes = NULL; CGDataProviderRef provider = NULL; CGImageRef imageRef = NULL; if (!webpData || CFDataGetLength(webpData) == 0) return NULL; data.bytes = CFDataGetBytePtr(webpData); data.size = CFDataGetLength(webpData); demuxer = WebPDemux(&data); if (!demuxer) goto fail; frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT); if (frameCount == 0) { goto fail; } else if (frameCount == 1) { // single-frame payload = data.bytes; payloadSize = data.size; if (!WebPInitDecoderConfig(&config)) goto fail; if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail; canvasWidth = config.input.width; canvasHeight = config.input.height; } else { // multi-frame canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH); canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT); if (canvasWidth < 1 || canvasHeight < 1) goto fail; if (!WebPDemuxGetFrame(demuxer, 1, &iter)) goto fail; iterInited = YES; if (iter.width > canvasWidth || iter.height > canvasHeight) goto fail; payload = iter.fragment.bytes; payloadSize = iter.fragment.size; if (!WebPInitDecoderConfig(&config)) goto fail; if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail; } if (payload == NULL || payloadSize == 0) goto fail; hasAlpha = config.input.has_alpha; bitsPerComponent = 8; bitsPerPixel = 32; bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * canvasWidth, 32); destLength = bytesPerRow * canvasHeight; if (decodeForDisplay) { bitmapInfo = kCGBitmapByteOrder32Host; bitmapInfo |= hasAlpha ? 
kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst; colorspace = MODE_bgrA; // small endian } else { bitmapInfo = kCGBitmapByteOrderDefault; bitmapInfo |= hasAlpha ? kCGImageAlphaLast : kCGImageAlphaNoneSkipLast; colorspace = MODE_RGBA; } destBytes = calloc(1, destLength); if (!destBytes) goto fail; config.options.use_threads = useThreads; //speed up 23% config.options.bypass_filtering = bypassFiltering; //speed up 11%, cause some banding config.options.no_fancy_upsampling = noFancyUpsampling; //speed down 16%, lose some details config.output.colorspace = colorspace; config.output.is_external_memory = 1; config.output.u.RGBA.rgba = destBytes; config.output.u.RGBA.stride = (int)bytesPerRow; config.output.u.RGBA.size = destLength; VP8StatusCode result = WebPDecode(payload, payloadSize, &config); if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) goto fail; if (iter.x_offset != 0 || iter.y_offset != 0) { void *tmp = calloc(1, destLength); if (tmp) { vImage_Buffer src = {destBytes, canvasHeight, canvasWidth, bytesPerRow}; vImage_Buffer dest = {tmp, canvasHeight, canvasWidth, bytesPerRow}; vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset}; uint8_t backColor[4] = {0}; vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill); memcpy(destBytes, tmp, destLength); free(tmp); } } provider = CGDataProviderCreateWithData(destBytes, destBytes, destLength, YYCGDataProviderReleaseDataCallback); if (!provider) goto fail; destBytes = NULL; // hold by provider imageRef = CGImageCreate(canvasWidth, canvasHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault); CFRelease(provider); if (iterInited) WebPDemuxReleaseIterator(&iter); WebPDemuxDelete(demuxer); return imageRef; fail: if (destBytes) free(destBytes); if (provider) CFRelease(provider); if (iterInited) WebPDemuxReleaseIterator(&iter); if 
(demuxer) WebPDemuxDelete(demuxer);
    return NULL;
}
#else
// WebP support is compiled out (YYIMAGE_WEBP_ENABLED == 0): the three functions
// below are no-op stubs that log once per call and return an "empty" result so
// that callers still link without the WebP component installed.
BOOL YYImageWebPAvailable() {
    return NO;
}
CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
    NSLog(@"WebP decoder is disabled");
    return NULL;
}
NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
    NSLog(@"WebP decoder is disabled");
    return 0;
}
CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData, BOOL decodeForDisplay, BOOL useThreads, BOOL bypassFiltering, BOOL noFancyUpsampling) {
    NSLog(@"WebP decoder is disabled");
    return NULL;
}
#endif


////////////////////////////////////////////////////////////////////////////////
#pragma mark - Decoder

@implementation YYImageFrame

/// Convenience factory: wraps `image` in a new frame; all metadata
/// (index/size/offset/duration/dispose/blend) keeps its zero defaults.
+ (instancetype)frameWithImage:(UIImage *)image {
    YYImageFrame *frame = [self new];
    frame.image = image;
    return frame;
}

/// NSCopying: field-by-field copy. The image is copied as well (`_image.copy`),
/// not just referenced.
- (id)copyWithZone:(NSZone *)zone {
    YYImageFrame *frame = [self.class new];
    frame.index = _index;
    frame.width = _width;
    frame.height = _height;
    frame.offsetX = _offsetX;
    frame.offsetY = _offsetY;
    frame.duration = _duration;
    frame.dispose = _dispose;
    frame.blend = _blend;
    frame.image = _image.copy;
    return frame;
}
@end

// Internal frame object (decoder bookkeeping that is not part of the public
// YYImageFrame API).
@interface _YYImageDecoderFrame : YYImageFrame
@property (nonatomic, assign) BOOL hasAlpha;             ///< Whether frame has alpha.
@property (nonatomic, assign) BOOL isFullSize;           ///< Whether frame fills the whole canvas.
@property (nonatomic, assign) NSUInteger blendFromIndex; ///< Blend from this frame index up to the current frame.
@end

@implementation _YYImageDecoderFrame

/// NSCopying: copies the public fields via super, then the decoder-internal
/// bookkeeping fields declared on this subclass.
- (id)copyWithZone:(NSZone *)zone {
    _YYImageDecoderFrame *frame = [super copyWithZone:zone];
    frame.hasAlpha = _hasAlpha;
    frame.isFullSize = _isFullSize;
    frame.blendFromIndex = _blendFromIndex;
    return frame;
}
@end

@implementation YYImageDecoder {
    pthread_mutex_t _lock; // recursive lock guarding the decode state below
    
    BOOL _sourceTypeDetected;        // image type sniffed from the first bytes
    CGImageSourceRef _source;        // ImageIO source (non-APNG/non-WebP, or APNG fallback)
    yy_png_info *_apngSource;        // custom APNG parser state (nil unless animated PNG)
#if YYIMAGE_WEBP_ENABLED
    WebPDemuxer *_webpSource;        // WebP demuxer (nil unless WebP)
#endif
    UIImageOrientation _orientation; // EXIF orientation of the first frame
    
    dispatch_semaphore_t _framesLock; // binary semaphore guarding _frames only
    NSArray *_frames;                 ///< Array of _YYImageDecoderFrame, image not filled
    BOOL _needBlend;                  // YES if any frame must be composited onto a canvas
    NSUInteger _blendFrameIndex;      // index currently blended on _blendCanvas
    CGContextRef _blendCanvas;        // canvas-sized bitmap context for blending
}

- (void)dealloc {
    if (_source) CFRelease(_source);
    if (_apngSource) yy_png_info_release(_apngSource);
#if YYIMAGE_WEBP_ENABLED
    if (_webpSource) WebPDemuxDelete(_webpSource);
#endif
    if (_blendCanvas) CFRelease(_blendCanvas);
    pthread_mutex_destroy(&_lock);
}

/// Creates a decoder for complete (final) image data.
/// Returns nil when `data` is nil or when no frame could be decoded from it.
+ (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale {
    if (!data) return nil;
    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:scale];
    [decoder updateData:data final:YES];
    if (decoder.frameCount == 0) return nil;
    return decoder;
}

- (instancetype)init {
    return [self initWithScale:[UIScreen mainScreen].scale];
}

/// Designated initializer. A non-positive scale is clamped to 1.
/// Sets up a recursive mutex (decode methods may re-enter) and a semaphore
/// used as a fast lock for the _frames array.
- (instancetype)initWithScale:(CGFloat)scale {
    self = [super init];
    if (scale <= 0) scale = 1;
    _scale = scale;
    _framesLock = dispatch_semaphore_create(1);
    pthread_mutexattr_t attr;
    pthread_mutexattr_init (&attr);
    pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_RECURSIVE);
    pthread_mutex_init (&_lock, &attr);
    pthread_mutexattr_destroy (&attr);
    return self;
}

/// Thread-safe wrapper around _updateData:final: (see that method for rules).
- (BOOL)updateData:(NSData *)data final:(BOOL)final {
    BOOL result = NO;
    pthread_mutex_lock(&_lock);
    result = [self _updateData:data final:final];
    pthread_mutex_unlock(&_lock);
    return result;
}

/// Thread-safe wrapper around _frameAtIndex:decodeForDisplay:.
- (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
    YYImageFrame *result = nil;
    pthread_mutex_lock(&_lock);
    result = [self _frameAtIndex:index
decodeForDisplay:decodeForDisplay];
    pthread_mutex_unlock(&_lock);
    return result;
}

/// Duration of the frame at `index`, or 0 when out of range.
/// Guarded only by _framesLock (not the recursive _lock), so it stays cheap
/// enough to call from a display timer while another thread decodes.
- (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index {
    NSTimeInterval result = 0;
    dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
    if (index < _frames.count) {
        result = ((_YYImageDecoderFrame *)_frames[index]).duration;
    }
    dispatch_semaphore_signal(_framesLock);
    return result;
}

/// Thread-safe wrapper around _framePropertiesAtIndex:.
- (NSDictionary *)framePropertiesAtIndex:(NSUInteger)index {
    NSDictionary *result = nil;
    pthread_mutex_lock(&_lock);
    result = [self _framePropertiesAtIndex:index];
    pthread_mutex_unlock(&_lock);
    return result;
}

/// Thread-safe wrapper around _imageProperties.
- (NSDictionary *)imageProperties {
    NSDictionary *result = nil;
    pthread_mutex_lock(&_lock);
    result = [self _imageProperties];
    pthread_mutex_unlock(&_lock);
    return result;
}

#pragma private (wrap)

/// Core of updateData:final: (caller holds _lock).
/// Rejects updates after finalization or when the new data is shorter than
/// what was already seen (progressive data must only grow). The image type is
/// sniffed once; a later type mismatch is an error. Source parsing is deferred
/// until at least 16 bytes are available (enough for type detection).
- (BOOL)_updateData:(NSData *)data final:(BOOL)final {
    if (_finalized) return NO;
    if (data.length < _data.length) return NO;
    _finalized = final;
    _data = data;
    
    YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
    if (_sourceTypeDetected) {
        if (_type != type) {
            return NO;
        } else {
            [self _updateSource];
        }
    } else {
        if (_data.length > 16) {
            _type = type;
            _sourceTypeDetected = YES;
            [self _updateSource];
        }
    }
    return YES;
}

/// Core of frameAtIndex:decodeForDisplay: (caller holds _lock).
/// Returns nil (0) for an out-of-range index; otherwise a copy of the cached
/// frame metadata with its image decoded and attached.
- (YYImageFrame *)_frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
    if (index >= _frames.count) return 0;
    _YYImageDecoderFrame *frame = [(_YYImageDecoderFrame *)_frames[index] copy];
    BOOL decoded = NO;
    BOOL extendToCanvas = NO;
    if (_type != YYImageTypeICO && decodeForDisplay) { // ICO contains multi-size frame and should not extend to canvas.
extendToCanvas = YES; } if (!_needBlend) { CGImageRef imageRef = [self _newUnblendedImageAtIndex:index extendToCanvas:extendToCanvas decoded:&decoded]; if (!imageRef) return nil; if (decodeForDisplay && !decoded) { CGImageRef imageRefDecoded = YYCGImageCreateDecodedCopy(imageRef, YES); if (imageRefDecoded) { CFRelease(imageRef); imageRef = imageRefDecoded; decoded = YES; } } UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation]; CFRelease(imageRef); if (!image) return nil; image.yy_isDecodedForDisplay = decoded; frame.image = image; return frame; } // blend if (![self _createBlendContextIfNeeded]) return nil; CGImageRef imageRef = NULL; if (_blendFrameIndex + 1 == frame.index) { imageRef = [self _newBlendedImageWithFrame:frame]; _blendFrameIndex = index; } else { // should draw canvas from previous frame _blendFrameIndex = NSNotFound; CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height)); if (frame.blendFromIndex == frame.index) { CGImageRef unblendedImage = [self _newUnblendedImageAtIndex:index extendToCanvas:NO decoded:NULL]; if (unblendedImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendedImage); CFRelease(unblendedImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); if (frame.dispose == YYImageDisposeBackground) { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); } _blendFrameIndex = index; } else { // canvas is not ready for (uint32_t i = (uint32_t)frame.blendFromIndex; i <= (uint32_t)frame.index; i++) { if (i == frame.index) { if (!imageRef) imageRef = [self _newBlendedImageWithFrame:frame]; } else { [self _blendImageWithFrame:_frames[i]]; } } _blendFrameIndex = index; } } if (!imageRef) return nil; UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation]; CFRelease(imageRef); if (!image) return nil; image.yy_isDecodedForDisplay = YES; frame.image 
= image; if (extendToCanvas) { frame.width = _width; frame.height = _height; frame.offsetX = 0; frame.offsetY = 0; frame.dispose = YYImageDisposeNone; frame.blend = YYImageBlendNone; } return frame; } - (NSDictionary *)_framePropertiesAtIndex:(NSUInteger)index { if (index >= _frames.count) return nil; if (!_source) return nil; CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, index, NULL); if (!properties) return nil; return CFBridgingRelease(properties); } - (NSDictionary *)_imageProperties { if (!_source) return nil; CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL); if (!properties) return nil; return CFBridgingRelease(properties); } #pragma private - (void)_updateSource { switch (_type) { case YYImageTypeWebP: { [self _updateSourceWebP]; } break; case YYImageTypePNG: { [self _updateSourceAPNG]; } break; default: { [self _updateSourceImageIO]; } break; } } - (void)_updateSourceWebP { #if YYIMAGE_WEBP_ENABLED _width = 0; _height = 0; _loopCount = 0; if (_webpSource) WebPDemuxDelete(_webpSource); _webpSource = NULL; dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER); _frames = nil; dispatch_semaphore_signal(_framesLock); /* https://developers.google.com/speed/webp/docs/api The documentation said we can use WebPIDecoder to decode webp progressively, but currently it can only returns an empty image (not same as progressive jpegs), so we don't use progressive decoding. When using WebPDecode() to decode multi-frame webp, we will get the error "VP8_STATUS_UNSUPPORTED_FEATURE", so we first use WebPDemuxer to unpack it. 
*/ WebPData webPData = {0}; webPData.bytes = _data.bytes; webPData.size = _data.length; WebPDemuxer *demuxer = WebPDemux(&webPData); if (!demuxer) return; uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT); uint32_t webpLoopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT); uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH); uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT); if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) { WebPDemuxDelete(demuxer); return; } NSMutableArray *frames = [NSMutableArray new]; BOOL needBlend = NO; uint32_t iterIndex = 0; uint32_t lastBlendIndex = 0; WebPIterator iter = {0}; if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index... do { _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new]; [frames addObject:frame]; if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) { frame.dispose = YYImageDisposeBackground; } if (iter.blend_method == WEBP_MUX_BLEND) { frame.blend = YYImageBlendOver; } int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH); int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT); frame.index = iterIndex; frame.duration = iter.duration / 1000.0; frame.width = iter.width; frame.height = iter.height; frame.hasAlpha = iter.has_alpha; frame.blend = iter.blend_method == WEBP_MUX_BLEND; frame.offsetX = iter.x_offset; frame.offsetY = canvasHeight - iter.y_offset - iter.height; BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight); BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0); frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero); if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) { frame.blendFromIndex = lastBlendIndex = iterIndex; } else { if (frame.dispose && frame.isFullSize) { frame.blendFromIndex = lastBlendIndex; lastBlendIndex = iterIndex + 1; } else { frame.blendFromIndex = lastBlendIndex; } } if (frame.index != frame.blendFromIndex) needBlend = YES; 
iterIndex++; } while (WebPDemuxNextFrame(&iter)); WebPDemuxReleaseIterator(&iter); } if (frames.count != webpFrameCount) { WebPDemuxDelete(demuxer); return; } _width = canvasWidth; _height = canvasHeight; _frameCount = frames.count; _loopCount = webpLoopCount; _needBlend = needBlend; _webpSource = demuxer; dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER); _frames = frames; dispatch_semaphore_signal(_framesLock); #else static const char *func = __FUNCTION__; static const int line = __LINE__; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", func, line); }); #endif } - (void)_updateSourceAPNG { /* APNG extends PNG format to support animation, it was supported by ImageIO since iOS 8. We use a custom APNG decoder to make APNG available in old system, so we ignore the ImageIO's APNG frame info. Typically the custom decoder is a bit faster than ImageIO. 
*/ yy_png_info_release(_apngSource); _apngSource = nil; [self _updateSourceImageIO]; // decode first frame if (_frameCount == 0) return; // png decode failed if (!_finalized) return; // ignore multi-frame before finalized yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length); if (!apng) return; // apng decode failed if (apng->apng_frame_num == 0 || (apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) { yy_png_info_release(apng); return; // no animation } if (_source) { // apng decode succeed, no longer need image souce CFRelease(_source); _source = NULL; } uint32_t canvasWidth = apng->header.width; uint32_t canvasHeight = apng->header.height; NSMutableArray *frames = [NSMutableArray new]; BOOL needBlend = NO; uint32_t lastBlendIndex = 0; for (uint32_t i = 0; i < apng->apng_frame_num; i++) { _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new]; [frames addObject:frame]; yy_png_frame_info *fi = apng->apng_frames + i; frame.index = i; frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den); frame.hasAlpha = YES; frame.width = fi->frame_control.width; frame.height = fi->frame_control.height; frame.offsetX = fi->frame_control.x_offset; frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height; BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight); BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0); frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero); switch (fi->frame_control.dispose_op) { case YY_PNG_DISPOSE_OP_BACKGROUND: { frame.dispose = YYImageDisposeBackground; } break; case YY_PNG_DISPOSE_OP_PREVIOUS: { frame.dispose = YYImageDisposePrevious; } break; default: { frame.dispose = YYImageDisposeNone; } break; } switch (fi->frame_control.blend_op) { case YY_PNG_BLEND_OP_OVER: { frame.blend = YYImageBlendOver; } break; default: { frame.blend = YYImageBlendNone; } break; } if (frame.blend 
== YYImageBlendNone && frame.isFullSize) { frame.blendFromIndex = i; if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i; } else { if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) { frame.blendFromIndex = lastBlendIndex; lastBlendIndex = i + 1; } else { frame.blendFromIndex = lastBlendIndex; } } if (frame.index != frame.blendFromIndex) needBlend = YES; } _width = canvasWidth; _height = canvasHeight; _frameCount = frames.count; _loopCount = apng->apng_loop_num; _needBlend = needBlend; _apngSource = apng; dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER); _frames = frames; dispatch_semaphore_signal(_framesLock); } - (void)_updateSourceImageIO { _width = 0; _height = 0; _orientation = UIImageOrientationUp; _loopCount = 0; dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER); _frames = nil; dispatch_semaphore_signal(_framesLock); if (!_source) { if (_finalized) { _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL); } else { _source = CGImageSourceCreateIncremental(NULL); if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false); } } else { CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized); } if (!_source) return; _frameCount = CGImageSourceGetCount(_source); if (_frameCount == 0) return; if (!_finalized) { // ignore multi-frame before finalized _frameCount = 1; } else { if (_type == YYImageTypePNG) { // use custom apng decoder and ignore multi-frame _frameCount = 1; } if (_type == YYImageTypeGIF) { // get gif loop count CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL); if (properties) { CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary); if (gif) { CFTypeRef loop = CFDictionaryGetValue(gif, kCGImagePropertyGIFLoopCount); if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount); } CFRelease(properties); } } } /* ICO, GIF, APNG may contains multi-frame. 
*/ NSMutableArray *frames = [NSMutableArray new]; for (NSUInteger i = 0; i < _frameCount; i++) { _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new]; frame.index = i; frame.blendFromIndex = i; frame.hasAlpha = YES; frame.isFullSize = YES; [frames addObject:frame]; CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL); if (properties) { NSTimeInterval duration = 0; NSInteger orientationValue = 0, width = 0, height = 0; CFTypeRef value = NULL; value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth); if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width); value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight); if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height); if (_type == YYImageTypeGIF) { CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary); if (gif) { // Use the unclamped frame delay if it exists. value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime); if (!value) { // Fall back to the clamped frame delay if the unclamped frame delay does not exist. 
value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime); } if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration); } } frame.width = width; frame.height = height; frame.duration = duration; if (i == 0 && _width + _height == 0) { // init first frame _width = width; _height = height; value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation); if (value) { CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue); _orientation = YYUIImageOrientationFromEXIFValue(orientationValue); } } CFRelease(properties); } } dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER); _frames = frames; dispatch_semaphore_signal(_framesLock); } - (CGImageRef)_newUnblendedImageAtIndex:(NSUInteger)index extendToCanvas:(BOOL)extendToCanvas decoded:(BOOL *)decoded CF_RETURNS_RETAINED { if (!_finalized && index > 0) return NULL; if (_frames.count <= index) return NULL; _YYImageDecoderFrame *frame = _frames[index]; if (_source) { CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)}); if (imageRef && extendToCanvas) { size_t width = CGImageGetWidth(imageRef); size_t height = CGImageGetHeight(imageRef); if (width == _width && height == _height) { CGImageRef imageRefExtended = YYCGImageCreateDecodedCopy(imageRef, YES); if (imageRefExtended) { CFRelease(imageRef); imageRef = imageRefExtended; if (decoded) *decoded = YES; } } else { CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); if (context) { CGContextDrawImage(context, CGRectMake(0, _height - height, width, height), imageRef); CGImageRef imageRefExtended = CGBitmapContextCreateImage(context); CFRelease(context); if (imageRefExtended) { CFRelease(imageRef); imageRef = imageRefExtended; if (decoded) *decoded = YES; } } } } return imageRef; } if (_apngSource) { uint32_t size = 0; uint8_t *bytes = 
yy_png_copy_frame_data_at_index(_data.bytes, _apngSource, (uint32_t)index, &size); if (!bytes) return NULL; CGDataProviderRef provider = CGDataProviderCreateWithData(bytes, bytes, size, YYCGDataProviderReleaseDataCallback); if (!provider) { free(bytes); return NULL; } bytes = NULL; // hold by provider CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL); if (!source) { CFRelease(provider); return NULL; } CFRelease(provider); if(CGImageSourceGetCount(source) < 1) { CFRelease(source); return NULL; } CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)}); CFRelease(source); if (!imageRef) return NULL; if (extendToCanvas) { CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); //bgrA if (context) { CGContextDrawImage(context, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), imageRef); CFRelease(imageRef); imageRef = CGBitmapContextCreateImage(context); CFRelease(context); if (decoded) *decoded = YES; } } return imageRef; } #if YYIMAGE_WEBP_ENABLED if (_webpSource) { WebPIterator iter; if (!WebPDemuxGetFrame(_webpSource, (int)(index + 1), &iter)) return NULL; // demux webp frame data // frame numbers are one-based in webp -----------^ int frameWidth = iter.width; int frameHeight = iter.height; if (frameWidth < 1 || frameHeight < 1) return NULL; int width = extendToCanvas ? (int)_width : frameWidth; int height = extendToCanvas ? 
(int)_height : frameHeight; if (width > _width || height > _height) return NULL; const uint8_t *payload = iter.fragment.bytes; size_t payloadSize = iter.fragment.size; WebPDecoderConfig config; if (!WebPInitDecoderConfig(&config)) { WebPDemuxReleaseIterator(&iter); return NULL; } if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) { WebPDemuxReleaseIterator(&iter); return NULL; } size_t bitsPerComponent = 8; size_t bitsPerPixel = 32; size_t bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * width, 32); size_t length = bytesPerRow * height; CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst; //bgrA void *pixels = calloc(1, length); if (!pixels) { WebPDemuxReleaseIterator(&iter); return NULL; } config.output.colorspace = MODE_bgrA; config.output.is_external_memory = 1; config.output.u.RGBA.rgba = pixels; config.output.u.RGBA.stride = (int)bytesPerRow; config.output.u.RGBA.size = length; VP8StatusCode result = WebPDecode(payload, payloadSize, &config); // decode if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) { WebPDemuxReleaseIterator(&iter); free(pixels); return NULL; } WebPDemuxReleaseIterator(&iter); if (extendToCanvas && (iter.x_offset != 0 || iter.y_offset != 0)) { void *tmp = calloc(1, length); if (tmp) { vImage_Buffer src = {pixels, height, width, bytesPerRow}; vImage_Buffer dest = {tmp, height, width, bytesPerRow}; vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset}; uint8_t backColor[4] = {0}; vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill); if (error == kvImageNoError) { memcpy(pixels, tmp, length); } free(tmp); } } CGDataProviderRef provider = CGDataProviderCreateWithData(pixels, pixels, length, YYCGDataProviderReleaseDataCallback); if (!provider) { free(pixels); return NULL; } pixels = NULL; // hold by provider CGImageRef image = CGImageCreate(width, height, bitsPerComponent, 
bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault); CFRelease(provider); if (decoded) *decoded = YES; return image; } #endif return NULL; } - (BOOL)_createBlendContextIfNeeded { if (!_blendCanvas) { _blendFrameIndex = NSNotFound; _blendCanvas = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); } BOOL suc = _blendCanvas != NULL; return suc; } - (void)_blendImageWithFrame:(_YYImageDecoderFrame *)frame { if (frame.dispose == YYImageDisposePrevious) { // nothing } else if (frame.dispose == YYImageDisposeBackground) { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); } else { // no dispose if (frame.blend == YYImageBlendOver) { CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } } else { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } } } } - (CGImageRef)_newBlendedImageWithFrame:(_YYImageDecoderFrame *)frame CF_RETURNS_RETAINED{ CGImageRef imageRef = NULL; if (frame.dispose == YYImageDisposePrevious) { if (frame.blend == YYImageBlendOver) { CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas); CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), 
unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height)); if (previousImage) { CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage); CFRelease(previousImage); } } else { CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas); CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height)); if (previousImage) { CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage); CFRelease(previousImage); } } } else if (frame.dispose == YYImageDisposeBackground) { if (frame.blend == YYImageBlendOver) { CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); } else { CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); CGContextClearRect(_blendCanvas, 
CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); } } else { // no dispose if (frame.blend == YYImageBlendOver) { CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); } else { CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL]; if (unblendImage) { CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height)); CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage); CFRelease(unblendImage); } imageRef = CGBitmapContextCreateImage(_blendCanvas); } } return imageRef; } @end //////////////////////////////////////////////////////////////////////////////// #pragma mark - Encoder @implementation YYImageEncoder { NSMutableArray *_images; NSMutableArray *_durations; } - (instancetype)init { @throw [NSException exceptionWithName:@"YYImageEncoder init error" reason:@"YYImageEncoder must be initialized with a type. Use 'initWithType:' instead." 
userInfo:nil];
    return [self initWithType:YYImageTypeUnknown];
}

/// Designated initializer. Returns nil for unknown/unsupported types (and for
/// WebP when the WebP component is not compiled in), otherwise seeds a
/// per-format default quality (JPEG/JPEG2000: 0.9, lossless formats: 1.0 with
/// _lossless = YES, WebP: 0.8).
- (instancetype)initWithType:(YYImageType)type {
    if (type == YYImageTypeUnknown || type >= YYImageTypeOther) {
        NSLog(@"[%s: %d] Unsupported image type:%d",__FUNCTION__, __LINE__, (int)type);
        return nil;
    }
#if !YYIMAGE_WEBP_ENABLED
    if (type == YYImageTypeWebP) {
        NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", __FUNCTION__, __LINE__);
        return nil;
    }
#endif
    self = [super init];
    if (!self) return nil;
    _type = type;
    _images = [NSMutableArray new];
    _durations = [NSMutableArray new];
    switch (type) {
        case YYImageTypeJPEG:
        case YYImageTypeJPEG2000: {
            _quality = 0.9;
        } break;
        case YYImageTypeTIFF:
        case YYImageTypeBMP:
        case YYImageTypeGIF:
        case YYImageTypeICO:
        case YYImageTypeICNS:
        case YYImageTypePNG: {
            _quality = 1;
            _lossless = YES;
        } break;
        case YYImageTypeWebP: {
            _quality = 0.8;
        } break;
        default: break;
    }
    return self;
}

/// Clamps quality into [0, 1] before storing.
- (void)setQuality:(CGFloat)quality {
    _quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
}

/// Appends a frame from a UIImage; silently ignored when the image has no
/// CGImage backing. Negative durations are clamped to 0.
/// NOTE: _images holds heterogeneous entries (UIImage / NSData / NSURL);
/// the encoding methods dispatch on the element class.
- (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration {
    if (!image.CGImage) return;
    duration = duration < 0 ? 0 : duration;
    [_images addObject:image];
    [_durations addObject:@(duration)];
}

/// Appends a frame from raw encoded image data; empty data is ignored.
- (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration {
    if (data.length == 0) return;
    duration = duration < 0 ? 0 : duration;
    [_images addObject:data];
    [_durations addObject:@(duration)];
}

/// Appends a frame from an image file path; empty paths are ignored.
- (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration {
    if (path.length == 0) return;
    duration = duration < 0 ?
// (continues -addImageWithFile:duration: begun on the previous line)
                 0 : duration;
    NSURL *url = [NSURL URLWithString:path];
    if (!url) return;
    [_images addObject:url];
    [_durations addObject:@(duration)];
}

/// Whether ImageIO can encode the current type with the frames collected so
/// far. (Spelling kept as-is; renaming would break the internal call sites.)
- (BOOL)_imageIOAvaliable {
    switch (_type) {
        case YYImageTypeJPEG:
        case YYImageTypeJPEG2000:
        case YYImageTypeTIFF:
        case YYImageTypeBMP:
        case YYImageTypeICO:
        case YYImageTypeICNS:
        case YYImageTypeGIF: {
            return _images.count > 0;
        } break;
        case YYImageTypePNG: {
            return _images.count == 1; // multi-frame PNG (APNG) needs the custom encoder
        } break;
        case YYImageTypeWebP: {
            return NO;
        } break;
        default: return NO;
    }
}

/// Creates an ImageIO destination writing either to a file path (NSString) or
/// to a growable buffer (NSMutableData). Caller must CFRelease the result.
- (CGImageDestinationRef)_newImageDestination:(id)dest imageCount:(NSUInteger)count CF_RETURNS_RETAINED {
    if (!dest) return nil;
    CGImageDestinationRef destination = NULL;
    if ([dest isKindOfClass:[NSString class]]) {
        NSURL *url = [[NSURL alloc] initFileURLWithPath:dest];
        if (url) {
            destination = CGImageDestinationCreateWithURL((CFURLRef)url, YYImageTypeToUTType(_type), count, NULL);
        }
    } else if ([dest isKindOfClass:[NSMutableData class]]) {
        destination = CGImageDestinationCreateWithData((CFMutableDataRef)dest, YYImageTypeToUTType(_type), count, NULL);
    }
    return destination;
}

/// Feeds every collected frame into an ImageIO destination, applying the GIF
/// loop count / per-frame delay, or lossy quality for the other formats.
- (void)_encodeImageWithDestination:(CGImageDestinationRef)destination imageCount:(NSUInteger)count {
    if (_type == YYImageTypeGIF) {
        NSDictionary *gifProperty = @{(__bridge id)kCGImagePropertyGIFDictionary:
                                          @{(__bridge id)kCGImagePropertyGIFLoopCount: @(_loopCount)}};
        CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)gifProperty);
    }
    for (int i = 0; i < count; i++) {
        @autoreleasepool {
            id imageSrc = _images[i];
            NSDictionary *frameProperty = NULL;
            if (_type == YYImageTypeGIF && count > 1) {
                frameProperty = @{(NSString *)kCGImagePropertyGIFDictionary : @{(NSString *)kCGImagePropertyGIFDelayTime:_durations[i]}};
            } else {
                frameProperty = @{(id)kCGImageDestinationLossyCompressionQuality : @(_quality)};
            }
            if ([imageSrc isKindOfClass:[UIImage class]]) {
                UIImage *image = imageSrc;
                if (image.imageOrientation != UIImageOrientationUp && image.CGImage) {
                    CGBitmapInfo info = CGImageGetBitmapInfo(image.CGImage) | CGImageGetAlphaInfo(image.CGImage);
                    CGImageRef rotated = YYCGImageCreateCopyWithOrientation(image.CGImage, image.imageOrientation, info);
                    if (rotated) {
                        image = [UIImage imageWithCGImage:rotated];
                        CFRelease(rotated);
                    }
                }
                // FIX: was `((UIImage *)imageSrc).CGImage`, which silently discarded
                // the orientation-normalized copy created above; add the (possibly
                // rotated) `image` instead so EXIF-rotated frames encode upright.
                if (image.CGImage) CGImageDestinationAddImage(destination, image.CGImage, (CFDictionaryRef)frameProperty);
            } else if ([imageSrc isKindOfClass:[NSURL class]]) {
                CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)imageSrc, NULL);
                if (source) {
                    CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
                    CFRelease(source);
                }
            } else if ([imageSrc isKindOfClass:[NSData class]]) {
                CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageSrc, NULL);
                if (source) {
                    CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
                    CFRelease(source);
                }
            }
        }
    }
}

/// Decodes the frame source at `index` back to a CGImage (+1 retained),
/// normalizing orientation; optionally force-decodes the bitmap for display.
- (CGImageRef)_newCGImageFromIndex:(NSUInteger)index decoded:(BOOL)decoded CF_RETURNS_RETAINED {
    UIImage *image = nil;
    id imageSrc = _images[index];
    if ([imageSrc isKindOfClass:[UIImage class]]) {
        image = imageSrc;
    } else if ([imageSrc isKindOfClass:[NSURL class]]) {
        // NOTE(review): the URL was built with +URLWithString: from a plain path
        // in -addImageWithFile:, so absoluteString is the original file path here.
        image = [UIImage imageWithContentsOfFile:((NSURL *)imageSrc).absoluteString];
    } else if ([imageSrc isKindOfClass:[NSData class]]) {
        image = [UIImage imageWithData:imageSrc];
    }
    if (!image) return NULL;
    CGImageRef imageRef = image.CGImage;
    if (!imageRef) return NULL;
    if (image.imageOrientation != UIImageOrientationUp) {
        return YYCGImageCreateCopyWithOrientation(imageRef, image.imageOrientation, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
    }
    if (decoded) {
        return YYCGImageCreateDecodedCopy(imageRef, YES);
    }
    return (CGImageRef)CFRetain(imageRef);
}

/// Encodes all frames to NSData via ImageIO (GIF keeps every frame, the other
/// formats take only the first one).
- (NSData *)_encodeWithImageIO {
    NSMutableData *data = [NSMutableData new];
    NSUInteger count = _type == YYImageTypeGIF ?
// (continues -_encodeWithImageIO begun on the previous line)
                _images.count : 1;
    CGImageDestinationRef destination = [self _newImageDestination:data imageCount:count];
    BOOL suc = NO;
    if (destination) {
        [self _encodeImageWithDestination:destination imageCount:count];
        suc = CGImageDestinationFinalize(destination);
        CFRelease(destination);
    }
    if (suc && data.length > 0) {
        return data;
    } else {
        return nil;
    }
}

/// Encodes directly to a file at `path` via ImageIO; returns whether the
/// destination was finalized successfully.
- (BOOL)_encodeWithImageIO:(NSString *)path {
    NSUInteger count = _type == YYImageTypeGIF ? _images.count : 1;
    CGImageDestinationRef destination = [self _newImageDestination:path imageCount:count];
    BOOL suc = NO;
    if (destination) {
        [self _encodeImageWithDestination:destination imageCount:count];
        suc = CGImageDestinationFinalize(destination);
        CFRelease(destination);
    }
    return suc;
}

/// Custom APNG encoder: PNG-encodes every frame, then splices APNG control
/// chunks (acTL/fcTL/fdAT) into the first frame's PNG stream.
- (NSData *)_encodeAPNG {
    // encode APNG (ImageIO doesn't support APNG encoding, so we use a custom encoder)
    // Pass 1: PNG-encode every frame and track the union canvas size.
    NSMutableArray *pngDatas = [NSMutableArray new];
    NSMutableArray *pngSizes = [NSMutableArray new];
    NSUInteger canvasWidth = 0, canvasHeight = 0;
    for (int i = 0; i < _images.count; i++) {
        CGImageRef decoded = [self _newCGImageFromIndex:i decoded:YES];
        if (!decoded) return nil;
        CGSize size = CGSizeMake(CGImageGetWidth(decoded), CGImageGetHeight(decoded));
        [pngSizes addObject:[NSValue valueWithCGSize:size]];
        if (canvasWidth < size.width) canvasWidth = size.width;
        if (canvasHeight < size.height) canvasHeight = size.height;
        CFDataRef frameData = YYCGImageCreateEncodedData(decoded, YYImageTypePNG, 1);
        CFRelease(decoded);
        if (!frameData) return nil;
        [pngDatas addObject:(__bridge id)(frameData)]; // array retains; balance the create
        CFRelease(frameData);
        if (size.width < 1 || size.height < 1) return nil;
    }
    // If the first frame is smaller than the canvas, re-render it onto a
    // canvas-sized bitmap (the APNG default image must cover the canvas).
    CGSize firstFrameSize = [(NSValue *)[pngSizes firstObject] CGSizeValue];
    if (firstFrameSize.width < canvasWidth || firstFrameSize.height < canvasHeight) {
        CGImageRef decoded = [self _newCGImageFromIndex:0 decoded:YES];
        if (!decoded) return nil;
        CGContextRef context = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8, 0,
                                                     YYCGColorSpaceGetDeviceRGB(),
                                                     kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
        if (!context) {
            CFRelease(decoded);
            return nil;
        }
        CGContextDrawImage(context, CGRectMake(0, canvasHeight - firstFrameSize.height, firstFrameSize.width, firstFrameSize.height), decoded);
        CFRelease(decoded);
        CGImageRef extendedImage = CGBitmapContextCreateImage(context);
        CFRelease(context);
        if (!extendedImage) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedData(extendedImage, YYImageTypePNG, 1);
        // FIX: extendedImage was only released on the failure branch, leaking one
        // canvas-sized CGImage per encode; release it unconditionally once encoded.
        CFRelease(extendedImage);
        if (!frameData) return nil;
        pngDatas[0] = (__bridge id)(frameData);
        CFRelease(frameData);
    }
    NSData *firstFrameData = pngDatas[0];
    // Parse the first frame's PNG chunk layout; APNG chunks are spliced into it.
    yy_png_info *info = yy_png_info_create(firstFrameData.bytes, (uint32_t)firstFrameData.length);
    if (!info) return nil;
    NSMutableData *result = [NSMutableData new];
    BOOL insertBefore = NO, insertAfter = NO;
    uint32_t apngSequenceIndex = 0;
    // PNG signature (8 bytes).
    uint32_t png_header[2];
    png_header[0] = YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47);
    png_header[1] = YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A);
    [result appendBytes:png_header length:8];
    for (int i = 0; i < info->chunk_num; i++) {
        yy_png_chunk_info *chunk = info->chunks + i;
        if (!insertBefore && chunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
            insertBefore = YES;
            // insert acTL (APNG Control)
            uint32_t acTL[5] = {0};
            acTL[0] = yy_swap_endian_uint32(8); //length
            acTL[1] = YY_FOUR_CC('a', 'c', 'T', 'L'); // fourcc
            acTL[2] = yy_swap_endian_uint32((uint32_t)pngDatas.count); // num frames
            acTL[3] = yy_swap_endian_uint32((uint32_t)_loopCount); // num plays
            acTL[4] = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(acTL + 1), 12)); //crc32
            [result appendBytes:acTL length:20];
            // insert fcTL (first frame control)
            yy_png_chunk_fcTL chunk_fcTL = {0};
            chunk_fcTL.sequence_number = apngSequenceIndex;
            chunk_fcTL.width = (uint32_t)firstFrameSize.width;
            chunk_fcTL.height = (uint32_t)firstFrameSize.height;
            yy_png_delay_to_fraction([(NSNumber *)_durations[0] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
            chunk_fcTL.delay_num = chunk_fcTL.delay_num; // no-op self-assignment, kept as-is
// (continues the first-frame fcTL writer of -_encodeAPNG begun on the previous line)
            chunk_fcTL.delay_den = chunk_fcTL.delay_den; // no-op self-assignment, kept as-is
            chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
            chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
            // Serialize fcTL: 4-byte length + 4-byte fourcc + 26-byte body + 4-byte CRC.
            uint8_t fcTL[38] = {0};
            *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
            *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
            yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
            *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
            [result appendBytes:fcTL length:38];
            apngSequenceIndex++;
        }
        if (!insertAfter && insertBefore && chunk->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
            insertAfter = YES;
            // insert fcTL and fdAT (APNG frame control and data)
            for (int i = 1; i < pngDatas.count; i++) {
                NSData *frameData = pngDatas[i];
                yy_png_info *frame = yy_png_info_create(frameData.bytes, (uint32_t)frameData.length);
                if (!frame) {
                    yy_png_info_release(info);
                    return nil;
                }
                // insert fcTL (frame control for this frame)
                yy_png_chunk_fcTL chunk_fcTL = {0};
                chunk_fcTL.sequence_number = apngSequenceIndex;
                chunk_fcTL.width = frame->header.width;
                chunk_fcTL.height = frame->header.height;
                yy_png_delay_to_fraction([(NSNumber *)_durations[i] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
                chunk_fcTL.delay_num = chunk_fcTL.delay_num; // no-op self-assignment, kept as-is
                chunk_fcTL.delay_den = chunk_fcTL.delay_den; // no-op self-assignment, kept as-is
                chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
                chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
                uint8_t fcTL[38] = {0};
                *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
                *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
                yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
                *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
                [result appendBytes:fcTL length:38];
                apngSequenceIndex++;
                // insert fdAT (frame data): each IDAT of this frame becomes an fdAT,
                // i.e. the same payload prefixed with a 4-byte sequence number.
                for (int d = 0; d < frame->chunk_num; d++) {
                    yy_png_chunk_info *dchunk = frame->chunks + d;
                    if (dchunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
                        uint32_t length = yy_swap_endian_uint32(dchunk->length + 4);
                        [result appendBytes:&length length:4]; //length
                        uint32_t fourcc = YY_FOUR_CC('f', 'd', 'A', 'T');
                        [result appendBytes:&fourcc length:4]; //fourcc
                        uint32_t sq = yy_swap_endian_uint32(apngSequenceIndex);
                        [result appendBytes:&sq length:4]; //data (sq)
                        [result appendBytes:(((uint8_t *)frameData.bytes) + dchunk->offset + 8) length:dchunk->length]; //data
                        // CRC covers fourcc + sequence number + payload.
                        uint8_t *bytes = ((uint8_t *)result.bytes) + result.length - dchunk->length - 8;
                        uint32_t crc = yy_swap_endian_uint32((uint32_t)crc32(0, bytes, dchunk->length + 8));
                        [result appendBytes:&crc length:4]; //crc
                        apngSequenceIndex++;
                    }
                }
                yy_png_info_release(frame);
            }
        }
        // Copy the current chunk of the first frame verbatim
        // (length + fourcc + data + crc = chunk->length + 12 bytes).
        [result appendBytes:((uint8_t *)firstFrameData.bytes) + chunk->offset length:chunk->length + 12];
    }
    yy_png_info_release(info);
    return result;
}

/// Encodes the collected frames as (possibly animated) WebP.
/// Returns nil when the WebP component is not compiled in or encoding fails.
- (NSData *)_encodeWebP {
#if YYIMAGE_WEBP_ENABLED
    // encode webp
    NSMutableArray *webpDatas = [NSMutableArray new];
    for (NSUInteger i = 0; i < _images.count; i++) {
        CGImageRef image = [self _newCGImageFromIndex:i decoded:NO];
        if (!image) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedWebPData(image, _lossless, _quality, 4, YYImagePresetDefault);
        CFRelease(image);
        if (!frameData) return nil;
        [webpDatas addObject:(__bridge id)frameData];
        CFRelease(frameData);
    }
    if (webpDatas.count == 1) {
        return webpDatas.firstObject;
    } else {
        // multi-frame webp
        WebPMux *mux = WebPMuxNew();
        if (!mux) return nil;
        for (NSUInteger i = 0; i < _images.count; i++) {
            NSData *data = webpDatas[i];
            NSNumber *duration = _durations[i];
            WebPMuxFrameInfo frame = {0};
            frame.bitstream.bytes = data.bytes;
            frame.bitstream.size = data.length;
            frame.duration = (int)(duration.floatValue * 1000.0); // WebP wants milliseconds
            frame.id = WEBP_CHUNK_ANMF;
            frame.dispose_method = WEBP_MUX_DISPOSE_BACKGROUND;
            frame.blend_method = WEBP_MUX_NO_BLEND;
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        WebPMuxAnimParams params = {(uint32_t)0, (int)_loopCount};
        // FIX: the extracted source read `¶ms` (mojibake of the HTML entity
        // `&para;` + "ms", i.e. a corrupted `&params`); restored `&params`,
        // without which this line does not compile.
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
WebPData output_data; WebPMuxError error = WebPMuxAssemble(mux, &output_data); WebPMuxDelete(mux); if (error != WEBP_MUX_OK) { return nil; } NSData *result = [NSData dataWithBytes:output_data.bytes length:output_data.size]; WebPDataClear(&output_data); return result.length ? result : nil; } #else return nil; #endif } - (NSData *)encode { if (_images.count == 0) return nil; if ([self _imageIOAvaliable]) return [self _encodeWithImageIO]; if (_type == YYImageTypePNG) return [self _encodeAPNG]; if (_type == YYImageTypeWebP) return [self _encodeWebP]; return nil; } - (BOOL)encodeToFile:(NSString *)path { if (_images.count == 0 || path.length == 0) return NO; if ([self _imageIOAvaliable]) return [self _encodeWithImageIO:path]; NSData *data = [self encode]; if (!data) return NO; return [data writeToFile:path atomically:YES]; } + (NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality { YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type]; encoder.quality = quality; [encoder addImage:image duration:0]; return [encoder encode]; } + (NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality { if (!decoder || decoder.frameCount == 0) return nil; YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type]; encoder.quality = quality; for (int i = 0; i < decoder.frameCount; i++) { UIImage *frame = [decoder frameAtIndex:i decodeForDisplay:YES].image; [encoder addImageWithData:UIImagePNGRepresentation(frame) duration:[decoder frameDurationAtIndex:i]]; } return encoder.encode; } @end @implementation UIImage (YYImageCoder) - (instancetype)yy_imageByDecoded { if (self.yy_isDecodedForDisplay) return self; CGImageRef imageRef = self.CGImage; if (!imageRef) return self; CGImageRef newImageRef = YYCGImageCreateDecodedCopy(imageRef, YES); if (!newImageRef) return self; UIImage *newImage = [[self.class alloc] initWithCGImage:newImageRef scale:self.scale 
// (continues -yy_imageByDecoded begun on the previous line)
                                             orientation:self.imageOrientation];
    CGImageRelease(newImageRef);
    if (!newImage) newImage = self; // decode failed, return self.
    newImage.yy_isDecodedForDisplay = YES;
    return newImage;
}

// Multi-frame images and sprite sheets never need predecoding; otherwise the
// flag is stored as an associated object on the image.
- (BOOL)yy_isDecodedForDisplay {
    if (self.images.count > 1 || [self isKindOfClass:[YYSpriteSheetImage class]]) return YES;
    NSNumber *num = objc_getAssociatedObject(self, @selector(yy_isDecodedForDisplay));
    return [num boolValue];
}

- (void)setYy_isDecodedForDisplay:(BOOL)isDecodedForDisplay {
    objc_setAssociatedObject(self, @selector(yy_isDecodedForDisplay), @(isDecodedForDisplay), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

// Saves the image data to the photo album on a background queue; the
// completion block is always delivered on the main thread.
// NOTE(review): ALAssetsLibrary is long deprecated in favor of Photos.framework.
- (void)yy_saveToAlbumWithCompletionBlock:(void(^)(NSURL *assetURL, NSError *error))completionBlock {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *data = [self _yy_dataRepresentationForSystem:YES];
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        [library writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:^(NSURL *assetURL, NSError *error){
            if (!completionBlock) return;
            if (pthread_main_np()) {
                completionBlock(assetURL, error);
            } else {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completionBlock(assetURL, error);
                });
            }
        }];
    });
}

- (NSData *)yy_imageDataRepresentation {
    return [self _yy_dataRepresentationForSystem:NO];
}

/// @param forSystem YES: used for system album (PNG/JPEG/GIF), NO: used for YYImage (PNG/JPEG/GIF/WebP)
- (NSData *)_yy_dataRepresentationForSystem:(BOOL)forSystem {
    NSData *data = nil;
    // Animated YYImage: hand back the original container data where allowed.
    if ([self isKindOfClass:[YYImage class]]) {
        YYImage *image = (id)self;
        if (image.animatedImageData) {
            if (forSystem) { // system only support GIF and PNG
                if (image.animatedImageType == YYImageTypeGIF ||
                    image.animatedImageType == YYImageTypePNG) {
                    data = image.animatedImageData;
                }
            } else {
                data = image.animatedImageData;
            }
        }
    }
    // Still image: normalize orientation, then PNG (alpha) or JPEG (opaque).
    if (!data) {
        CGImageRef imageRef = self.CGImage ? (CGImageRef)CFRetain(self.CGImage) : nil;
        if (imageRef) {
            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
            CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
            BOOL hasAlpha = NO;
            if (alphaInfo == kCGImageAlphaPremultipliedLast ||
                alphaInfo == kCGImageAlphaPremultipliedFirst ||
                alphaInfo == kCGImageAlphaLast ||
                alphaInfo == kCGImageAlphaFirst) {
                hasAlpha = YES;
            }
            if (self.imageOrientation != UIImageOrientationUp) {
                CGImageRef rotated = YYCGImageCreateCopyWithOrientation(imageRef, self.imageOrientation, bitmapInfo | alphaInfo);
                if (rotated) {
                    CFRelease(imageRef);
                    imageRef = rotated;
                }
            }
            @autoreleasepool {
                UIImage *newImage = [UIImage imageWithCGImage:imageRef];
                if (newImage) {
                    if (hasAlpha) {
                        // PNG preserves the alpha channel
                        data = UIImagePNGRepresentation([UIImage imageWithCGImage:imageRef]);
                    } else {
                        data = UIImageJPEGRepresentation([UIImage imageWithCGImage:imageRef], 0.9); // same as Apple's example
                    }
                }
            }
            CFRelease(imageRef);
        }
    }
    if (!data) {
        data = UIImagePNGRepresentation(self);
    }
    return data;
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYSpriteSheetImage.h
================================================
//
//  YYSpriteImage.h
//  YYImage
//
//  Created by ibireme on 15/4/21.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

// NOTE(review): the angle-bracket header paths below were stripped by the text
// extraction (the #import targets and __has_include() arguments are empty);
// restore them from the upstream YYImage sources before compiling.
#import
#if __has_include()
#import
#elif __has_include()
#import
#else
#import "YYAnimatedImageView.h"
#endif

NS_ASSUME_NONNULL_BEGIN

/**
 An image to display sprite sheet animation.
 
 @discussion It is a fully compatible `UIImage` subclass.
 The animation can be played by YYAnimatedImageView.
Sample Code: // 8 * 12 sprites in a single sheet image UIImage *spriteSheet = [UIImage imageNamed:@"sprite-sheet"]; NSMutableArray *contentRects = [NSMutableArray new]; NSMutableArray *durations = [NSMutableArray new]; for (int j = 0; j < 12; j++) { for (int i = 0; i < 8; i++) { CGRect rect; rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12); rect.origin.x = img.size.width / 8 * i; rect.origin.y = img.size.height / 12 * j; [contentRects addObject:[NSValue valueWithCGRect:rect]]; [durations addObject:@(1 / 60.0)]; } } YYSpriteSheetImage *sprite; sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img contentRects:contentRects frameDurations:durations loopCount:0]; YYAnimatedImageView *imgView = [YYAnimatedImageView new]; imgView.size = CGSizeMake(img.size.width / 8, img.size.height / 12); imgView.image = sprite; @discussion It can also be used to display single frame in sprite sheet image. Sample Code: YYSpriteSheetImage *sheet = ...; UIImageView *imageView = ...; imageView.image = sheet; imageView.layer.contentsRect = [sheet contentsRectForCALayerAtIndex:6]; */ @interface YYSpriteSheetImage : UIImage /** Creates and returns an image object. @param image The sprite sheet image (contains all frames). @param contentRects The sprite sheet image frame rects in the image coordinates. The rectangle should not outside the image's bounds. The objects in this array should be created with [NSValue valueWithCGRect:]. @param frameDurations The sprite sheet image frame's durations in seconds. The objects in this array should be NSNumber. @param loopCount Animation loop count, 0 means infinite looping. @return An image object, or nil if an error occurs. 
 */
- (nullable instancetype)initWithSpriteSheetImage:(UIImage *)image
                                     contentRects:(NSArray *)contentRects
                                   frameDurations:(NSArray *)frameDurations
                                        loopCount:(NSUInteger)loopCount;

// Frame rects (NSValue-wrapped CGRect) in the sheet image's coordinates.
@property (nonatomic, readonly) NSArray *contentRects;
// Per-frame durations in seconds (NSNumber).
@property (nonatomic, readonly) NSArray *frameDurations;
// Animation loop count; 0 means infinite looping.
@property (nonatomic, readonly) NSUInteger loopCount;

/**
 Get the contents rect for CALayer.
 See "contentsRect" property in CALayer for more information.
 
 @param index Index of frame.
 @return Contents Rect.
 */
- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index;

@end

NS_ASSUME_NONNULL_END


================================================
FILE: MFPictureBrowserDemo/Pods/YYImage/YYImage/YYSpriteSheetImage.m
================================================
//
//  YYSpriteImage.m
//  YYImage
//
//  Created by ibireme on 15/4/21.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYSpriteSheetImage.h"

@implementation YYSpriteSheetImage

// Validates that the sheet has a CGImage and that the rect/duration arrays are
// non-empty and the same length, then stores defensive copies.
- (instancetype)initWithSpriteSheetImage:(UIImage *)image
                            contentRects:(NSArray *)contentRects
                          frameDurations:(NSArray *)frameDurations
                               loopCount:(NSUInteger)loopCount {
    if (!image.CGImage) return nil;
    if (contentRects.count < 1 || frameDurations.count < 1) return nil;
    if (contentRects.count != frameDurations.count) return nil;
    self = [super initWithCGImage:image.CGImage scale:image.scale orientation:image.imageOrientation];
    if (!self) return nil;
    _contentRects = contentRects.copy;
    _frameDurations = frameDurations.copy;
    _loopCount = loopCount;
    return self;
}

// Converts the frame rect at `index` into CALayer's unit-coordinate
// contentsRect (clamped to [0,1]x[0,1]; returns the whole image on failure).
- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index {
    CGRect layerRect = CGRectMake(0, 0, 1, 1);
    if (index >= _contentRects.count) return layerRect;
    CGSize imageSize = self.size;
    CGRect rect = [self animatedImageContentsRectAtIndex:index];
    if (imageSize.width > 0.01 && imageSize.height > 0.01) {
        layerRect.origin.x = rect.origin.x / imageSize.width;
        layerRect.origin.y = rect.origin.y / imageSize.height;
        layerRect.size.width = rect.size.width / imageSize.width;
        layerRect.size.height = rect.size.height / imageSize.height;
        layerRect = CGRectIntersection(layerRect, CGRectMake(0, 0, 1, 1));
        if (CGRectIsNull(layerRect) || CGRectIsEmpty(layerRect)) {
            layerRect = CGRectMake(0, 0, 1, 1);
        }
    }
    return layerRect;
}

#pragma mark @protocol YYAnimatedImage

- (NSUInteger)animatedImageFrameCount {
    return _contentRects.count;
}

- (NSUInteger)animatedImageLoopCount {
    return _loopCount;
}

// Frames share the sheet's backing store, so no extra per-frame memory cost.
- (NSUInteger)animatedImageBytesPerFrame {
    return 0;
}

// Every frame is the sheet itself; the contents rect selects the sprite.
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    return self;
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    if (index >= _frameDurations.count) return 0;
    return ((NSNumber *)_frameDurations[index]).doubleValue;
}

- (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index {
    if (index >= _contentRects.count) return CGRectZero;
    return ((NSValue *)_contentRects[index]).CGRectValue;
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYWebImage/LICENSE
================================================
The MIT License (MIT)

Copyright (c) 2015 ibireme

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/README.md ================================================ YYWebImage ============== [![License MIT](https://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/ibireme/YYWebImage/master/LICENSE)  [![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)](https://github.com/Carthage/Carthage)  [![CocoaPods](http://img.shields.io/cocoapods/v/YYWebImage.svg?style=flat)](http://cocoapods.org/?q= YYWebImage)  [![CocoaPods](http://img.shields.io/cocoapods/p/YYWebImage.svg?style=flat)](http://cocoapods.org/?q= YYWebImage)  [![Support](https://img.shields.io/badge/support-iOS%206%2B%20-blue.svg?style=flat)](https://www.apple.com/nl/ios/)  [![Build Status](https://travis-ci.org/ibireme/YYWebImage.svg?branch=master)](https://travis-ci.org/ibireme/YYWebImage) ![ProgressiveBlur~](https://raw.github.com/ibireme/YYWebImage/master/Demo/Demo.gif ) YYWebImage is an asynchronous image loading framework (a component of [YYKit](https://github.com/ibireme/YYKit)). It was created as an improved replacement for SDWebImage, PINRemoteImage and FLAnimatedImage. It use [YYCache](https://github.com/ibireme/YYCache) to support memory and disk cache, and [YYImage](https://github.com/ibireme/YYImage) to support WebP/APNG/GIF image decode.
See these projects for more information. Features ============== - Asynchronous image load from remote or local URL. - Animated WebP, APNG, GIF support (dynamic buffer, lower memory usage). - Baseline/progressive/interlaced image decode support. - Image loading category for UIImageView, UIButton, MKAnnotationView and CALayer. - Image effect: blur, round corner, resize, color tint, crop, rotate and more. - High performance memory and disk image cache. - High performance image loader to avoid blocking the main thread. - Fully documented. Usage ============== ### Load image from URL // load from remote url imageView.yy_imageURL = [NSURL URLWithString:@"http://github.com/logo.png"]; // load from local url imageView.yy_imageURL = [NSURL fileURLWithPath:@"/tmp/logo.png"]; ### Load animated image // just replace `UIImageView` with `YYAnimatedImageView` UIImageView *imageView = [YYAnimatedImageView new]; imageView.yy_imageURL = [NSURL URLWithString:@"http://github.com/ani.webp"]; ### Load image progressively // progressive [imageView yy_setImageWithURL:url options:YYWebImageOptionProgressive]; // progressive with blur and fade animation (see the demo at the top of this page) [imageView yy_setImageWithURL:url options:YYWebImageOptionProgressiveBlur | YYWebImageOptionSetImageWithFadeAnimation]; ### Load and process image // 1. download image from remote // 2. get download progress // 3. resize image and add round corner // 4.
set image with a fade animation [imageView yy_setImageWithURL:url placeholder:nil options:YYWebImageOptionSetImageWithFadeAnimation progress:^(NSInteger receivedSize, NSInteger expectedSize) { progress = (float)receivedSize / expectedSize; } transform:^UIImage *(UIImage *image, NSURL *url) { image = [image yy_imageByResizeToSize:CGSizeMake(100, 100) contentMode:UIViewContentModeCenter]; return [image yy_imageByRoundCornerRadius:10]; } completion:^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) { if (from == YYWebImageFromDiskCache) { NSLog(@"load from disk cache"); } }]; ###Image Cache YYImageCache *cache = [YYWebImageManager sharedManager].cache; // get cache capacity cache.memoryCache.totalCost; cache.memoryCache.totalCount; cache.diskCache.totalCost; cache.diskCache.totalCount; // clear cache [cache.memoryCache removeAllObjects]; [cache.diskCache removeAllObjects]; // clear disk cache with progress [cache.diskCache removeAllObjectsWithProgressBlock:^(int removedCount, int totalCount) { // progress } endBlock:^(BOOL error) { // end }]; Installation ============== ### CocoaPods 1. Update cocoapods to the latest version. 2. Add `pod 'YYWebImage'` to your Podfile. 3. Run `pod install` or `pod update`. 4. Import \. 5. Notice: it doesn't include WebP subspec by default, if you want to support WebP format, you may add `pod 'YYImage/WebP'` to your Podfile. You may call `YYImageWebPAvailable()` to check whether the WebP subspec is installed correctly. ### Carthage 1. Add `github "ibireme/YYWebImage"` to your Cartfile. 2. Run `carthage update --platform ios` and add the framework to your project. 3. Import \. 4. Notice: carthage framework doesn't include webp component, if you want to support WebP format, use CocoaPods or install manually. You may call `YYImageWebPAvailable()` to check whether the WebP library is installed correctly. ### Manually 1. Download all the files in the YYWebImage subdirectory. 2. 
Add the source files to your Xcode project. 3. Link with required frameworks: * UIKit * CoreFoundation * QuartzCore * AssetsLibrary * ImageIO * Accelerate * MobileCoreServices * sqlite3 * libz 4. Import `YYWebImage.h`. 5. Notice: if you want to support WebP format, you may add `Vendor/WebP.framework`(static library) to your Xcode project. Documentation ============== Full API documentation is available on [CocoaDocs](http://cocoadocs.org/docsets/YYWebImage/).
You can also install documentation locally using [appledoc](https://github.com/tomaz/appledoc). Requirements ============== This library requires `iOS 6.0+` and `Xcode 7.0+`. License ============== YYWebImage is provided under the MIT license. See LICENSE file for details.

--- 中文介绍 ============== ![ProgressiveBlur~](https://raw.github.com/ibireme/YYWebImage/master/Demo/Demo.gif ) YYWebImage 是一个异步图片加载框架 ([YYKit](https://github.com/ibireme/YYKit) 组件之一). 其设计目的是试图替代 SDWebImage、PINRemoteImage、FLAnimatedImage 等开源框架,它支持这些开源框架的大部分功能,同时增加了大量新特性、并且有不小的性能提升。 它底层用 [YYCache](https://github.com/ibireme/YYCache) 实现了内存和磁盘缓存, 用 [YYImage](https://github.com/ibireme/YYImage) 实现了 WebP/APNG/GIF 动图的解码和播放。
你可以查看这些项目以获得更多信息。 特性 ============== - 异步的图片加载,支持 HTTP 和本地文件。 - 支持 GIF、APNG、WebP 动画(动态缓存,低内存占用)。 - 支持逐行扫描、隔行扫描、渐进式图像加载。 - UIImageView、UIButton、MKAnnotationView、CALayer 的 Category 方法支持。 - 常见图片处理:模糊、圆角、大小调整、裁切、旋转、色调等。 - 高性能的内存和磁盘缓存。 - 高性能的图片设置方式,以避免主线程阻塞。 - 每个类和方法都有完善的文档注释。 用法 ============== ###从 URL 加载图片 // 加载网络图片 imageView.yy_imageURL = [NSURL URLWithString:@"http://github.com/logo.png"]; // 加载本地图片 imageView.yy_imageURL = [NSURL fileURLWithPath:@"/tmp/logo.png"]; ###加载动图 // 只需要把 `UIImageView` 替换为 `YYAnimatedImageView` 即可。 UIImageView *imageView = [YYAnimatedImageView new]; imageView.yy_imageURL = [NSURL URLWithString:@"http://github.com/ani.webp"]; ###渐进式图片加载 // 渐进式:边下载边显示 [imageView yy_setImageWithURL:url options:YYWebImageOptionProgressive]; // 渐进式加载,增加模糊效果和渐变动画 (见本页最上方的GIF演示) [imageView yy_setImageWithURL:url options:YYWebImageOptionProgressiveBlur | YYWebImageOptionSetImageWithFadeAnimation]; ###加载、处理图片 // 1. 下载图片 // 2. 获得图片下载进度 // 3. 调整图片大小、加圆角 // 4. 显示图片时增加一个淡入动画,以获得更好的用户体验 [imageView yy_setImageWithURL:url placeholder:nil options:YYWebImageOptionSetImageWithFadeAnimation progress:^(NSInteger receivedSize, NSInteger expectedSize) { progress = (float)receivedSize / expectedSize; } transform:^UIImage *(UIImage *image, NSURL *url) { image = [image yy_imageByResizeToSize:CGSizeMake(100, 100) contentMode:UIViewContentModeCenter]; return [image yy_imageByRoundCornerRadius:10]; } completion:^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) { if (from == YYWebImageFromDiskCache) { NSLog(@"load from disk cache"); } }]; ###图片缓存 YYImageCache *cache = [YYWebImageManager sharedManager].cache; // 获取缓存大小 cache.memoryCache.totalCost; cache.memoryCache.totalCount; cache.diskCache.totalCost; cache.diskCache.totalCount; // 清空缓存 [cache.memoryCache removeAllObjects]; [cache.diskCache removeAllObjects]; // 清空磁盘缓存,带进度回调 [cache.diskCache removeAllObjectsWithProgressBlock:^(int removedCount, int totalCount) { // progress } endBlock:^(BOOL 
error) { // end }]; 安装 ============== ### CocoaPods 1. 将 cocoapods 更新至最新版本. 2. 在 Podfile 中添加 `pod 'YYWebImage'`。 3. 执行 `pod install` 或 `pod update`。 4. 导入 \。 5. 注意:pod 配置并没有包含 WebP 组件, 如果你需要支持 WebP,可以在 Podfile 中添加 `pod 'YYImage/WebP'`。你可以调用 `YYImageWebPAvailable()` 来检查一下 WebP 组件是否被正确安装。 ### Carthage 1. 在 Cartfile 中添加 `github "ibireme/YYWebImage"`。 2. 执行 `carthage update --platform ios` 并将生成的 framework 添加到你的工程。 3. 导入 \。 4. 注意: carthage framework 并没有包含 webp 组件。如果你需要支持 WebP,可以用 CocoaPods 安装,或者手动安装。 ### 手动安装 1. 下载 YYWebImage 文件夹内的所有内容。 2. 将 YYWebImage 内的源文件添加(拖放)到你的工程。 3. 链接以下 frameworks: * UIKit * CoreFoundation * QuartzCore * AssetsLibrary * ImageIO * Accelerate * MobileCoreServices * sqlite3 * libz 4. 导入 `YYWebImage.h`。 5. 注意:如果你需要支持 WebP,可以将 `Vendor/WebP.framework`(静态库) 加入你的工程。你可以调用 `YYImageWebPAvailable()` 来检查一下 WebP 组件是否被正确安装。 文档 ============== 你可以在 [CocoaDocs](http://cocoadocs.org/docsets/YYWebImage/) 查看在线 API 文档,也可以用 [appledoc](https://github.com/tomaz/appledoc) 本地生成文档。 系统要求 ============== 该项目最低支持 `iOS 6.0` 和 `Xcode 7.0`。 许可证 ============== YYWebImage 使用 MIT 许可证,详情见 LICENSE 文件。 相关链接 ============== [移动端图片格式调研](http://blog.ibireme.com/2015/11/02/mobile_image_benchmark/)
[iOS 处理图片的一些小 Tip](http://blog.ibireme.com/2015/11/02/ios_image_tips/) ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/CALayer+YYWebImage.h ================================================ // // CALayer+YYWebImage.h // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #import #if __has_include() #import #else #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** Web image methods for CALayer. It will set image to layer.contents. */ @interface CALayer (YYWebImage) #pragma mark - image /** Current image URL. @discussion Set a new value to this property will cancel the previous request operation and create a new request operation to fetch image. Set nil to clear the image and image URL. */ @property (nullable, nonatomic, strong) NSURL *yy_imageURL; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param options The options to use when request the image. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL options:(YYWebImageOptions)options; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. 
*/ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options completion:(nullable YYWebImageCompletionBlock)completion; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder he image to be set initially, until the image request finishes. @param options The options to use when request the image. @param manager The manager to create image request operation. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options manager:(nullable YYWebImageManager *)manager progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Cancel the current image request. 
*/ - (void)yy_cancelCurrentImageRequest; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/CALayer+YYWebImage.m ================================================ // // CALayer+YYWebImage.m // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "CALayer+YYWebImage.h" #import "YYWebImageOperation.h" #import "_YYWebImageSetter.h" #import // Dummy class for category @interface CALayer_YYWebImage : NSObject @end @implementation CALayer_YYWebImage @end static int _YYWebImageSetterKey; @implementation CALayer (YYWebImage) - (NSURL *)yy_imageURL { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); return setter.imageURL; } - (void)setYy_imageURL:(NSURL *)imageURL { [self yy_setImageWithURL:imageURL placeholder:nil options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder { [self yy_setImageWithURL:imageURL placeholder:placeholder options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL options:(YYWebImageOptions)options { [self yy_setImageWithURL:imageURL placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress 
transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL]; manager = manager ? manager : [YYWebImageManager sharedManager]; _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (!setter) { setter = [_YYWebImageSetter new]; objc_setAssociatedObject(self, &_YYWebImageSetterKey, setter, OBJC_ASSOCIATION_RETAIN_NONATOMIC); } int32_t sentinel = [setter cancelWithNewURL:imageURL]; _yy_dispatch_sync_on_main_queue(^{ if ((options & YYWebImageOptionSetImageWithFadeAnimation) && !(options & YYWebImageOptionAvoidSetImage)) { [self removeAnimationForKey:_YYWebImageFadeAnimationKey]; } if (!imageURL) { if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.contents = (id)placeholder.CGImage; } return; } // get the image from memory as quickly as possible UIImage *imageFromMemory = nil; if (manager.cache && !(options & YYWebImageOptionUseNSURLCache) && !(options & YYWebImageOptionRefreshImageCache)) { imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory]; } if (imageFromMemory) { if (!(options & YYWebImageOptionAvoidSetImage)) { self.contents = (id)imageFromMemory.CGImage; } if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil); return; } if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.contents = (id)placeholder.CGImage; } __weak typeof(self) _self = 
self; dispatch_async([_YYWebImageSetter setterQueue], ^{ YYWebImageProgressBlock _progress = nil; if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) { dispatch_async(dispatch_get_main_queue(), ^{ progress(receivedSize, expectedSize); }); }; __block int32_t newSentinel = 0; __block __weak typeof(setter) weakSetter = nil; YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) { __strong typeof(_self) self = _self; BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && image && !(options & YYWebImageOptionAvoidSetImage); BOOL showFade = (options & YYWebImageOptionSetImageWithFadeAnimation); dispatch_async(dispatch_get_main_queue(), ^{ BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel; if (setImage && self && !sentinelChanged) { if (showFade) { CATransition *transition = [CATransition animation]; transition.duration = stage == YYWebImageStageFinished ? 
_YYWebImageFadeTime : _YYWebImageProgressiveFadeTime; transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut]; transition.type = kCATransitionFade; [self addAnimation:transition forKey:_YYWebImageFadeAnimationKey]; } self.contents = (id)image.CGImage; } if (completion) { if (sentinelChanged) { completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil); } else { completion(image, url, from, stage, error); } } }); }; newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion]; weakSetter = setter; }); }); } - (void)yy_cancelCurrentImageRequest { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (setter) [setter cancel]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/MKAnnotationView+YYWebImage.h ================================================ // // MKAnnotationView+YYWebImage.h // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #import #if __has_include() #import #else #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** Web image methods for MKAnnotationView. */ @interface MKAnnotationView (YYWebImage) /** Current image URL. @discussion Set a new value to this property will cancel the previous request operation and create a new request operation to fetch image. Set nil to clear the image and image URL. */ @property (nullable, nonatomic, strong) NSURL *yy_imageURL; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. 
*/ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param options The options to use when request the image. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL options:(YYWebImageOptions)options; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options completion:(nullable YYWebImageCompletionBlock)completion; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder he image to be set initially, until the image request finishes. @param options The options to use when request the image. @param manager The manager to create image request operation. 
@param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options manager:(nullable YYWebImageManager *)manager progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Cancel the current image request. */ - (void)yy_cancelCurrentImageRequest; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/MKAnnotationView+YYWebImage.m ================================================ // // MKAnnotationView+YYWebImage.m // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. 
// #import "MKAnnotationView+YYWebImage.h" #import "YYWebImageOperation.h" #import "_YYWebImageSetter.h" #import // Dummy class for category @interface MKAnnotationView_YYWebImage : NSObject @end @implementation MKAnnotationView_YYWebImage @end static int _YYWebImageSetterKey; @implementation MKAnnotationView (YYWebImage) - (NSURL *)yy_imageURL { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); return setter.imageURL; } - (void)setYy_imageURL:(NSURL *)imageURL { [self yy_setImageWithURL:imageURL placeholder:nil options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder { [self yy_setImageWithURL:imageURL placeholder:placeholder options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL options:(YYWebImageOptions)options { [self yy_setImageWithURL:imageURL placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress 
transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL]; manager = manager ? manager : [YYWebImageManager sharedManager]; _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (!setter) { setter = [_YYWebImageSetter new]; objc_setAssociatedObject(self, &_YYWebImageSetterKey, setter, OBJC_ASSOCIATION_RETAIN_NONATOMIC); } int32_t sentinel = [setter cancelWithNewURL:imageURL]; _yy_dispatch_sync_on_main_queue(^{ if ((options & YYWebImageOptionSetImageWithFadeAnimation) && !(options & YYWebImageOptionAvoidSetImage)) { if (!self.highlighted) { [self.layer removeAnimationForKey:_YYWebImageFadeAnimationKey]; } } if (!imageURL) { if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.image = placeholder; } return; } // get the image from memory as quickly as possible UIImage *imageFromMemory = nil; if (manager.cache && !(options & YYWebImageOptionUseNSURLCache) && !(options & YYWebImageOptionRefreshImageCache)) { imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory]; } if (imageFromMemory) { if (!(options & YYWebImageOptionAvoidSetImage)) { self.image = imageFromMemory; } if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil); return; } if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.image = placeholder; } __weak typeof(self) _self = self; dispatch_async([_YYWebImageSetter setterQueue], ^{ YYWebImageProgressBlock _progress = nil; if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) { dispatch_async(dispatch_get_main_queue(), ^{ progress(receivedSize, expectedSize); }); }; __block int32_t newSentinel = 0; __block __weak typeof(setter) weakSetter = nil; YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL *url, YYWebImageFromType from, 
YYWebImageStage stage, NSError *error) { __strong typeof(_self) self = _self; BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && image && !(options & YYWebImageOptionAvoidSetImage); BOOL showFade = ((options & YYWebImageOptionSetImageWithFadeAnimation) && !self.highlighted); dispatch_async(dispatch_get_main_queue(), ^{ BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel; if (setImage && self && !sentinelChanged) { if (showFade) { CATransition *transition = [CATransition animation]; transition.duration = stage == YYWebImageStageFinished ? _YYWebImageFadeTime : _YYWebImageProgressiveFadeTime; transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut]; transition.type = kCATransitionFade; [self.layer addAnimation:transition forKey:_YYWebImageFadeAnimationKey]; } self.image = image; } if (completion) { if (sentinelChanged) { completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil); } else { completion(image, url, from, stage, error); } } }); }; newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion]; weakSetter = setter; }); }); } - (void)yy_cancelCurrentImageRequest { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (setter) [setter cancel]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIButton+YYWebImage.h ================================================ // // UIButton+YYWebImage.h // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. 
// #import #if __has_include() #import #else #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** Web image methods for UIButton. */ @interface UIButton (YYWebImage) #pragma mark - image /** Current image URL for the specified state. @return The image URL, or nil. */ - (nullable NSURL *)yy_imageURLForState:(UIControlState)state; /** Set the button's image with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder; /** Set the button's image with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param options The options to use when request the image. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state options:(YYWebImageOptions)options; /** Set the button's image with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options completion:(nullable YYWebImageCompletionBlock)completion; /** Set the button's image with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. 
@param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Set the button's image with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param manager The manager to create image request operation. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options manager:(nullable YYWebImageManager *)manager progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Cancel the current image request for a specified state. @param state The state that uses the specified image. 
*/ - (void)yy_cancelImageRequestForState:(UIControlState)state; #pragma mark - background image /** Current backgroundImage URL for the specified state. @return The image URL, or nil. */ - (nullable NSURL *)yy_backgroundImageURLForState:(UIControlState)state; /** Set the button's backgroundImage with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. */ - (void)yy_setBackgroundImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder; /** Set the button's backgroundImage with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param options The options to use when request the image. */ - (void)yy_setBackgroundImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state options:(YYWebImageOptions)options; /** Set the button's backgroundImage with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setBackgroundImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options completion:(nullable YYWebImageCompletionBlock)completion; /** Set the button's backgroundImage with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. 
@param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setBackgroundImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Set the button's backgroundImage with a specified URL for the specified state. @param imageURL The image url (remote or local file path). @param state The state that uses the specified image. @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param manager The manager to create image request operation. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setBackgroundImageWithURL:(nullable NSURL *)imageURL forState:(UIControlState)state placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options manager:(nullable YYWebImageManager *)manager progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** Cancel the current backgroundImage request for a specified state. @param state The state that uses the specified image. 
*/ - (void)yy_cancelBackgroundImageRequestForState:(UIControlState)state; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIButton+YYWebImage.m ================================================ // // UIButton+YYWebImage.m // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "UIButton+YYWebImage.h" #import "YYWebImageOperation.h" #import "_YYWebImageSetter.h" #import // Dummy class for category @interface UIButton_YYWebImage : NSObject @end @implementation UIButton_YYWebImage @end static inline NSNumber *UIControlStateSingle(UIControlState state) { if (state & UIControlStateHighlighted) return @(UIControlStateHighlighted); if (state & UIControlStateDisabled) return @(UIControlStateDisabled); if (state & UIControlStateSelected) return @(UIControlStateSelected); return @(UIControlStateNormal); } static inline NSArray *UIControlStateMulti(UIControlState state) { NSMutableArray *array = [NSMutableArray new]; if (state & UIControlStateHighlighted) [array addObject:@(UIControlStateHighlighted)]; if (state & UIControlStateDisabled) [array addObject:@(UIControlStateDisabled)]; if (state & UIControlStateSelected) [array addObject:@(UIControlStateSelected)]; if ((state & 0xFF) == 0) [array addObject:@(UIControlStateNormal)]; return array; } static int _YYWebImageSetterKey; static int _YYWebImageBackgroundSetterKey; @interface _YYWebImageSetterDicForButton : NSObject - (_YYWebImageSetter *)setterForState:(NSNumber *)state; - (_YYWebImageSetter *)lazySetterForState:(NSNumber *)state; @end @implementation _YYWebImageSetterDicForButton { NSMutableDictionary *_dic; dispatch_semaphore_t _lock; } - (instancetype)init { self = [super init]; _lock = dispatch_semaphore_create(1); _dic = [NSMutableDictionary new]; return 
self; } - (_YYWebImageSetter *)setterForState:(NSNumber *)state { dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER); _YYWebImageSetter *setter = _dic[state]; dispatch_semaphore_signal(_lock); return setter; } - (_YYWebImageSetter *)lazySetterForState:(NSNumber *)state { dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER); _YYWebImageSetter *setter = _dic[state]; if (!setter) { setter = [_YYWebImageSetter new]; _dic[state] = setter; } dispatch_semaphore_signal(_lock); return setter; } @end @implementation UIButton (YYWebImage) #pragma mark - image - (void)_yy_setImageWithURL:(NSURL *)imageURL forSingleState:(NSNumber *)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL]; manager = manager ? manager : [YYWebImageManager sharedManager]; _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (!dic) { dic = [_YYWebImageSetterDicForButton new]; objc_setAssociatedObject(self, &_YYWebImageSetterKey, dic, OBJC_ASSOCIATION_RETAIN_NONATOMIC); } _YYWebImageSetter *setter = [dic lazySetterForState:state]; int32_t sentinel = [setter cancelWithNewURL:imageURL]; _yy_dispatch_sync_on_main_queue(^{ if (!imageURL) { if (!(options & YYWebImageOptionIgnorePlaceHolder)) { [self setImage:placeholder forState:state.integerValue]; } return; } // get the image from memory as quickly as possible UIImage *imageFromMemory = nil; if (manager.cache && !(options & YYWebImageOptionUseNSURLCache) && !(options & YYWebImageOptionRefreshImageCache)) { imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory]; } if (imageFromMemory) { if (!(options & YYWebImageOptionAvoidSetImage)) { [self 
setImage:imageFromMemory forState:state.integerValue];
            }
            // Memory-cache hit: report completion immediately and skip the async fetch.
            if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil);
            return;
        }
        if (!(options & YYWebImageOptionIgnorePlaceHolder)) {
            [self setImage:placeholder forState:state.integerValue];
        }
        __weak typeof(self) _self = self;
        dispatch_async([_YYWebImageSetter setterQueue], ^{
            // Forward progress callbacks to the main queue.
            YYWebImageProgressBlock _progress = nil;
            if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    progress(receivedSize, expectedSize);
                });
            };
            // The sentinel detects whether this request was cancelled or replaced
            // by a newer one while the download was in flight.
            __block int32_t newSentinel = 0;
            __block __weak typeof(setter) weakSetter = nil;
            YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) {
                __strong typeof(_self) self = _self;
                BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && image && !(options & YYWebImageOptionAvoidSetImage);
                dispatch_async(dispatch_get_main_queue(), ^{
                    BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel;
                    if (setImage && self && !sentinelChanged) {
                        [self setImage:image forState:state.integerValue];
                    }
                    if (completion) {
                        if (sentinelChanged) {
                            // A newer request superseded this one; report cancellation.
                            completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil);
                        } else {
                            completion(image, url, from, stage, error);
                        }
                    }
                });
            };
            newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion];
            weakSetter = setter;
        });
    });
}

// Cancel the in-flight image request (if any) for one single control state.
- (void)_yy_cancelImageRequestForSingleState:(NSNumber *)state {
    _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageSetterKey);
    _YYWebImageSetter *setter = [dic setterForState:state];
    if (setter) [setter cancel];
}

// Current image URL for the given control state (nil when none was set).
- (NSURL *)yy_imageURLForState:(UIControlState)state {
    _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageSetterKey);
    _YYWebImageSetter *setter = [dic setterForState:UIControlStateSingle(state)];
    return setter.imageURL;
}

// Convenience overloads below all funnel into the designated
// yy_setImageWithURL:forState:placeholder:options:manager:progress:transform:completion:.
- (void)yy_setImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder {
    [self yy_setImageWithURL:imageURL forState:state placeholder:placeholder options:kNilOptions manager:nil progress:nil transform:nil completion:nil];
}

- (void)yy_setImageWithURL:(NSURL *)imageURL forState:(UIControlState)state options:(YYWebImageOptions)options {
    [self yy_setImageWithURL:imageURL forState:state placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil];
}

- (void)yy_setImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion {
    [self yy_setImageWithURL:imageURL forState:state placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion];
}

- (void)yy_setImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    [self yy_setImageWithURL:imageURL forState:state placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion];
}

- (void)yy_setImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    // A UIControlState may combine several single states; fan out to each one.
    for (NSNumber *num in UIControlStateMulti(state)) {
        [self _yy_setImageWithURL:imageURL forSingleState:num placeholder:placeholder options:options manager:manager progress:progress transform:transform completion:completion];
    }
}

- (void)yy_cancelImageRequestForState:(UIControlState)state
{
    for (NSNumber *num in UIControlStateMulti(state)) {
        [self _yy_cancelImageRequestForSingleState:num];
    }
}

#pragma mark - background image

// Background-image twin of _yy_setImageWithURL:forSingleState:…; identical flow
// but targets setBackgroundImage:forState: and a separate associated setter dictionary.
- (void)_yy_setBackgroundImageWithURL:(NSURL *)imageURL forSingleState:(NSNumber *)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    // Tolerate callers that pass an NSString where an NSURL is expected.
    if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL];
    manager = manager ? manager : [YYWebImageManager sharedManager];
    _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageBackgroundSetterKey);
    if (!dic) {
        dic = [_YYWebImageSetterDicForButton new];
        objc_setAssociatedObject(self, &_YYWebImageBackgroundSetterKey, dic, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
    }
    _YYWebImageSetter *setter = [dic lazySetterForState:state];
    // Cancel any previous request for this state and get a fresh sentinel.
    int32_t sentinel = [setter cancelWithNewURL:imageURL];
    _yy_dispatch_sync_on_main_queue(^{
        if (!imageURL) {
            if (!(options & YYWebImageOptionIgnorePlaceHolder)) {
                [self setBackgroundImage:placeholder forState:state.integerValue];
            }
            return;
        }
        // get the image from memory as quickly as possible
        UIImage *imageFromMemory = nil;
        if (manager.cache &&
            !(options & YYWebImageOptionUseNSURLCache) &&
            !(options & YYWebImageOptionRefreshImageCache)) {
            imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory];
        }
        if (imageFromMemory) {
            if (!(options & YYWebImageOptionAvoidSetImage)) {
                [self setBackgroundImage:imageFromMemory forState:state.integerValue];
            }
            if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil);
            return;
        }
        if (!(options & YYWebImageOptionIgnorePlaceHolder)) {
            [self setBackgroundImage:placeholder forState:state.integerValue];
        }
        __weak typeof(self) _self = self;
        dispatch_async([_YYWebImageSetter setterQueue], ^{
            YYWebImageProgressBlock _progress = nil;
            if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    progress(receivedSize, expectedSize);
                });
            };
            __block int32_t newSentinel = 0;
            __block __weak typeof(setter) weakSetter = nil;
            YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) {
                __strong typeof(_self) self = _self;
                BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && image && !(options & YYWebImageOptionAvoidSetImage);
                dispatch_async(dispatch_get_main_queue(), ^{
                    BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel;
                    if (setImage && self && !sentinelChanged) {
                        [self setBackgroundImage:image forState:state.integerValue];
                    }
                    if (completion) {
                        if (sentinelChanged) {
                            completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil);
                        } else {
                            completion(image, url, from, stage, error);
                        }
                    }
                });
            };
            newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion];
            weakSetter = setter;
        });
    });
}

- (void)_yy_cancelBackgroundImageRequestForSingleState:(NSNumber *)state {
    _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageBackgroundSetterKey);
    _YYWebImageSetter *setter = [dic setterForState:state];
    if (setter) [setter cancel];
}

- (NSURL *)yy_backgroundImageURLForState:(UIControlState)state {
    _YYWebImageSetterDicForButton *dic = objc_getAssociatedObject(self, &_YYWebImageBackgroundSetterKey);
    _YYWebImageSetter *setter = [dic setterForState:UIControlStateSingle(state)];
    return setter.imageURL;
}

// Convenience overloads; all funnel into the designated background-image setter.
- (void)yy_setBackgroundImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder {
    [self yy_setBackgroundImageWithURL:imageURL forState:state placeholder:placeholder options:kNilOptions manager:nil progress:nil
    transform:nil completion:nil];
}

- (void)yy_setBackgroundImageWithURL:(NSURL *)imageURL forState:(UIControlState)state options:(YYWebImageOptions)options {
    [self yy_setBackgroundImageWithURL:imageURL forState:state placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil];
}

- (void)yy_setBackgroundImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion {
    [self yy_setBackgroundImageWithURL:imageURL forState:state placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion];
}

- (void)yy_setBackgroundImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    [self yy_setBackgroundImageWithURL:imageURL forState:state placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion];
}

- (void)yy_setBackgroundImageWithURL:(NSURL *)imageURL forState:(UIControlState)state placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    // Fan out a combined control state to each of its single states.
    for (NSNumber *num in UIControlStateMulti(state)) {
        [self _yy_setBackgroundImageWithURL:imageURL forSingleState:num placeholder:placeholder options:options manager:manager progress:progress transform:transform completion:completion];
    }
}

- (void)yy_cancelBackgroundImageRequestForState:(UIControlState)state {
    for (NSNumber *num in UIControlStateMulti(state)) {
        [self _yy_cancelBackgroundImageRequestForSingleState:num];
    }
}
@end

================================================ FILE: 
MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIImage+YYWebImage.h ================================================
//
//  UIImage+YYWebImage.h
//  YYWebImage
//
//  Created by ibireme on 13/4/4.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

// NOTE(review): the extraction appears to have stripped the angle-bracket
// argument of this #import (presumably <UIKit/UIKit.h>) — confirm against upstream.
#import

NS_ASSUME_NONNULL_BEGIN

/**
 Provides some common methods for `UIImage`.
 Image processing is based on CoreGraphics and vImage.
 */
@interface UIImage (YYWebImage)

#pragma mark - Create image
///=============================================================================
/// @name Create image
///=============================================================================

/**
 Create an animated image with GIF data. After created, you can access
 the images via property '.images'. If the data is not animated gif, this
 function is same as [UIImage imageWithData:data scale:scale];

 @discussion It has a better display performance, but costs more memory
             (width * height * frames Bytes). It is only suited to display small
             gif such as animated emoticon. If you want to display large gif,
             see `YYImage`.

 @param data  GIF data.

 @param scale The scale factor

 @return A new image created from GIF, or nil when an error occurs.
 */
+ (nullable UIImage *)yy_imageWithSmallGIFData:(NSData *)data scale:(CGFloat)scale;

/**
 Create and return a 1x1 point size image with the given color.

 @param color  The color.
 */
+ (nullable UIImage *)yy_imageWithColor:(UIColor *)color;

/**
 Create and return a pure color image with the given color and size.

 @param color  The color.
 @param size   New image's size.
 */
+ (nullable UIImage *)yy_imageWithColor:(UIColor *)color size:(CGSize)size;

/**
 Create and return an image with custom draw code.

 @param size      The image size.
 @param drawBlock The draw block.

 @return The new image.
 */
+ (nullable UIImage *)yy_imageWithSize:(CGSize)size drawBlock:(void (^)(CGContextRef context))drawBlock;

#pragma mark - Image Info
///=============================================================================
/// @name Image Info
///=============================================================================

/**
 Whether this image has alpha channel.
 */
- (BOOL)yy_hasAlphaChannel;

#pragma mark - Modify Image
///=============================================================================
/// @name Modify Image
///=============================================================================

/**
 Draws the entire image in the specified rectangle, content changed with
 the contentMode.

 @discussion This method draws the entire image in the current graphics context,
 respecting the image's orientation setting. In the default coordinate system,
 images are situated down and to the right of the origin of the specified
 rectangle. This method respects any transforms applied to the current graphics
 context, however.

 @param rect        The rectangle in which to draw the image.
 @param contentMode Draw content mode
 @param clips       A Boolean value that determines whether the content is
                    confined to the rect.
 */
- (void)yy_drawInRect:(CGRect)rect withContentMode:(UIViewContentMode)contentMode clipsToBounds:(BOOL)clips;

/**
 Returns a new image which is scaled from this image.
 The image will be stretched as needed.

 @param size  The new size to be scaled, values should be positive.

 @return The new image with the given size.
 */
- (nullable UIImage *)yy_imageByResizeToSize:(CGSize)size;

/**
 Returns a new image which is scaled from this image.
 The image content will be changed with the contentMode.

 @param size        The new size to be scaled, values should be positive.
 @param contentMode The content mode for image content.

 @return The new image with the given size.
 */
- (nullable UIImage *)yy_imageByResizeToSize:(CGSize)size contentMode:(UIViewContentMode)contentMode;

/**
 Returns a new image which is cropped from this image.

 @param rect  Image's inner rect.

 @return The new image, or nil if an error occurs.
 */
- (nullable UIImage *)yy_imageByCropToRect:(CGRect)rect;

/**
 Returns a new image which is edge inset from this image.

 @param insets  Inset (positive) for each of the edges, values can be negative to 'outset'.
 @param color   Extend edge's fill color, nil means clear color.

 @return The new image, or nil if an error occurs.
 */
- (nullable UIImage *)yy_imageByInsetEdge:(UIEdgeInsets)insets withColor:(nullable UIColor *)color;

/**
 Rounds a new image with a given corner size.

 @param radius  The radius of each corner oval. Values larger than half the
                rectangle's width or height are clamped appropriately to half
                the width or height.
 */
- (nullable UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius;

/**
 Rounds a new image with a given corner size.

 @param radius       The radius of each corner oval. Values larger than half the
                     rectangle's width or height are clamped appropriately to
                     half the width or height.

 @param borderWidth  The inset border line width. Values larger than half the
                     rectangle's width or height are clamped appropriately to
                     half the width or height.

 @param borderColor  The border stroke color. nil means clear color.
 */
- (nullable UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius
                                      borderWidth:(CGFloat)borderWidth
                                      borderColor:(nullable UIColor *)borderColor;

/**
 Rounds a new image with a given corner size.

 @param radius         The radius of each corner oval. Values larger than half
                       the rectangle's width or height are clamped appropriately
                       to half the width or height.

 @param corners        A bitmask value that identifies the corners that you want
                       rounded. You can use this parameter to round only a
                       subset of the corners of the rectangle.

 @param borderWidth    The inset border line width.
                       Values larger than half the rectangle's width or height
                       are clamped appropriately to half the width or height.

 @param borderColor    The border stroke color. nil means clear color.

 @param borderLineJoin The border line join.
 */
- (nullable UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius
                                          corners:(UIRectCorner)corners
                                      borderWidth:(CGFloat)borderWidth
                                      borderColor:(nullable UIColor *)borderColor
                                   borderLineJoin:(CGLineJoin)borderLineJoin;

/**
 Returns a new rotated image (relative to the center).

 @param radians   Rotated radians in counterclockwise.⟲

 @param fitSize   YES: new image's size is extend to fit all content.
                  NO: image's size will not change, content may be clipped.
 */
- (nullable UIImage *)yy_imageByRotate:(CGFloat)radians fitSize:(BOOL)fitSize;

/**
 Returns a new image rotated counterclockwise by a quarter‑turn (90°). ⤺
 The width and height will be exchanged.
 */
- (nullable UIImage *)yy_imageByRotateLeft90;

/**
 Returns a new image rotated clockwise by a quarter‑turn (90°). ⤼
 The width and height will be exchanged.
 */
- (nullable UIImage *)yy_imageByRotateRight90;

/**
 Returns a new image rotated 180° . ↻
 */
- (nullable UIImage *)yy_imageByRotate180;

/**
 Returns a vertically flipped image. ⥯
 */
- (nullable UIImage *)yy_imageByFlipVertical;

/**
 Returns a horizontally flipped image. ⇋
 */
- (nullable UIImage *)yy_imageByFlipHorizontal;

#pragma mark - Image Effect
///=============================================================================
/// @name Image Effect
///=============================================================================

/**
 Tint the image in alpha channel with the given color.

 @param color  The color.
 */
- (nullable UIImage *)yy_imageByTintColor:(UIColor *)color;

/**
 Returns a grayscaled image.
 */
- (nullable UIImage *)yy_imageByGrayscale;

/**
 Applies a blur effect to this image. Suitable for blur any content.
 */
- (nullable UIImage *)yy_imageByBlurSoft;

/**
 Applies a blur effect to this image. Suitable for blur any content except
 pure white. (same as iOS Control Panel)
 */
- (nullable UIImage *)yy_imageByBlurLight;

/**
 Applies a blur effect to this image.
 Suitable for displaying black text. (same as iOS Navigation Bar White)
 */
- (nullable UIImage *)yy_imageByBlurExtraLight;

/**
 Applies a blur effect to this image.
 Suitable for displaying white text. (same as iOS Notification Center)
 */
- (nullable UIImage *)yy_imageByBlurDark;

/**
 Applies a blur and tint color to this image.

 @param tintColor  The tint color.
 */
- (nullable UIImage *)yy_imageByBlurWithTint:(UIColor *)tintColor;

/**
 Applies a blur, tint color, and saturation adjustment to this image,
 optionally within the area specified by @a maskImage.

 @param blurRadius     The radius of the blur in points, 0 means no blur effect.

 @param tintColor      An optional UIColor object that is uniformly blended with
                       the result of the blur and saturation operations. The
                       alpha channel of this color determines how strong the
                       tint is. nil means no tint.

 @param tintBlendMode  The @a tintColor blend mode. Default is kCGBlendModeNormal (0).

 @param saturation     A value of 1.0 produces no change in the resulting image.
                       Values less than 1.0 will desaturate the resulting image
                       while values greater than 1.0 will have the opposite
                       effect. 0 means gray scale.

 @param maskImage      If specified, @a inputImage is only modified in the
                       area(s) defined by this mask. This must be an image mask
                       or it must meet the requirements of the mask parameter of
                       CGContextClipToMask.

 @return image with effect, or nil if an error occurs (e.g. not enough memory).
 */
- (nullable UIImage *)yy_imageByBlurRadius:(CGFloat)blurRadius
                                 tintColor:(nullable UIColor *)tintColor
                                  tintMode:(CGBlendMode)tintBlendMode
                                saturation:(CGFloat)saturation
                                 maskImage:(nullable UIImage *)maskImage;

@end

NS_ASSUME_NONNULL_END

================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIImage+YYWebImage.m ================================================
//
//  UIImage+YYWebImage.m
//  YYWebImage
//
//  Created by ibireme on 13/4/4.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "UIImage+YYWebImage.h"
// NOTE(review): the extraction appears to have stripped the angle-bracket
// arguments of the three #imports below (presumably UIKit, ImageIO/CoreGraphics
// and Accelerate headers) — confirm against the upstream YYWebImage sources.
#import
#import
#import

// Dummy class for category
@interface UIImage_YYWebImage : NSObject @end
@implementation UIImage_YYWebImage @end

/// Convert degrees to radians.
static inline CGFloat _DegreesToRadians(CGFloat degrees) {
    return degrees * M_PI / 180;
}

/**
 Resize rect to fit the size using a given contentMode.

 @param rect The draw rect
 @param size The content size
 @param mode The content mode
 @return A resized rect for the given content mode.
 @discussion UIViewContentModeRedraw is same as UIViewContentModeScaleToFill.
 */
static CGRect _YYCGRectFitWithContentMode(CGRect rect, CGSize size, UIViewContentMode mode) {
    rect = CGRectStandardize(rect);
    size.width = size.width < 0 ? -size.width : size.width;
    size.height = size.height < 0 ? -size.height : size.height;
    CGPoint center = CGPointMake(CGRectGetMidX(rect), CGRectGetMidY(rect));
    switch (mode) {
        case UIViewContentModeScaleAspectFit:
        case UIViewContentModeScaleAspectFill: {
            // Degenerate sizes collapse to a zero-size rect at the center.
            if (rect.size.width < 0.01 || rect.size.height < 0.01 ||
                size.width < 0.01 || size.height < 0.01) {
                rect.origin = center;
                rect.size = CGSizeZero;
            } else {
                CGFloat scale;
                if (mode == UIViewContentModeScaleAspectFit) {
                    if (size.width / size.height < rect.size.width / rect.size.height) {
                        scale = rect.size.height / size.height;
                    } else {
                        scale = rect.size.width / size.width;
                    }
                } else {
                    if (size.width / size.height < rect.size.width / rect.size.height) {
                        scale = rect.size.width / size.width;
                    } else {
                        scale = rect.size.height / size.height;
                    }
                }
                size.width *= scale;
                size.height *= scale;
                rect.size = size;
                rect.origin = CGPointMake(center.x - size.width * 0.5, center.y - size.height * 0.5);
            }
        } break;
        case UIViewContentModeCenter: {
            rect.size = size;
            rect.origin = CGPointMake(center.x - size.width * 0.5, center.y - size.height * 0.5);
        } break;
        case UIViewContentModeTop: {
            rect.origin.x = center.x - size.width * 0.5;
            rect.size = size;
        } break;
        case UIViewContentModeBottom: {
            rect.origin.x = center.x - size.width * 0.5;
            rect.origin.y += rect.size.height - size.height;
            rect.size = size;
        } break;
        case UIViewContentModeLeft: {
            rect.origin.y = center.y - size.height * 0.5;
            rect.size = size;
        } break;
        case UIViewContentModeRight: {
            rect.origin.y = center.y - size.height * 0.5;
            rect.origin.x += rect.size.width - size.width;
            rect.size = size;
        } break;
        case UIViewContentModeTopLeft: {
            rect.size = size;
        } break;
        case UIViewContentModeTopRight: {
            rect.origin.x += rect.size.width - size.width;
            rect.size = size;
        } break;
        case UIViewContentModeBottomLeft: {
            rect.origin.y += rect.size.height - size.height;
            rect.size = size;
        } break;
        case UIViewContentModeBottomRight: {
            rect.origin.x += rect.size.width - size.width;
            rect.origin.y += rect.size.height - size.height;
            rect.size = size;
        } break;
        case UIViewContentModeScaleToFill:
        case UIViewContentModeRedraw:
        default: {
            rect = rect;
        }
    }
    return rect;
}

// Frame delay (seconds) for one GIF frame, read from the image source's
// GIF properties; prefers the unclamped delay and falls back to the clamped one.
static NSTimeInterval _yy_CGImageSourceGetGIFFrameDelayAtIndex(CGImageSourceRef source, size_t index) {
    NSTimeInterval delay = 0;
    CFDictionaryRef dic = CGImageSourceCopyPropertiesAtIndex(source, index, NULL);
    if (dic) {
        CFDictionaryRef dicGIF = CFDictionaryGetValue(dic, kCGImagePropertyGIFDictionary);
        if (dicGIF) {
            NSNumber *num = CFDictionaryGetValue(dicGIF, kCGImagePropertyGIFUnclampedDelayTime);
            if (num.doubleValue <= __FLT_EPSILON__) {
                num = CFDictionaryGetValue(dicGIF, kCGImagePropertyGIFDelayTime);
            }
            delay = num.doubleValue;
        }
        CFRelease(dic);
    }
    // Clamp tiny delays the way browsers do:
    // http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser-compatibility
    if (delay < 0.02) delay = 0.1;
    return delay;
}

@implementation UIImage (YYWebImage)

+ (UIImage *)yy_imageWithSmallGIFData:(NSData *)data scale:(CGFloat)scale {
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFTypeRef)(data), NULL);
    if (!source) return nil;
    size_t count = CGImageSourceGetCount(source);
    if (count <= 1) {
        // Not animated — decode as a plain image.
        CFRelease(source);
        return [self.class imageWithData:data scale:scale];
    }
    // Quantize each frame's delay to 1/50 s ticks, then reduce by the GCD of
    // the tick counts so the frame array stays as small as possible.
    NSUInteger frames[count];
    double oneFrameTime = 1 / 50.0; // 50 fps
    NSTimeInterval totalTime = 0;
    NSUInteger totalFrame = 0;
    NSUInteger gcdFrame = 0;
    for (size_t i = 0; i < count; i++) {
        NSTimeInterval delay = _yy_CGImageSourceGetGIFFrameDelayAtIndex(source, i);
        totalTime += delay;
        NSInteger frame = lrint(delay / oneFrameTime);
        if (frame < 1) frame = 1;
        frames[i] = frame;
        totalFrame += frames[i];
        if (i == 0) gcdFrame = frames[i];
        else {
            // Euclidean algorithm for the running GCD.
            NSUInteger frame = frames[i], tmp;
            if (frame < gcdFrame) {
                tmp = frame;
                frame = gcdFrame;
                gcdFrame = tmp;
            }
            while (true) {
                tmp = frame % gcdFrame;
                if (tmp == 0) break;
                frame = gcdFrame;
                gcdFrame = tmp;
            }
        }
    }
    NSMutableArray *array = [NSMutableArray new];
    for (size_t i = 0; i < count; i++) {
        CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, i, NULL);
        if (!imageRef) {
            CFRelease(source);
            return nil;
        }
        size_t width = CGImageGetWidth(imageRef);
        size_t height = CGImageGetHeight(imageRef);
        if (width == 0 || height == 0) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
        BOOL hasAlpha = NO;
        if (alphaInfo == kCGImageAlphaPremultipliedLast ||
            alphaInfo == kCGImageAlphaPremultipliedFirst ||
            alphaInfo == kCGImageAlphaLast ||
            alphaInfo == kCGImageAlphaFirst) {
            hasAlpha = YES;
        }
        // BGRA8888 (premultiplied) or BGRX8888
        // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        CGColorSpaceRef space = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space, bitmapInfo);
        CGColorSpaceRelease(space);
        if (!context) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        // Draw into the bitmap context to force-decode the frame.
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
        CGImageRef decoded = CGBitmapContextCreateImage(context);
        CFRelease(context);
        if (!decoded) {
            CFRelease(source);
            CFRelease(imageRef);
            return nil;
        }
        UIImage *image = [UIImage imageWithCGImage:decoded scale:scale orientation:UIImageOrientationUp];
        CGImageRelease(imageRef);
        CGImageRelease(decoded);
        if (!image) {
            CFRelease(source);
            return nil;
        }
        // Repeat the frame (ticks / gcd) times so all frames share one duration.
        for (size_t j = 0, max = frames[i] / gcdFrame; j < max; j++) {
            [array addObject:image];
        }
    }
    CFRelease(source);
    UIImage *image = [self.class animatedImageWithImages:array duration:totalTime];
    return image;
}

+ (UIImage *)yy_imageWithColor:(UIColor *)color {
    return [self yy_imageWithColor:color size:CGSizeMake(1, 1)];
}

+ (UIImage *)yy_imageWithColor:(UIColor *)color size:(CGSize)size {
    if (!color || size.width <= 0 || size.height <= 0) return nil;
    CGRect rect = CGRectMake(0.0f, 0.0f, size.width, size.height);
    UIGraphicsBeginImageContextWithOptions(rect.size, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetFillColorWithColor(context, color.CGColor);
    CGContextFillRect(context, rect);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

+ (UIImage *)yy_imageWithSize:(CGSize)size drawBlock:(void (^)(CGContextRef context))drawBlock {
    if (!drawBlock) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) return nil;
    drawBlock(context);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (BOOL)yy_hasAlphaChannel {
    if (self.CGImage == NULL) return NO;
    CGImageAlphaInfo alpha = CGImageGetAlphaInfo(self.CGImage) & kCGBitmapAlphaInfoMask;
    return (alpha == kCGImageAlphaFirst ||
            alpha == kCGImageAlphaLast ||
            alpha == kCGImageAlphaPremultipliedFirst ||
            alpha == kCGImageAlphaPremultipliedLast);
}

- (void)yy_drawInRect:(CGRect)rect withContentMode:(UIViewContentMode)contentMode clipsToBounds:(BOOL)clips{
    CGRect drawRect = _YYCGRectFitWithContentMode(rect, self.size, contentMode);
    if (drawRect.size.width == 0 || drawRect.size.height == 0) return;
    if (clips) {
        CGContextRef context = UIGraphicsGetCurrentContext();
        if (context) {
            CGContextSaveGState(context);
            CGContextAddRect(context, rect);
            CGContextClip(context);
            [self drawInRect:drawRect];
            CGContextRestoreGState(context);
        }
    } else {
        [self drawInRect:drawRect];
    }
}

- (UIImage *)yy_imageByResizeToSize:(CGSize)size {
    if (size.width <= 0 || size.height <= 0) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
    [self drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)yy_imageByResizeToSize:(CGSize)size contentMode:(UIViewContentMode)contentMode {
    if (size.width <= 0 || size.height <= 0) return nil;
    UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
    [self yy_drawInRect:CGRectMake(0, 0, size.width, size.height) withContentMode:contentMode clipsToBounds:NO];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)yy_imageByCropToRect:(CGRect)rect {
    // Crop rect is given in points; convert to pixels before cutting.
    rect.origin.x *= self.scale;
    rect.origin.y *= self.scale;
    rect.size.width *= self.scale;
    rect.size.height *= self.scale;
    if (rect.size.width <= 0 || rect.size.height <= 0) return nil;
    CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
    UIImage *image = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imageRef);
    return image;
}

- (UIImage *)yy_imageByInsetEdge:(UIEdgeInsets)insets withColor:(UIColor *)color {
    CGSize size = self.size;
    size.width -= insets.left + insets.right;
    size.height -= insets.top + insets.bottom;
    if (size.width <= 0 || size.height <= 0) return nil;
    CGRect rect = CGRectMake(-insets.left, -insets.top, self.size.width, self.size.height);
    UIGraphicsBeginImageContextWithOptions(size, NO, self.scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (color) {
        // Even-odd fill paints only the border region between the two rects.
        CGContextSetFillColorWithColor(context, color.CGColor);
        CGMutablePathRef path = CGPathCreateMutable();
        CGPathAddRect(path, NULL, CGRectMake(0, 0, size.width, size.height));
        CGPathAddRect(path, NULL, rect);
        CGContextAddPath(context, path);
        CGContextEOFillPath(context);
        CGPathRelease(path);
    }
    [self drawInRect:rect];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius {
    return [self yy_imageByRoundCornerRadius:radius borderWidth:0 borderColor:nil];
}

- (UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius
                             borderWidth:(CGFloat)borderWidth
                             borderColor:(UIColor *)borderColor {
    return [self yy_imageByRoundCornerRadius:radius
                                     corners:UIRectCornerAllCorners
                                 borderWidth:borderWidth
                                 borderColor:borderColor
                              borderLineJoin:kCGLineJoinMiter];
}

- (UIImage *)yy_imageByRoundCornerRadius:(CGFloat)radius
                                 corners:(UIRectCorner)corners
                             borderWidth:(CGFloat)borderWidth
                             borderColor:(UIColor *)borderColor
                          borderLineJoin:(CGLineJoin)borderLineJoin {
    // The context below is drawn with a flipped CTM, so mirror the requested
    // corners vertically to keep the visual result as the caller asked.
    if (corners != UIRectCornerAllCorners) {
        UIRectCorner tmp = 0;
        if (corners & UIRectCornerTopLeft) tmp |= UIRectCornerBottomLeft;
        if (corners & UIRectCornerTopRight) tmp |= UIRectCornerBottomRight;
        if (corners & UIRectCornerBottomLeft) tmp |= UIRectCornerTopLeft;
        if (corners & UIRectCornerBottomRight) tmp |= UIRectCornerTopRight;
        corners = tmp;
    }
    UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGRect rect = CGRectMake(0, 0, self.size.width, self.size.height);
    CGContextScaleCTM(context, 1, -1);
    CGContextTranslateCTM(context, 0, -rect.size.height);
    CGFloat minSize = MIN(self.size.width, self.size.height);
    if (borderWidth < minSize / 2) {
        UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectInset(rect, borderWidth, borderWidth) byRoundingCorners:corners cornerRadii:CGSizeMake(radius, borderWidth)];
        [path closePath];
        CGContextSaveGState(context);
        [path addClip];
        CGContextDrawImage(context, rect, self.CGImage);
        CGContextRestoreGState(context);
    }
    if (borderColor && borderWidth < minSize / 2 && borderWidth > 0) {
        // Align the stroke to pixel boundaries to avoid a blurry border.
        CGFloat strokeInset = (floor(borderWidth * self.scale) + 0.5) / self.scale;
        CGRect strokeRect = CGRectInset(rect, strokeInset, strokeInset);
        CGFloat strokeRadius = radius > self.scale / 2 ? radius - self.scale / 2 : 0;
        UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:strokeRect byRoundingCorners:corners cornerRadii:CGSizeMake(strokeRadius, borderWidth)];
        [path closePath];
        path.lineWidth = borderWidth;
        path.lineJoinStyle = borderLineJoin;
        [borderColor setStroke];
        [path stroke];
    }
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)yy_imageByRotate:(CGFloat)radians fitSize:(BOOL)fitSize {
    size_t width = (size_t)CGImageGetWidth(self.CGImage);
    size_t height = (size_t)CGImageGetHeight(self.CGImage);
    CGRect newRect = CGRectApplyAffineTransform(CGRectMake(0., 0., width, height),
                                                fitSize ? CGAffineTransformMakeRotation(radians) : CGAffineTransformIdentity);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL,
                                                 (size_t)newRect.size.width,
                                                 (size_t)newRect.size.height,
                                                 8,
                                                 (size_t)newRect.size.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (!context) return nil;
    CGContextSetShouldAntialias(context, true);
    CGContextSetAllowsAntialiasing(context, true);
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    // Rotate about the center of the destination rect.
    CGContextTranslateCTM(context, +(newRect.size.width * 0.5), +(newRect.size.height * 0.5));
    CGContextRotateCTM(context, radians);
    CGContextDrawImage(context, CGRectMake(-(width * 0.5), -(height * 0.5), width, height), self.CGImage);
    CGImageRef imgRef = CGBitmapContextCreateImage(context);
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imgRef);
    CGContextRelease(context);
    return img;
}

// In-place flip using vImage reflection on the decoded bitmap.
- (UIImage *)_yy_flipHorizontal:(BOOL)horizontal vertical:(BOOL)vertical {
    if (!self.CGImage) return nil;
    size_t width = (size_t)CGImageGetWidth(self.CGImage);
    size_t height = (size_t)CGImageGetHeight(self.CGImage);
    size_t bytesPerRow = width * 4;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (!context) return nil;
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), self.CGImage);
    UInt8 *data = (UInt8 *)CGBitmapContextGetData(context);
    if (!data) {
        CGContextRelease(context);
        return nil;
    }
    vImage_Buffer src = { data, height, width, bytesPerRow };
    vImage_Buffer dest = { data, height, width, bytesPerRow };
    if (vertical) {
        vImageVerticalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
    }
    if (horizontal) {
        vImageHorizontalReflect_ARGB8888(&src, &dest, kvImageBackgroundColorFill);
    }
    CGImageRef imgRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    UIImage *img = [UIImage imageWithCGImage:imgRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imgRef);
    return img;
}

- (UIImage *)yy_imageByRotateLeft90 {
    return [self yy_imageByRotate:_DegreesToRadians(90) fitSize:YES];
}

- (UIImage *)yy_imageByRotateRight90 {
    return [self yy_imageByRotate:_DegreesToRadians(-90) fitSize:YES];
}

- (UIImage *)yy_imageByRotate180 {
    // 180° rotation == flip both axes; cheaper than a rotated redraw.
    return [self _yy_flipHorizontal:YES vertical:YES];
}

- (UIImage *)yy_imageByFlipVertical {
    return [self _yy_flipHorizontal:NO vertical:YES];
}

- (UIImage *)yy_imageByFlipHorizontal {
    return [self _yy_flipHorizontal:YES vertical:NO];
}

- (UIImage *)yy_imageByTintColor:(UIColor *)color {
    UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
    CGRect rect = CGRectMake(0, 0, self.size.width, self.size.height);
    [color set];
    UIRectFill(rect);
    // DestinationIn keeps the fill only where this image has alpha.
    [self drawAtPoint:CGPointMake(0, 0) blendMode:kCGBlendModeDestinationIn alpha:1];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

- (UIImage *)yy_imageByGrayscale {
    // Saturation 0 with no blur/tint yields a grayscale image.
    return [self yy_imageByBlurRadius:0 tintColor:nil tintMode:0 saturation:0 maskImage:nil];
}

- (UIImage
*)yy_imageByBlurSoft { return [self yy_imageByBlurRadius:60 tintColor:[UIColor colorWithWhite:0.84 alpha:0.36] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil]; } - (UIImage *)yy_imageByBlurLight { return [self yy_imageByBlurRadius:60 tintColor:[UIColor colorWithWhite:1.0 alpha:0.3] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil]; } - (UIImage *)yy_imageByBlurExtraLight { return [self yy_imageByBlurRadius:40 tintColor:[UIColor colorWithWhite:0.97 alpha:0.82] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil]; } - (UIImage *)yy_imageByBlurDark { return [self yy_imageByBlurRadius:40 tintColor:[UIColor colorWithWhite:0.11 alpha:0.73] tintMode:kCGBlendModeNormal saturation:1.8 maskImage:nil]; } - (UIImage *)yy_imageByBlurWithTint:(UIColor *)tintColor { const CGFloat EffectColorAlpha = 0.6; UIColor *effectColor = tintColor; size_t componentCount = CGColorGetNumberOfComponents(tintColor.CGColor); if (componentCount == 2) { CGFloat b; if ([tintColor getWhite:&b alpha:NULL]) { effectColor = [UIColor colorWithWhite:b alpha:EffectColorAlpha]; } } else { CGFloat r, g, b; if ([tintColor getRed:&r green:&g blue:&b alpha:NULL]) { effectColor = [UIColor colorWithRed:r green:g blue:b alpha:EffectColorAlpha]; } } return [self yy_imageByBlurRadius:20 tintColor:effectColor tintMode:kCGBlendModeNormal saturation:-1.0 maskImage:nil]; } - (UIImage *)yy_imageByBlurRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor tintMode:(CGBlendMode)tintBlendMode saturation:(CGFloat)saturation maskImage:(UIImage *)maskImage { if (self.size.width < 1 || self.size.height < 1) { NSLog(@"UIImage+YYAdd error: invalid size: (%.2f x %.2f). 
Both dimensions must be >= 1: %@", self.size.width, self.size.height, self); return nil; } if (!self.CGImage) { NSLog(@"UIImage+YYAdd error: inputImage must be backed by a CGImage: %@", self); return nil; } if (maskImage && !maskImage.CGImage) { NSLog(@"UIImage+YYAdd error: effectMaskImage must be backed by a CGImage: %@", maskImage); return nil; } // iOS7 and above can use new func. BOOL hasNewFunc = (long)vImageBuffer_InitWithCGImage != 0 && (long)vImageCreateCGImageFromBuffer != 0; BOOL hasBlur = blurRadius > __FLT_EPSILON__; BOOL hasSaturation = fabs(saturation - 1.0) > __FLT_EPSILON__; CGSize size = self.size; CGRect rect = { CGPointZero, size }; CGFloat scale = self.scale; CGImageRef imageRef = self.CGImage; BOOL opaque = NO; if (!hasBlur && !hasSaturation) { return [self _yy_mergeImageRef:imageRef tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque]; } vImage_Buffer effect = { 0 }, scratch = { 0 }; vImage_Buffer *input = NULL, *output = NULL; vImage_CGImageFormat format = { .bitsPerComponent = 8, .bitsPerPixel = 32, .colorSpace = NULL, .bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little, //requests a BGRA buffer. 
.version = 0, .decode = NULL, .renderingIntent = kCGRenderingIntentDefault }; if (hasNewFunc) { vImage_Error err; err = vImageBuffer_InitWithCGImage(&effect, &format, NULL, imageRef, kvImagePrintDiagnosticsToConsole); if (err != kvImageNoError) { NSLog(@"UIImage+YYAdd error: vImageBuffer_InitWithCGImage returned error code %zi for inputImage: %@", err, self); return nil; } err = vImageBuffer_Init(&scratch, effect.height, effect.width, format.bitsPerPixel, kvImageNoFlags); if (err != kvImageNoError) { NSLog(@"UIImage+YYAdd error: vImageBuffer_Init returned error code %zi for inputImage: %@", err, self); return nil; } } else { UIGraphicsBeginImageContextWithOptions(size, opaque, scale); CGContextRef effectCtx = UIGraphicsGetCurrentContext(); CGContextScaleCTM(effectCtx, 1.0, -1.0); CGContextTranslateCTM(effectCtx, 0, -size.height); CGContextDrawImage(effectCtx, rect, imageRef); effect.data = CGBitmapContextGetData(effectCtx); effect.width = CGBitmapContextGetWidth(effectCtx); effect.height = CGBitmapContextGetHeight(effectCtx); effect.rowBytes = CGBitmapContextGetBytesPerRow(effectCtx); UIGraphicsBeginImageContextWithOptions(size, opaque, scale); CGContextRef scratchCtx = UIGraphicsGetCurrentContext(); scratch.data = CGBitmapContextGetData(scratchCtx); scratch.width = CGBitmapContextGetWidth(scratchCtx); scratch.height = CGBitmapContextGetHeight(scratchCtx); scratch.rowBytes = CGBitmapContextGetBytesPerRow(scratchCtx); } input = &effect; output = &scratch; if (hasBlur) { // A description of how to compute the box kernel width from the Gaussian // radius (aka standard deviation) appears in the SVG spec: // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement // // For larger values of 's' (s >= 2.0), an approximation can be used: Three // successive box-blurs build a piece-wise quadratic convolution kernel, which // approximates the Gaussian kernel to within roughly 3%. // // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5) // // ... 
if d is odd, use three box-blurs of size 'd', centered on the output pixel. // CGFloat inputRadius = blurRadius * scale; if (inputRadius - 2.0 < __FLT_EPSILON__) inputRadius = 2.0; uint32_t radius = floor((inputRadius * 3.0 * sqrt(2 * M_PI) / 4 + 0.5) / 2); radius |= 1; // force radius to be odd so that the three box-blur methodology works. int iterations; if (blurRadius * scale < 0.5) iterations = 1; else if (blurRadius * scale < 1.5) iterations = 2; else iterations = 3; NSInteger tempSize = vImageBoxConvolve_ARGB8888(input, output, NULL, 0, 0, radius, radius, NULL, kvImageGetTempBufferSize | kvImageEdgeExtend); void *temp = malloc(tempSize); for (int i = 0; i < iterations; i++) { vImageBoxConvolve_ARGB8888(input, output, temp, 0, 0, radius, radius, NULL, kvImageEdgeExtend); // swap vImage_Buffer *swap_tmp = input; input = output; output = swap_tmp; } free(temp); } if (hasSaturation) { // These values appear in the W3C Filter Effects spec: // https://dvcs.w3.org/hg/FXTF/raw-file/default/filters/Publish.html#grayscaleEquivalent CGFloat s = saturation; CGFloat matrixFloat[] = { 0.0722 + 0.9278 * s, 0.0722 - 0.0722 * s, 0.0722 - 0.0722 * s, 0, 0.7152 - 0.7152 * s, 0.7152 + 0.2848 * s, 0.7152 - 0.7152 * s, 0, 0.2126 - 0.2126 * s, 0.2126 - 0.2126 * s, 0.2126 + 0.7873 * s, 0, 0, 0, 0, 1, }; const int32_t divisor = 256; NSUInteger matrixSize = sizeof(matrixFloat) / sizeof(matrixFloat[0]); int16_t matrix[matrixSize]; for (NSUInteger i = 0; i < matrixSize; ++i) { matrix[i] = (int16_t)roundf(matrixFloat[i] * divisor); } vImageMatrixMultiply_ARGB8888(input, output, matrix, divisor, NULL, NULL, kvImageNoFlags); // swap vImage_Buffer *swap_tmp = input; input = output; output = swap_tmp; } UIImage *outputImage = nil; if (hasNewFunc) { CGImageRef effectCGImage = NULL; effectCGImage = vImageCreateCGImageFromBuffer(input, &format, &_yy_cleanupBuffer, NULL, kvImageNoAllocate, NULL); if (effectCGImage == NULL) { effectCGImage = vImageCreateCGImageFromBuffer(input, &format, NULL, 
NULL, kvImageNoFlags, NULL); free(input->data); } free(output->data); outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque]; CGImageRelease(effectCGImage); } else { CGImageRef effectCGImage; UIImage *effectImage; if (input != &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); if (input == &effect) effectImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); effectCGImage = effectImage.CGImage; outputImage = [self _yy_mergeImageRef:effectCGImage tintColor:tintColor tintBlendMode:tintBlendMode maskImage:maskImage opaque:opaque]; } return outputImage; } // Helper function to handle deferred cleanup of a buffer. static void _yy_cleanupBuffer(void *userData, void *buf_data) { free(buf_data); } // Helper function to add tint and mask. - (UIImage *)_yy_mergeImageRef:(CGImageRef)effectCGImage tintColor:(UIColor *)tintColor tintBlendMode:(CGBlendMode)tintBlendMode maskImage:(UIImage *)maskImage opaque:(BOOL)opaque { BOOL hasTint = tintColor != nil && CGColorGetAlpha(tintColor.CGColor) > __FLT_EPSILON__; BOOL hasMask = maskImage != nil; CGSize size = self.size; CGRect rect = { CGPointZero, size }; CGFloat scale = self.scale; if (!hasTint && !hasMask) { return [UIImage imageWithCGImage:effectCGImage]; } UIGraphicsBeginImageContextWithOptions(size, opaque, scale); CGContextRef context = UIGraphicsGetCurrentContext(); CGContextScaleCTM(context, 1.0, -1.0); CGContextTranslateCTM(context, 0, -size.height); if (hasMask) { CGContextDrawImage(context, rect, self.CGImage); CGContextSaveGState(context); CGContextClipToMask(context, rect, maskImage.CGImage); } CGContextDrawImage(context, rect, effectCGImage); if (hasTint) { CGContextSaveGState(context); CGContextSetBlendMode(context, tintBlendMode); CGContextSetFillColorWithColor(context, tintColor.CGColor); CGContextFillRect(context, rect); CGContextRestoreGState(context); } if 
(hasMask) { CGContextRestoreGState(context); } UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return outputImage; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIImageView+YYWebImage.h ================================================ // // UIImageView+YYWebImage.h // YYWebImage // // Created by ibireme on 15/2/23. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #if __has_include() #import #else #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** Web image methods for UIImageView. */ @interface UIImageView (YYWebImage) #pragma mark - image /** Current image URL. @discussion Set a new value to this property will cancel the previous request operation and create a new request operation to fetch image. Set nil to clear the image and image URL. */ @property (nullable, nonatomic, strong) NSURL *yy_imageURL; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param options The options to use when request the image. */ - (void)yy_setImageWithURL:(nullable NSURL *)imageURL options:(YYWebImageOptions)options; /** Set the view's `image` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. 
 */
- (void)yy_setImageWithURL:(nullable NSURL *)imageURL
               placeholder:(nullable UIImage *)placeholder
                   options:(YYWebImageOptions)options
                completion:(nullable YYWebImageCompletionBlock)completion;

/**
 Set the view's `image` with a specified URL.
 
 @param imageURL    The image url (remote or local file path).
 @param placeholder The image to be set initially, until the image request finishes.
 @param options     The options to use when request the image.
 @param progress    The block invoked (on main thread) during image request.
 @param transform   The block invoked (on background thread) to do additional image process.
 @param completion  The block invoked (on main thread) when image request completed.
 */
- (void)yy_setImageWithURL:(nullable NSURL *)imageURL
               placeholder:(nullable UIImage *)placeholder
                   options:(YYWebImageOptions)options
                  progress:(nullable YYWebImageProgressBlock)progress
                 transform:(nullable YYWebImageTransformBlock)transform
                completion:(nullable YYWebImageCompletionBlock)completion;

/**
 Set the view's `image` with a specified URL.
 
 @param imageURL    The image url (remote or local file path).
 @param placeholder The image to be set initially, until the image request finishes.
 @param options     The options to use when request the image.
 @param manager     The manager to create image request operation.
 @param progress    The block invoked (on main thread) during image request.
 @param transform   The block invoked (on background thread) to do additional image process.
 @param completion  The block invoked (on main thread) when image request completed.
 */
- (void)yy_setImageWithURL:(nullable NSURL *)imageURL
               placeholder:(nullable UIImage *)placeholder
                   options:(YYWebImageOptions)options
                   manager:(nullable YYWebImageManager *)manager
                  progress:(nullable YYWebImageProgressBlock)progress
                 transform:(nullable YYWebImageTransformBlock)transform
                completion:(nullable YYWebImageCompletionBlock)completion;

/**
 Cancel the current image request.
*/ - (void)yy_cancelCurrentImageRequest; #pragma mark - highlight image /** Current highlighted image URL. @discussion Set a new value to this property will cancel the previous request operation and create a new request operation to fetch image. Set nil to clear the highlighted image and image URL. */ @property (nullable, nonatomic, strong) NSURL *yy_highlightedImageURL; /** Set the view's `highlightedImage` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. */ - (void)yy_setHighlightedImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder; /** Set the view's `highlightedImage` with a specified URL. @param imageURL The image url (remote or local file path). @param options The options to use when request the image. */ - (void)yy_setHighlightedImageWithURL:(nullable NSURL *)imageURL options:(YYWebImageOptions)options; /** Set the view's `highlightedImage` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param completion The block invoked (on main thread) when image request completed. */ - (void)yy_setHighlightedImageWithURL:(nullable NSURL *)imageURL placeholder:(nullable UIImage *)placeholder options:(YYWebImageOptions)options completion:(nullable YYWebImageCompletionBlock)completion; /** Set the view's `highlightedImage` with a specified URL. @param imageURL The image url (remote or local file path). @param placeholder The image to be set initially, until the image request finishes. @param options The options to use when request the image. @param progress The block invoked (on main thread) during image request. @param transform The block invoked (on background thread) to do additional image process. 
@param completion The block invoked (on main thread) when image request completed.
 */
- (void)yy_setHighlightedImageWithURL:(nullable NSURL *)imageURL
                          placeholder:(nullable UIImage *)placeholder
                              options:(YYWebImageOptions)options
                             progress:(nullable YYWebImageProgressBlock)progress
                            transform:(nullable YYWebImageTransformBlock)transform
                           completion:(nullable YYWebImageCompletionBlock)completion;

/**
 Set the view's `highlightedImage` with a specified URL.
 
 @param imageURL    The image url (remote or local file path).
 @param placeholder The image to be set initially, until the image request finishes.
 @param options     The options to use when request the image.
 @param manager     The manager to create image request operation.
 @param progress    The block invoked (on main thread) during image request.
 @param transform   The block invoked (on background thread) to do additional image process.
 @param completion  The block invoked (on main thread) when image request completed.
 */
- (void)yy_setHighlightedImageWithURL:(nullable NSURL *)imageURL
                          placeholder:(nullable UIImage *)placeholder
                              options:(YYWebImageOptions)options
                              manager:(nullable YYWebImageManager *)manager
                             progress:(nullable YYWebImageProgressBlock)progress
                            transform:(nullable YYWebImageTransformBlock)transform
                           completion:(nullable YYWebImageCompletionBlock)completion;

/**
 Cancel the current highlighted image request.
 */
- (void)yy_cancelCurrentHighlightedImageRequest;

@end

NS_ASSUME_NONNULL_END

================================================
FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/UIImageView+YYWebImage.m
================================================
//
//  UIImageView+YYWebImage.m
//  YYWebImage
//
//  Created by ibireme on 15/2/23.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
// #import "UIImageView+YYWebImage.h" #import "YYWebImageOperation.h" #import "_YYWebImageSetter.h" #import // Dummy class for category @interface UIImageView_YYWebImage : NSObject @end @implementation UIImageView_YYWebImage @end static int _YYWebImageSetterKey; static int _YYWebImageHighlightedSetterKey; @implementation UIImageView (YYWebImage) #pragma mark - image - (NSURL *)yy_imageURL { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); return setter.imageURL; } - (void)setYy_imageURL:(NSURL *)imageURL { [self yy_setImageWithURL:imageURL placeholder:nil options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder { [self yy_setImageWithURL:imageURL placeholder:placeholder options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL options:(YYWebImageOptions)options { [self yy_setImageWithURL:imageURL placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { [self yy_setImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion]; } - (void)yy_setImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager 
progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL]; manager = manager ? manager : [YYWebImageManager sharedManager]; _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (!setter) { setter = [_YYWebImageSetter new]; objc_setAssociatedObject(self, &_YYWebImageSetterKey, setter, OBJC_ASSOCIATION_RETAIN_NONATOMIC); } int32_t sentinel = [setter cancelWithNewURL:imageURL]; _yy_dispatch_sync_on_main_queue(^{ if ((options & YYWebImageOptionSetImageWithFadeAnimation) && !(options & YYWebImageOptionAvoidSetImage)) { if (!self.highlighted) { [self.layer removeAnimationForKey:_YYWebImageFadeAnimationKey]; } } if (!imageURL) { if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.image = placeholder; } return; } // get the image from memory as quickly as possible UIImage *imageFromMemory = nil; if (manager.cache && !(options & YYWebImageOptionUseNSURLCache) && !(options & YYWebImageOptionRefreshImageCache)) { imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory]; } if (imageFromMemory) { if (!(options & YYWebImageOptionAvoidSetImage)) { self.image = imageFromMemory; } if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil); return; } if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.image = placeholder; } __weak typeof(self) _self = self; dispatch_async([_YYWebImageSetter setterQueue], ^{ YYWebImageProgressBlock _progress = nil; if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) { dispatch_async(dispatch_get_main_queue(), ^{ progress(receivedSize, expectedSize); }); }; __block int32_t newSentinel = 0; __block __weak typeof(setter) weakSetter = nil; YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL 
*url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) { __strong typeof(_self) self = _self; BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && image && !(options & YYWebImageOptionAvoidSetImage); dispatch_async(dispatch_get_main_queue(), ^{ BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel; if (setImage && self && !sentinelChanged) { BOOL showFade = ((options & YYWebImageOptionSetImageWithFadeAnimation) && !self.highlighted); if (showFade) { CATransition *transition = [CATransition animation]; transition.duration = stage == YYWebImageStageFinished ? _YYWebImageFadeTime : _YYWebImageProgressiveFadeTime; transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut]; transition.type = kCATransitionFade; [self.layer addAnimation:transition forKey:_YYWebImageFadeAnimationKey]; } self.image = image; } if (completion) { if (sentinelChanged) { completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil); } else { completion(image, url, from, stage, error); } } }); }; newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion]; weakSetter = setter; }); }); } - (void)yy_cancelCurrentImageRequest { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageSetterKey); if (setter) [setter cancel]; } #pragma mark - highlighted image - (NSURL *)yy_highlightedImageURL { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageHighlightedSetterKey); return setter.imageURL; } - (void)setYy_highlightedImageURL:(NSURL *)imageURL { [self yy_setHighlightedImageWithURL:imageURL placeholder:nil options:kNilOptions manager:nil progress:nil transform:nil completion:nil]; } - (void)yy_setHighlightedImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder { [self yy_setHighlightedImageWithURL:imageURL placeholder:placeholder 
options:kNilOptions manager:nil progress:nil transform:nil completion:nil];
}

// Convenience setter: highlighted image from URL with request options only.
- (void)yy_setHighlightedImageWithURL:(NSURL *)imageURL options:(YYWebImageOptions)options {
    [self yy_setHighlightedImageWithURL:imageURL placeholder:nil options:options manager:nil progress:nil transform:nil completion:nil];
}

// Convenience setter: highlighted image with placeholder, options and a
// completion block; no progress or transform.
- (void)yy_setHighlightedImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options completion:(YYWebImageCompletionBlock)completion {
    [self yy_setHighlightedImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:nil transform:nil completion:completion];
}

// Convenience setter: highlighted image with progress/transform/completion
// blocks, using the shared manager.
- (void)yy_setHighlightedImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    // BUG FIX: this wrapper previously forwarded `transform:nil`, silently
    // discarding the caller-supplied transform block. Forward it through,
    // matching the non-highlighted counterpart
    // (-yy_setImageWithURL:placeholder:options:progress:transform:completion:),
    // which passes `transform:transform`.
    [self yy_setHighlightedImageWithURL:imageURL placeholder:placeholder options:options manager:nil progress:progress transform:transform completion:completion];
}

// Designated setter: resolves the manager, tracks the request sentinel via an
// associated _YYWebImageSetter, then performs the memory-cache lookup and the
// asynchronous fetch (body continues past this line).
- (void)yy_setHighlightedImageWithURL:(NSURL *)imageURL placeholder:(UIImage *)placeholder options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion {
    // Tolerate an NSString passed where an NSURL is expected.
    if ([imageURL isKindOfClass:[NSString class]]) imageURL = [NSURL URLWithString:(id)imageURL];
    manager = manager ?
manager : [YYWebImageManager sharedManager]; _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageHighlightedSetterKey); if (!setter) { setter = [_YYWebImageSetter new]; objc_setAssociatedObject(self, &_YYWebImageHighlightedSetterKey, setter, OBJC_ASSOCIATION_RETAIN_NONATOMIC); } int32_t sentinel = [setter cancelWithNewURL:imageURL]; _yy_dispatch_sync_on_main_queue(^{ if ((options & YYWebImageOptionSetImageWithFadeAnimation) && !(options & YYWebImageOptionAvoidSetImage)) { if (self.highlighted) { [self.layer removeAnimationForKey:_YYWebImageFadeAnimationKey]; } } if (!imageURL) { if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.highlightedImage = placeholder; } return; } // get the image from memory as quickly as possible UIImage *imageFromMemory = nil; if (manager.cache && !(options & YYWebImageOptionUseNSURLCache) && !(options & YYWebImageOptionRefreshImageCache)) { imageFromMemory = [manager.cache getImageForKey:[manager cacheKeyForURL:imageURL] withType:YYImageCacheTypeMemory]; } if (imageFromMemory) { if (!(options & YYWebImageOptionAvoidSetImage)) { self.highlightedImage = imageFromMemory; } if(completion) completion(imageFromMemory, imageURL, YYWebImageFromMemoryCacheFast, YYWebImageStageFinished, nil); return; } if (!(options & YYWebImageOptionIgnorePlaceHolder)) { self.highlightedImage = placeholder; } __weak typeof(self) _self = self; dispatch_async([_YYWebImageSetter setterQueue], ^{ YYWebImageProgressBlock _progress = nil; if (progress) _progress = ^(NSInteger receivedSize, NSInteger expectedSize) { dispatch_async(dispatch_get_main_queue(), ^{ progress(receivedSize, expectedSize); }); }; __block int32_t newSentinel = 0; __block __weak typeof(setter) weakSetter = nil; YYWebImageCompletionBlock _completion = ^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) { __strong typeof(_self) self = _self; BOOL setImage = (stage == YYWebImageStageFinished || stage == YYWebImageStageProgress) && 
image && !(options & YYWebImageOptionAvoidSetImage); BOOL showFade = ((options & YYWebImageOptionSetImageWithFadeAnimation) && self.highlighted); dispatch_async(dispatch_get_main_queue(), ^{ BOOL sentinelChanged = weakSetter && weakSetter.sentinel != newSentinel; if (setImage && self && !sentinelChanged) { if (showFade) { CATransition *transition = [CATransition animation]; transition.duration = stage == YYWebImageStageFinished ? _YYWebImageFadeTime : _YYWebImageProgressiveFadeTime; transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut]; transition.type = kCATransitionFade; [self.layer addAnimation:transition forKey:_YYWebImageFadeAnimationKey]; } self.highlightedImage = image; } if (completion) { if (sentinelChanged) { completion(nil, url, YYWebImageFromNone, YYWebImageStageCancelled, nil); } else { completion(image, url, from, stage, error); } } }); }; newSentinel = [setter setOperationWithSentinel:sentinel url:imageURL options:options manager:manager progress:_progress transform:transform completion:_completion]; weakSetter = setter; }); }); } - (void)yy_cancelCurrentHighlightedImageRequest { _YYWebImageSetter *setter = objc_getAssociatedObject(self, &_YYWebImageHighlightedSetterKey); if (setter) [setter cancel]; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/_YYWebImageSetter.h ================================================ // // _YYWebImageSetter.h // YYWebImage // // Created by ibireme on 15/7/15. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #import #if __has_include() #import #else #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** Submits a block for execution on a main queue and waits until the block completes. 
*/ static inline void _yy_dispatch_sync_on_main_queue(void (^block)()) { if (pthread_main_np()) { block(); } else { dispatch_sync(dispatch_get_main_queue(), block); } } extern NSString *const _YYWebImageFadeAnimationKey; extern const NSTimeInterval _YYWebImageFadeTime; extern const NSTimeInterval _YYWebImageProgressiveFadeTime; /** Private class used by web image categories. Typically, you should not use this class directly. */ @interface _YYWebImageSetter : NSObject /// Current image url. @property (nullable, nonatomic, readonly) NSURL *imageURL; /// Current sentinel. @property (nonatomic, readonly) int32_t sentinel; /// Create new operation for web image and return a sentinel value. - (int32_t)setOperationWithSentinel:(int32_t)sentinel url:(nullable NSURL *)imageURL options:(YYWebImageOptions)options manager:(YYWebImageManager *)manager progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /// Cancel and return a sentinel value. The imageURL will be set to nil. - (int32_t)cancel; /// Cancel and return a sentinel value. The imageURL will be set to new value. - (int32_t)cancelWithNewURL:(nullable NSURL *)imageURL; /// A queue to set operation. + (dispatch_queue_t)setterQueue; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/Categories/_YYWebImageSetter.m ================================================ // // _YYWebImageSetter.m // YYWebImage // // Created by ibireme on 15/7/15. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. 
//

#import "_YYWebImageSetter.h"
#import "YYWebImageOperation.h"
#import <libkern/OSAtomic.h>  // restored (stripped by extraction): OSAtomicIncrement32 below

NSString *const _YYWebImageFadeAnimationKey = @"YYWebImageFade";
const NSTimeInterval _YYWebImageFadeTime = 0.2;
const NSTimeInterval _YYWebImageProgressiveFadeTime = 0.4;

@implementation _YYWebImageSetter {
    dispatch_semaphore_t _lock;  // serializes access to _imageURL/_operation/_sentinel
    NSURL *_imageURL;
    NSOperation *_operation;     // in-flight image operation, if any
    int32_t _sentinel;           // bumped on every cancel/replace; detects stale requests
}

- (instancetype)init {
    self = [super init];
    if (self) {
        _lock = dispatch_semaphore_create(1);
    }
    return self;
}

// Thread-safe read of the current URL.
- (NSURL *)imageURL {
    dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
    NSURL *imageURL = _imageURL;
    dispatch_semaphore_signal(_lock);
    return imageURL;
}

- (void)dealloc {
    // Invalidate any callback that might still fire, then cancel the operation.
    OSAtomicIncrement32(&_sentinel);
    [_operation cancel];
}

/// Creates the web image operation for `imageURL` and installs it, unless the
/// sentinel shows the request was cancelled/replaced in the meantime.
/// Returns the (possibly incremented) sentinel value.
- (int32_t)setOperationWithSentinel:(int32_t)sentinel
                                url:(NSURL *)imageURL
                            options:(YYWebImageOptions)options
                            manager:(YYWebImageManager *)manager
                           progress:(YYWebImageProgressBlock)progress
                          transform:(YYWebImageTransformBlock)transform
                         completion:(YYWebImageCompletionBlock)completion {
    // A stale sentinel means the request was cancelled before this ran.
    if (sentinel != _sentinel) {
        if (completion) completion(nil, imageURL, YYWebImageFromNone, YYWebImageStageCancelled, nil);
        return _sentinel;
    }

    NSOperation *operation = [manager requestImageWithURL:imageURL
                                                  options:options
                                                 progress:progress
                                                transform:transform
                                               completion:completion];
    if (!operation && completion) {
        NSDictionary *userInfo = @{
            NSLocalizedDescriptionKey : @"YYWebImageOperation create failed."
        };
        completion(nil, imageURL, YYWebImageFromNone, YYWebImageStageFinished,
                   [NSError errorWithDomain:@"com.ibireme.webimage" code:-1 userInfo:userInfo]);
    }

    dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
    if (sentinel == _sentinel) {
        if (_operation) [_operation cancel];
        _operation = operation;
        sentinel = OSAtomicIncrement32(&_sentinel);
    } else {
        // Cancelled while the operation was being created; drop it.
        [operation cancel];
    }
    dispatch_semaphore_signal(_lock);
    return sentinel;
}

- (int32_t)cancel {
    return [self cancelWithNewURL:nil];
}

/// Cancels the in-flight operation (if any), records the new URL, and
/// invalidates outstanding callbacks by bumping the sentinel.
- (int32_t)cancelWithNewURL:(NSURL *)imageURL {
    int32_t sentinel;
    dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
    if (_operation) {
        [_operation cancel];
        _operation = nil;
    }
    _imageURL = imageURL;
    sentinel = OSAtomicIncrement32(&_sentinel);
    dispatch_semaphore_signal(_lock);
    return sentinel;
}

/// Shared serial queue on which the categories schedule setter work.
+ (dispatch_queue_t)setterQueue {
    static dispatch_queue_t queue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        queue = dispatch_queue_create("com.ibireme.webimage.setter", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
    });
    return queue;
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYImageCache.h
================================================
//
//  YYImageCache.h
//  YYWebImage
//
//  Created by ibireme on 15/2/15.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import <UIKit/UIKit.h>  // restored (stripped by extraction) — TODO confirm against upstream YYWebImage

@class YYMemoryCache, YYDiskCache;

NS_ASSUME_NONNULL_BEGIN

/// Image cache type
typedef NS_OPTIONS(NSUInteger, YYImageCacheType) {
    /// No value.
    YYImageCacheTypeNone   = 0,
    /// Get/store image with memory cache.
    YYImageCacheTypeMemory = 1 << 0,
    /// Get/store image with disk cache.
    YYImageCacheTypeDisk   = 1 << 1,
    /// Get/store image with both memory cache and disk cache.
YYImageCacheTypeAll = YYImageCacheTypeMemory | YYImageCacheTypeDisk, }; /** YYImageCache is a cache that stores UIImage and image data based on memory cache and disk cache. @discussion The disk cache will try to protect the original image data: * If the original image is still image, it will be saved as png/jpeg file based on alpha information. * If the original image is animated gif, apng or webp, it will be saved as original format. * If the original image's scale is not 1, the scale value will be saved as extended data. Although UIImage can be serialized with NSCoding protocol, but it's not a good idea: Apple actually use UIImagePNGRepresentation() to encode all kind of image, it may lose the original multi-frame data. The result is packed to plist file and cannot view with photo viewer directly. If the image has no alpha channel, using JPEG instead of PNG can save more disk size and encoding/decoding time. */ @interface YYImageCache : NSObject #pragma mark - Attribute ///============================================================================= /// @name Attribute ///============================================================================= /** The name of the cache. Default is nil. */ @property (nullable, copy) NSString *name; /** The underlying memory cache. see `YYMemoryCache` for more information.*/ @property (strong, readonly) YYMemoryCache *memoryCache; /** The underlying disk cache. see `YYDiskCache` for more information.*/ @property (strong, readonly) YYDiskCache *diskCache; /** Whether decode animated image when fetch image from disk cache. Default is YES. @discussion When fetch image from disk cache, it will use 'YYImage' to decode animated image such as WebP/APNG/GIF. Set to 'NO' to ignore animated image. */ @property BOOL allowAnimatedImage; /** Whether decode the image to memory bitmap. Default is YES. @discussion If the value is YES, then the image will be decoded to memory bitmap for better display performance, but may cost more memory. 
*/ @property BOOL decodeForDisplay; #pragma mark - Initializer ///============================================================================= /// @name Initializer ///============================================================================= - (instancetype)init UNAVAILABLE_ATTRIBUTE; + (instancetype)new UNAVAILABLE_ATTRIBUTE; /** Returns global shared image cache instance. @return The singleton YYImageCache instance. */ + (instancetype)sharedCache; /** The designated initializer. Multiple instances with the same path will make the cache unstable. @param path Full path of a directory in which the cache will write data. Once initialized you should not read and write to this directory. @result A new cache object, or nil if an error occurs. */ - (nullable instancetype)initWithPath:(NSString *)path NS_DESIGNATED_INITIALIZER; #pragma mark - Access Methods ///============================================================================= /// @name Access Methods ///============================================================================= /** Sets the image with the specified key in the cache (both memory and disk). This method returns immediately and executes the store operation in background. @param image The image to be stored in the cache. If nil, this method has no effect. @param key The key with which to associate the image. If nil, this method has no effect. */ - (void)setImage:(UIImage *)image forKey:(NSString *)key; /** Sets the image with the specified key in the cache. This method returns immediately and executes the store operation in background. @discussion If the `type` contain `YYImageCacheTypeMemory`, then the `image` will be stored in the memory cache; `imageData` will be used instead if `image` is nil. If the `type` contain `YYImageCacheTypeDisk`, then the `imageData` will be stored in the disk cache; `image` will be used instead if `imageData` is nil. @param image The image to be stored in the cache. 
@param imageData The image data to be stored in the cache. @param key The key with which to associate the image. If nil, this method has no effect. @param type The cache type to store image. */ - (void)setImage:(nullable UIImage *)image imageData:(nullable NSData *)imageData forKey:(NSString *)key withType:(YYImageCacheType)type; /** Removes the image of the specified key in the cache (both memory and disk). This method returns immediately and executes the remove operation in background. @param key The key identifying the image to be removed. If nil, this method has no effect. */ - (void)removeImageForKey:(NSString *)key; /** Removes the image of the specified key in the cache. This method returns immediately and executes the remove operation in background. @param key The key identifying the image to be removed. If nil, this method has no effect. @param type The cache type to remove image. */ - (void)removeImageForKey:(NSString *)key withType:(YYImageCacheType)type; /** Returns a Boolean value that indicates whether a given key is in cache. If the image is not in memory, this method may blocks the calling thread until file read finished. @param key A string identifying the image. If nil, just return NO. @return Whether the image is in cache. */ - (BOOL)containsImageForKey:(NSString *)key; /** Returns a Boolean value that indicates whether a given key is in cache. If the image is not in memory and the `type` contains `YYImageCacheTypeDisk`, this method may blocks the calling thread until file read finished. @param key A string identifying the image. If nil, just return NO. @param type The cache type. @return Whether the image is in cache. */ - (BOOL)containsImageForKey:(NSString *)key withType:(YYImageCacheType)type; /** Returns the image associated with a given key. If the image is not in memory, this method may blocks the calling thread until file read finished. @param key A string identifying the image. If nil, just return nil. 
@return The image associated with key, or nil if no image is associated with key. */ - (nullable UIImage *)getImageForKey:(NSString *)key; /** Returns the image associated with a given key. If the image is not in memory and the `type` contains `YYImageCacheTypeDisk`, this method may blocks the calling thread until file read finished. @param key A string identifying the image. If nil, just return nil. @return The image associated with key, or nil if no image is associated with key. */ - (nullable UIImage *)getImageForKey:(NSString *)key withType:(YYImageCacheType)type; /** Asynchronously get the image associated with a given key. @param key A string identifying the image. If nil, just return nil. @param type The cache type. @param block A completion block which will be called on main thread. */ - (void)getImageForKey:(NSString *)key withType:(YYImageCacheType)type withBlock:(void(^)(UIImage * _Nullable image, YYImageCacheType type))block; /** Returns the image data associated with a given key. This method may blocks the calling thread until file read finished. @param key A string identifying the image. If nil, just return nil. @return The image data associated with key, or nil if no image is associated with key. */ - (nullable NSData *)getImageDataForKey:(NSString *)key; /** Asynchronously get the image data associated with a given key. @param key A string identifying the image. If nil, just return nil. @param block A completion block which will be called on main thread. */ - (void)getImageDataForKey:(NSString *)key withBlock:(void(^)(NSData * _Nullable imageData))block; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYImageCache.m ================================================ // // YYImageCache.m // YYWebImage // // Created by ibireme on 15/2/15. // Copyright (c) 2015 ibireme. 
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
//

#import "YYImageCache.h"
#import "YYImage.h"
#import "UIImage+YYWebImage.h"
// restored (stripped by extraction): the angle-bracket arguments of the
// __has_include()/#import pairs below — TODO confirm against upstream YYWebImage
#if __has_include(<YYImage/YYImage.h>)
#import <YYImage/YYImage.h>
#else
#import "YYImage.h"
#endif
#if __has_include(<YYCache/YYCache.h>)
#import <YYCache/YYCache.h>
#else
#import "YYCache.h"
#endif

/// Global queue used for disk I/O.
static inline dispatch_queue_t YYImageCacheIOQueue(void) {
    return dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
}

/// Global (low-priority) queue used for image decoding.
static inline dispatch_queue_t YYImageCacheDecodeQueue(void) {
    return dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);
}

@interface YYImageCache ()
- (NSUInteger)imageCost:(UIImage *)image;
- (UIImage *)imageFromData:(NSData *)data;
@end

@implementation YYImageCache

/// Memory-cache cost of an image: bytesPerRow * height of its backing CGImage.
/// Returns 1 for images with no CGImage (or a zero-sized one) so every entry
/// has a nonzero cost.
- (NSUInteger)imageCost:(UIImage *)image {
    CGImageRef cgImage = image.CGImage;
    if (!cgImage) return 1;
    CGFloat height = CGImageGetHeight(cgImage);
    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
    NSUInteger cost = bytesPerRow * height;
    if (cost == 0) cost = 1;
    return cost;
}

/// Decodes raw cached data into a UIImage, restoring the scale that was saved
/// as extended data alongside the blob (falls back to the screen scale).
- (UIImage *)imageFromData:(NSData *)data {
    NSData *scaleData = [YYDiskCache getExtendedDataFromObject:data];
    CGFloat scale = 0;
    if (scaleData) {
        scale = ((NSNumber *)[NSKeyedUnarchiver unarchiveObjectWithData:scaleData]).doubleValue;
    }
    if (scale <= 0) scale = [UIScreen mainScreen].scale;
    UIImage *image;
    if (_allowAnimatedImage) {
        image = [[YYImage alloc] initWithData:data scale:scale];
        if (_decodeForDisplay) image = [image yy_imageByDecoded];
    } else {
        // Animated formats are collapsed to their first frame.
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
        image = [decoder frameAtIndex:0 decodeForDisplay:_decodeForDisplay].image;
    }
    return image;
}

#pragma mark Public

+ (instancetype)sharedCache {
    static YYImageCache *cache = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        NSString *cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory,
                                                                   NSUserDomainMask, YES) firstObject];
        cachePath = [cachePath stringByAppendingPathComponent:@"com.ibireme.yykit"];
        cachePath = [cachePath stringByAppendingPathComponent:@"images"];
        cache = [[self alloc] initWithPath:cachePath];
    });
    return cache;
}

- (instancetype)init {
    // Programmer error: this class requires a path. The unreachable call after
    // @throw exists only to satisfy the designated-initializer analyzer.
    @throw [NSException exceptionWithName:@"YYImageCache init error"
                                   reason:@"YYImageCache must be initialized with a path. Use 'initWithPath:' instead."
                                 userInfo:nil];
    return [self initWithPath:@""];
}

- (instancetype)initWithPath:(NSString *)path {
    YYMemoryCache *memoryCache = [YYMemoryCache new];
    memoryCache.shouldRemoveAllObjectsOnMemoryWarning = YES;
    memoryCache.shouldRemoveAllObjectsWhenEnteringBackground = YES;
    memoryCache.countLimit = NSUIntegerMax;
    memoryCache.costLimit = NSUIntegerMax;
    memoryCache.ageLimit = 12 * 60 * 60;  // 12 hours

    YYDiskCache *diskCache = [[YYDiskCache alloc] initWithPath:path];
    // The disk cache stores raw NSData; archive/unarchive are identity.
    diskCache.customArchiveBlock = ^(id object) { return (NSData *)object; };
    diskCache.customUnarchiveBlock = ^(NSData *data) { return (id)data; };
    if (!memoryCache || !diskCache) return nil;

    self = [super init];
    _memoryCache = memoryCache;
    _diskCache = diskCache;
    _allowAnimatedImage = YES;
    _decodeForDisplay = YES;
    return self;
}

- (void)setImage:(UIImage *)image forKey:(NSString *)key {
    [self setImage:image imageData:nil forKey:key withType:YYImageCacheTypeAll];
}

- (void)setImage:(UIImage *)image
       imageData:(NSData *)imageData
          forKey:(NSString *)key
        withType:(YYImageCacheType)type {
    if (!key || (image == nil && imageData.length == 0)) return;

    __weak typeof(self) _self = self;
    if (type & YYImageCacheTypeMemory) {  // add to memory cache
        if (image) {
            if (image.yy_isDecodedForDisplay) {
                [_memoryCache setObject:image forKey:key withCost:[_self imageCost:image]];
            } else {
                // Decode off-thread before inserting.
                dispatch_async(YYImageCacheDecodeQueue(), ^{
                    __strong typeof(_self) self = _self;
                    if (!self) return;
                    [self.memoryCache setObject:[image yy_imageByDecoded]
                                         forKey:key
                                       withCost:[self imageCost:image]];
                });
            }
        } else if (imageData) {
            dispatch_async(YYImageCacheDecodeQueue(), ^{
                __strong typeof(_self) self = _self;
                if (!self) return;
                UIImage *newImage = [self imageFromData:imageData];
                [self.memoryCache setObject:newImage forKey:key withCost:[self imageCost:newImage]];
            });
        }
    }
    if (type & YYImageCacheTypeDisk) {  // add to disk cache
        if (imageData) {
            if (image) {
                // Save the image scale as extended data next to the blob.
                [YYDiskCache setExtendedData:[NSKeyedArchiver archivedDataWithRootObject:@(image.scale)]
                                    toObject:imageData];
            }
            [_diskCache setObject:imageData forKey:key];
        } else if (image) {
            dispatch_async(YYImageCacheIOQueue(), ^{
                __strong typeof(_self) self = _self;
                if (!self) return;
                NSData *data = [image yy_imageDataRepresentation];
                [YYDiskCache setExtendedData:[NSKeyedArchiver archivedDataWithRootObject:@(image.scale)]
                                    toObject:data];
                [self.diskCache setObject:data forKey:key];
            });
        }
    }
}

- (void)removeImageForKey:(NSString *)key {
    [self removeImageForKey:key withType:YYImageCacheTypeAll];
}

- (void)removeImageForKey:(NSString *)key withType:(YYImageCacheType)type {
    if (type & YYImageCacheTypeMemory) [_memoryCache removeObjectForKey:key];
    if (type & YYImageCacheTypeDisk) [_diskCache removeObjectForKey:key];
}

- (BOOL)containsImageForKey:(NSString *)key {
    return [self containsImageForKey:key withType:YYImageCacheTypeAll];
}

- (BOOL)containsImageForKey:(NSString *)key withType:(YYImageCacheType)type {
    if (type & YYImageCacheTypeMemory) {
        if ([_memoryCache containsObjectForKey:key]) return YES;
    }
    if (type & YYImageCacheTypeDisk) {
        // May block on file I/O.
        if ([_diskCache containsObjectForKey:key]) return YES;
    }
    return NO;
}

- (UIImage *)getImageForKey:(NSString *)key {
    return [self getImageForKey:key withType:YYImageCacheTypeAll];
}

- (UIImage *)getImageForKey:(NSString *)key withType:(YYImageCacheType)type {
    if (!key) return nil;
    if (type & YYImageCacheTypeMemory) {
        UIImage *image = [_memoryCache objectForKey:key];
        if (image) return image;
    }
    if (type & YYImageCacheTypeDisk) {
        NSData *data = (id)[_diskCache objectForKey:key];
        UIImage *image = [self imageFromData:data];
        if (image && (type & YYImageCacheTypeMemory)) {
            // Promote disk hit into the memory cache.
            [_memoryCache setObject:image forKey:key withCost:[self imageCost:image]];
        }
        return image;
    }
    return nil;
}

- (void)getImageForKey:(NSString *)key
              withType:(YYImageCacheType)type
             withBlock:(void (^)(UIImage *image, YYImageCacheType type))block {
    if (!block) return;
    // NOTE(review): the ivar accesses below capture self strongly for the
    // duration of the lookup (intentional in upstream; self cannot deallocate
    // mid-fetch). Also note the memory-cache insertion here passes no cost,
    // unlike the synchronous path above — verify against upstream before changing.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        UIImage *image = nil;
        if (type & YYImageCacheTypeMemory) {
            image = [_memoryCache objectForKey:key];
            if (image) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    block(image, YYImageCacheTypeMemory);
                });
                return;
            }
        }
        if (type & YYImageCacheTypeDisk) {
            NSData *data = (id)[_diskCache objectForKey:key];
            image = [self imageFromData:data];
            if (image) {
                [_memoryCache setObject:image forKey:key];
                dispatch_async(dispatch_get_main_queue(), ^{
                    block(image, YYImageCacheTypeDisk);
                });
                return;
            }
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            block(nil, YYImageCacheTypeNone);
        });
    });
}

- (NSData *)getImageDataForKey:(NSString *)key {
    // May block on file I/O.
    return (id)[_diskCache objectForKey:key];
}

- (void)getImageDataForKey:(NSString *)key
                 withBlock:(void (^)(NSData *imageData))block {
    if (!block) return;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *data = (id)[_diskCache objectForKey:key];
        dispatch_async(dispatch_get_main_queue(), ^{
            block(data);
        });
    });
}

@end


================================================
FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYWebImage.h
================================================
//
//  YYWebImage.h
//  YYWebImage
//
//  Created by ibireme on 15/2/23.
//  Copyright (c) 2015 ibireme.
//
//  This source code is licensed under the MIT-style license found in the
//  LICENSE file in the root directory of this source tree.
// #import #if __has_include() FOUNDATION_EXPORT double YYWebImageVersionNumber; FOUNDATION_EXPORT const unsigned char YYWebImageVersionString[]; #import #import #import #import #import #import #import #import #else #import "YYImageCache.h" #import "YYWebImageOperation.h" #import "YYWebImageManager.h" #import "UIImage+YYWebImage.h" #import "UIImageView+YYWebImage.h" #import "UIButton+YYWebImage.h" #import "CALayer+YYWebImage.h" #import "MKAnnotationView+YYWebImage.h" #endif #if __has_include() #import #elif __has_include() #import #import #import #import #import #else #import "YYImage.h" #import "YYFrameImage.h" #import "YYSpriteSheetImage.h" #import "YYImageCoder.h" #import "YYAnimatedImageView.h" #endif #if __has_include() #import #elif __has_include() #import #import #import #import #else #import "YYCache.h" #import "YYMemoryCache.h" #import "YYDiskCache.h" #import "YYKVStorage.h" #endif ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYWebImageManager.h ================================================ // // YYWebImageManager.h // YYWebImage // // Created by ibireme on 15/2/19. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #if __has_include() #import #else #import "YYImageCache.h" #endif @class YYWebImageOperation; NS_ASSUME_NONNULL_BEGIN /// The options to control image operation. typedef NS_OPTIONS(NSUInteger, YYWebImageOptions) { /// Show network activity on status bar when download image. YYWebImageOptionShowNetworkActivity = 1 << 0, /// Display progressive/interlaced/baseline image during download (same as web browser). YYWebImageOptionProgressive = 1 << 1, /// Display blurred progressive JPEG or interlaced PNG image during download. /// This will ignore baseline image for better user experience. 
YYWebImageOptionProgressiveBlur = 1 << 2, /// Use NSURLCache instead of YYImageCache. YYWebImageOptionUseNSURLCache = 1 << 3, /// Allows untrusted SSL ceriticates. YYWebImageOptionAllowInvalidSSLCertificates = 1 << 4, /// Allows background task to download image when app is in background. YYWebImageOptionAllowBackgroundTask = 1 << 5, /// Handles cookies stored in NSHTTPCookieStore. YYWebImageOptionHandleCookies = 1 << 6, /// Load the image from remote and refresh the image cache. YYWebImageOptionRefreshImageCache = 1 << 7, /// Do not load image from/to disk cache. YYWebImageOptionIgnoreDiskCache = 1 << 8, /// Do not change the view's image before set a new URL to it. YYWebImageOptionIgnorePlaceHolder = 1 << 9, /// Ignore image decoding. /// This may used for image downloading without display. YYWebImageOptionIgnoreImageDecoding = 1 << 10, /// Ignore multi-frame image decoding. /// This will handle the GIF/APNG/WebP/ICO image as single frame image. YYWebImageOptionIgnoreAnimatedImage = 1 << 11, /// Set the image to view with a fade animation. /// This will add a "fade" animation on image view's layer for better user experience. YYWebImageOptionSetImageWithFadeAnimation = 1 << 12, /// Do not set the image to the view when image fetch complete. /// You may set the image manually. YYWebImageOptionAvoidSetImage = 1 << 13, /// This flag will add the URL to a blacklist (in memory) when the URL fail to be downloaded, /// so the library won't keep trying. YYWebImageOptionIgnoreFailedURL = 1 << 14, }; /// Indicated where the image came from. typedef NS_ENUM(NSUInteger, YYWebImageFromType) { /// No value. YYWebImageFromNone = 0, /// Fetched from memory cache immediately. /// If you called "setImageWithURL:..." and the image is already in memory, /// then you will get this value at the same call. YYWebImageFromMemoryCacheFast, /// Fetched from memory cache. YYWebImageFromMemoryCache, /// Fetched from disk cache. 
YYWebImageFromDiskCache, /// Fetched from remote (web or file path). YYWebImageFromRemote, }; /// Indicated image fetch complete stage. typedef NS_ENUM(NSInteger, YYWebImageStage) { /// Incomplete, progressive image. YYWebImageStageProgress = -1, /// Cancelled. YYWebImageStageCancelled = 0, /// Finished (succeed or failed). YYWebImageStageFinished = 1, }; /** The block invoked in remote image fetch progress. @param receivedSize Current received size in bytes. @param expectedSize Expected total size in bytes (-1 means unknown). */ typedef void(^YYWebImageProgressBlock)(NSInteger receivedSize, NSInteger expectedSize); /** The block invoked before remote image fetch finished to do additional image process. @discussion This block will be invoked before `YYWebImageCompletionBlock` to give you a chance to do additional image process (such as resize or crop). If there's no need to transform the image, just return the `image` parameter. @example You can clip the image, blur it and add rounded corners with these code: ^(UIImage *image, NSURL *url) { // Maybe you need to create an @autoreleasepool to limit memory cost. image = [image yy_imageByResizeToSize:CGSizeMake(100, 100) contentMode:UIViewContentModeScaleAspectFill]; image = [image yy_imageByBlurRadius:20 tintColor:nil tintMode:kCGBlendModeNormal saturation:1.2 maskImage:nil]; image = [image yy_imageByRoundCornerRadius:5]; return image; } @param image The image fetched from url. @param url The image url (remote or local file path). @return The transformed image. */ typedef UIImage * _Nullable (^YYWebImageTransformBlock)(UIImage *image, NSURL *url); /** The block invoked when image fetch finished or cancelled. @param image The image. @param url The image url (remote or local file path). @param from Where the image came from. @param error Error during image fetching. @param finished If the operation is cancelled, this value is NO, otherwise YES. 
*/ typedef void (^YYWebImageCompletionBlock)(UIImage * _Nullable image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError * _Nullable error); /** A manager to create and manage web image operation. */ @interface YYWebImageManager : NSObject /** Returns global YYWebImageManager instance. @return YYWebImageManager shared instance. */ + (instancetype)sharedManager; /** Creates a manager with an image cache and operation queue. @param cache Image cache used by manager (pass nil to avoid image cache). @param queue The operation queue on which image operations are scheduled and run (pass nil to make the new operation start immediately without queue). @return A new manager. */ - (instancetype)initWithCache:(nullable YYImageCache *)cache queue:(nullable NSOperationQueue *)queue NS_DESIGNATED_INITIALIZER; - (instancetype)init UNAVAILABLE_ATTRIBUTE; + (instancetype)new UNAVAILABLE_ATTRIBUTE; /** Creates and returns a new image operation, the operation will start immediately. @param url The image url (remote or local file path). @param options The options to control image operation. @param progress Progress block which will be invoked on background thread (pass nil to avoid). @param transform Transform block which will be invoked on background thread (pass nil to avoid). @param completion Completion block which will be invoked on background thread (pass nil to avoid). @return A new image operation. */ - (nullable YYWebImageOperation *)requestImageWithURL:(NSURL *)url options:(YYWebImageOptions)options progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion; /** The image cache used by image operation. You can set it to nil to avoid image cache. */ @property (nullable, nonatomic, strong) YYImageCache *cache; /** The operation queue on which image operations are scheduled and run. 
You can set it to nil to make the new operation start immediately without queue. You can use this queue to control maximum number of concurrent operations, to obtain the status of the current operations, or to cancel all operations in this manager. */ @property (nullable, nonatomic, strong) NSOperationQueue *queue; /** The shared transform block to process image. Default is nil. When called `requestImageWithURL:options:progress:transform:completion` and the `transform` is nil, this block will be used. */ @property (nullable, nonatomic, copy) YYWebImageTransformBlock sharedTransformBlock; /** The image request timeout interval in seconds. Default is 15. */ @property (nonatomic) NSTimeInterval timeout; /** The username used by NSURLCredential, default is nil. */ @property (nullable, nonatomic, copy) NSString *username; /** The password used by NSURLCredential, default is nil. */ @property (nullable, nonatomic, copy) NSString *password; /** The image HTTP request header. Default is "Accept:image/webp,image/\*;q=0.8". */ @property (nullable, nonatomic, copy) NSDictionary *headers; /** A block which will be invoked for each image HTTP request to do additional HTTP header process. Default is nil. Use this block to add or remove HTTP header field for a specified URL. */ @property (nullable, nonatomic, copy) NSDictionary *(^headersFilter)(NSURL *url, NSDictionary * _Nullable header); /** A block which will be invoked for each image operation. Default is nil. Use this block to provide a custom image cache key for a specified URL. */ @property (nullable, nonatomic, copy) NSString *(^cacheKeyFilter)(NSURL *url); /** Returns the HTTP headers for a specified URL. @param url A specified URL. @return HTTP headers. */ - (nullable NSDictionary *)headersForURL:(NSURL *)url; /** Returns the cache key for a specified URL. @param url A specified URL @return Cache key used in YYImageCache. 
*/ - (NSString *)cacheKeyForURL:(NSURL *)url; /** Increments the number of active network requests. If this number was zero before incrementing, this will start animating the status bar network activity indicator. This method is thread safe. This method has no effect in App Extension. */ + (void)incrementNetworkActivityCount; /** Decrements the number of active network requests. If this number becomes zero after decrementing, this will stop animating the status bar network activity indicator. This method is thread safe. This method has no effect in App Extension. */ + (void)decrementNetworkActivityCount; /** Get current number of active network requests. This method is thread safe. This method has no effect in App Extension. */ + (NSInteger)currentNetworkActivityCount; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYWebImageManager.m ================================================ // // YYWebImageManager.m // YYWebImage // // Created by ibireme on 15/2/19. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "YYWebImageManager.h" #import "YYImageCache.h" #import "YYWebImageOperation.h" #import "YYImageCoder.h" #import #define kNetworkIndicatorDelay (1/30.0) /// Returns nil in App Extension. static UIApplication *_YYSharedApplication() { static BOOL isAppExtension = NO; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ Class cls = NSClassFromString(@"UIApplication"); if(!cls || ![cls respondsToSelector:@selector(sharedApplication)]) isAppExtension = YES; if ([[[NSBundle mainBundle] bundlePath] hasSuffix:@".appex"]) isAppExtension = YES; }); #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wundeclared-selector" return isAppExtension ? 
nil : [UIApplication performSelector:@selector(sharedApplication)]; #pragma clang diagnostic pop } @interface _YYWebImageApplicationNetworkIndicatorInfo : NSObject @property (nonatomic, assign) NSInteger count; @property (nonatomic, strong) NSTimer *timer; @end @implementation _YYWebImageApplicationNetworkIndicatorInfo @end @implementation YYWebImageManager + (instancetype)sharedManager { static YYWebImageManager *manager; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ YYImageCache *cache = [YYImageCache sharedCache]; NSOperationQueue *queue = [NSOperationQueue new]; if ([queue respondsToSelector:@selector(setQualityOfService:)]) { queue.qualityOfService = NSQualityOfServiceBackground; } manager = [[self alloc] initWithCache:cache queue:queue]; }); return manager; } - (instancetype)init { @throw [NSException exceptionWithName:@"YYWebImageManager init error" reason:@"Use the designated initializer to init." userInfo:nil]; return [self initWithCache:nil queue:nil]; } - (instancetype)initWithCache:(YYImageCache *)cache queue:(NSOperationQueue *)queue{ self = [super init]; if (!self) return nil; _cache = cache; _queue = queue; _timeout = 15.0; if (YYImageWebPAvailable()) { _headers = @{ @"Accept" : @"image/webp,image/*;q=0.8" }; } else { _headers = @{ @"Accept" : @"image/*;q=0.8" }; } return self; } - (YYWebImageOperation *)requestImageWithURL:(NSURL *)url options:(YYWebImageOptions)options progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url]; request.timeoutInterval = _timeout; request.HTTPShouldHandleCookies = (options & YYWebImageOptionHandleCookies) != 0; request.allHTTPHeaderFields = [self headersForURL:url]; request.HTTPShouldUsePipelining = YES; request.cachePolicy = (options & YYWebImageOptionUseNSURLCache) ? 
NSURLRequestUseProtocolCachePolicy : NSURLRequestReloadIgnoringLocalCacheData; YYWebImageOperation *operation = [[YYWebImageOperation alloc] initWithRequest:request options:options cache:_cache cacheKey:[self cacheKeyForURL:url] progress:progress transform:transform ? transform : _sharedTransformBlock completion:completion]; if (_username && _password) { operation.credential = [NSURLCredential credentialWithUser:_username password:_password persistence:NSURLCredentialPersistenceForSession]; } if (operation) { NSOperationQueue *queue = _queue; if (queue) { [queue addOperation:operation]; } else { [operation start]; } } return operation; } - (NSDictionary *)headersForURL:(NSURL *)url { if (!url) return nil; return _headersFilter ? _headersFilter(url, _headers) : _headers; } - (NSString *)cacheKeyForURL:(NSURL *)url { if (!url) return nil; return _cacheKeyFilter ? _cacheKeyFilter(url) : url.absoluteString; } #pragma mark Network Indicator + (_YYWebImageApplicationNetworkIndicatorInfo *)_networkIndicatorInfo { return objc_getAssociatedObject(self, @selector(_networkIndicatorInfo)); } + (void)_setNetworkIndicatorInfo:(_YYWebImageApplicationNetworkIndicatorInfo *)info { objc_setAssociatedObject(self, @selector(_networkIndicatorInfo), info, OBJC_ASSOCIATION_RETAIN); } + (void)_delaySetActivity:(NSTimer *)timer { UIApplication *app = _YYSharedApplication(); if (!app) return; NSNumber *visiable = timer.userInfo; if (app.networkActivityIndicatorVisible != visiable.boolValue) { [app setNetworkActivityIndicatorVisible:visiable.boolValue]; } [timer invalidate]; } + (void)_changeNetworkActivityCount:(NSInteger)delta { if (!_YYSharedApplication()) return; void (^block)() = ^{ _YYWebImageApplicationNetworkIndicatorInfo *info = [self _networkIndicatorInfo]; if (!info) { info = [_YYWebImageApplicationNetworkIndicatorInfo new]; [self _setNetworkIndicatorInfo:info]; } NSInteger count = info.count; count += delta; info.count = count; [info.timer invalidate]; info.timer = [NSTimer 
timerWithTimeInterval:kNetworkIndicatorDelay target:self selector:@selector(_delaySetActivity:) userInfo:@(info.count > 0) repeats:NO]; [[NSRunLoop mainRunLoop] addTimer:info.timer forMode:NSRunLoopCommonModes]; }; if ([NSThread isMainThread]) { block(); } else { dispatch_async(dispatch_get_main_queue(), block); } } + (void)incrementNetworkActivityCount { [self _changeNetworkActivityCount:1]; } + (void)decrementNetworkActivityCount { [self _changeNetworkActivityCount:-1]; } + (NSInteger)currentNetworkActivityCount { _YYWebImageApplicationNetworkIndicatorInfo *info = [self _networkIndicatorInfo]; return info.count; } @end ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYWebImageOperation.h ================================================ // // YYWebImageOperation.h // YYWebImage // // Created by ibireme on 15/2/15. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import #if __has_include() #import #import #else #import "YYImageCache.h" #import "YYWebImageManager.h" #endif NS_ASSUME_NONNULL_BEGIN /** The YYWebImageOperation class is an NSOperation subclass used to fetch image from URL request. @discussion It's an asynchronous operation. You typically execute it by adding it to an operation queue, or calls 'start' to execute it manually. When the operation is started, it will: 1. Get the image from the cache, if exist, return it with `completion` block. 2. Start an URL connection to fetch image from the request, invoke the `progress` to notify request progress (and invoke `completion` block to return the progressive image if enabled by progressive option). 3. Process the image by invoke the `transform` block. 4. Put the image to cache and return it with `completion` block. 
*/ @interface YYWebImageOperation : NSOperation @property (nonatomic, strong, readonly) NSURLRequest *request; ///< The image URL request. @property (nullable, nonatomic, strong, readonly) NSURLResponse *response; ///< The response for request. @property (nullable, nonatomic, strong, readonly) YYImageCache *cache; ///< The image cache. @property (nonatomic, strong, readonly) NSString *cacheKey; ///< The image cache key. @property (nonatomic, readonly) YYWebImageOptions options; ///< The operation's option. /** Whether the URL connection should consult the credential storage for authenticating the connection. Default is YES. @discussion This is the value that is returned in the `NSURLConnectionDelegate` method `-connectionShouldUseCredentialStorage:`. */ @property (nonatomic) BOOL shouldUseCredentialStorage; /** The credential used for authentication challenges in `-connection:didReceiveAuthenticationChallenge:`. @discussion This will be overridden by any shared credentials that exist for the username or password of the request URL, if present. */ @property (nullable, nonatomic, strong) NSURLCredential *credential; /** Creates and returns a new operation. You should call `start` to execute this operation, or you can add the operation to an operation queue. @param request The Image request. This value should not be nil. @param options A mask to specify options to use for this operation. @param cache An image cache. Pass nil to avoid image cache. @param cacheKey An image cache key. Pass nil to avoid image cache. @param progress A block invoked in image fetch progress. The block will be invoked in background thread. Pass nil to avoid it. @param transform A block invoked before image fetch finished to do additional image process. The block will be invoked in background thread. Pass nil to avoid it. @param completion A block invoked when image fetch finished or cancelled. The block will be invoked in background thread. Pass nil to avoid it. 
@return The image request opeartion, or nil if an error occurs. */ - (instancetype)initWithRequest:(NSURLRequest *)request options:(YYWebImageOptions)options cache:(nullable YYImageCache *)cache cacheKey:(nullable NSString *)cacheKey progress:(nullable YYWebImageProgressBlock)progress transform:(nullable YYWebImageTransformBlock)transform completion:(nullable YYWebImageCompletionBlock)completion NS_DESIGNATED_INITIALIZER; - (instancetype)init UNAVAILABLE_ATTRIBUTE; + (instancetype)new UNAVAILABLE_ATTRIBUTE; @end NS_ASSUME_NONNULL_END ================================================ FILE: MFPictureBrowserDemo/Pods/YYWebImage/YYWebImage/YYWebImageOperation.m ================================================ // // YYWebImageOperation.m // YYWebImage // // Created by ibireme on 15/2/15. // Copyright (c) 2015 ibireme. // // This source code is licensed under the MIT-style license found in the // LICENSE file in the root directory of this source tree. // #import "YYWebImageOperation.h" #import "UIImage+YYWebImage.h" #import #import #if __has_include() #import #else #import "YYImage.h" #endif #define MIN_PROGRESSIVE_TIME_INTERVAL 0.2 #define MIN_PROGRESSIVE_BLUR_TIME_INTERVAL 0.4 /// Returns nil in App Extension. static UIApplication *_YYSharedApplication() { static BOOL isAppExtension = NO; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ Class cls = NSClassFromString(@"UIApplication"); if(!cls || ![cls respondsToSelector:@selector(sharedApplication)]) isAppExtension = YES; if ([[[NSBundle mainBundle] bundlePath] hasSuffix:@".appex"]) isAppExtension = YES; }); #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wundeclared-selector" return isAppExtension ? nil : [UIApplication performSelector:@selector(sharedApplication)]; #pragma clang diagnostic pop } /// Returns YES if the right-bottom pixel is filled. 
static BOOL YYCGImageLastPixelFilled(CGImageRef image) { if (!image) return NO; size_t width = CGImageGetWidth(image); size_t height = CGImageGetHeight(image); if (width == 0 || height == 0) return NO; CGContextRef ctx = CGBitmapContextCreate(NULL, 1, 1, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrderDefault); if (!ctx) return NO; CGContextDrawImage(ctx, CGRectMake( -(int)width + 1, 0, width, height), image); uint8_t *bytes = CGBitmapContextGetData(ctx); BOOL isAlpha = bytes && bytes[0] == 0; CFRelease(ctx); return !isAlpha; } /// Returns JPEG SOS (Start Of Scan) Marker static NSData *JPEGSOSMarker() { // "Start Of Scan" Marker static NSData *marker = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ uint8_t bytes[2] = {0xFF, 0xDA}; marker = [NSData dataWithBytes:bytes length:2]; }); return marker; } static NSMutableSet *URLBlacklist; static dispatch_semaphore_t URLBlacklistLock; static void URLBlacklistInit() { static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ URLBlacklist = [NSMutableSet new]; URLBlacklistLock = dispatch_semaphore_create(1); }); } static BOOL URLBlackListContains(NSURL *url) { if (!url || url == (id)[NSNull null]) return NO; URLBlacklistInit(); dispatch_semaphore_wait(URLBlacklistLock, DISPATCH_TIME_FOREVER); BOOL contains = [URLBlacklist containsObject:url]; dispatch_semaphore_signal(URLBlacklistLock); return contains; } static void URLInBlackListAdd(NSURL *url) { if (!url || url == (id)[NSNull null]) return; URLBlacklistInit(); dispatch_semaphore_wait(URLBlacklistLock, DISPATCH_TIME_FOREVER); [URLBlacklist addObject:url]; dispatch_semaphore_signal(URLBlacklistLock); } /// A proxy used to hold a weak object. 
@interface _YYWebImageWeakProxy : NSProxy @property (nonatomic, weak, readonly) id target; - (instancetype)initWithTarget:(id)target; + (instancetype)proxyWithTarget:(id)target; @end @implementation _YYWebImageWeakProxy - (instancetype)initWithTarget:(id)target { _target = target; return self; } + (instancetype)proxyWithTarget:(id)target { return [[_YYWebImageWeakProxy alloc] initWithTarget:target]; } - (id)forwardingTargetForSelector:(SEL)selector { return _target; } - (void)forwardInvocation:(NSInvocation *)invocation { void *null = NULL; [invocation setReturnValue:&null]; } - (NSMethodSignature *)methodSignatureForSelector:(SEL)selector { return [NSObject instanceMethodSignatureForSelector:@selector(init)]; } - (BOOL)respondsToSelector:(SEL)aSelector { return [_target respondsToSelector:aSelector]; } - (BOOL)isEqual:(id)object { return [_target isEqual:object]; } - (NSUInteger)hash { return [_target hash]; } - (Class)superclass { return [_target superclass]; } - (Class)class { return [_target class]; } - (BOOL)isKindOfClass:(Class)aClass { return [_target isKindOfClass:aClass]; } - (BOOL)isMemberOfClass:(Class)aClass { return [_target isMemberOfClass:aClass]; } - (BOOL)conformsToProtocol:(Protocol *)aProtocol { return [_target conformsToProtocol:aProtocol]; } - (BOOL)isProxy { return YES; } - (NSString *)description { return [_target description]; } - (NSString *)debugDescription { return [_target debugDescription]; } @end @interface YYWebImageOperation() @property (readwrite, getter=isExecuting) BOOL executing; @property (readwrite, getter=isFinished) BOOL finished; @property (readwrite, getter=isCancelled) BOOL cancelled; @property (readwrite, getter=isStarted) BOOL started; @property (nonatomic, strong) NSRecursiveLock *lock; @property (nonatomic, strong) NSURLConnection *connection; @property (nonatomic, strong) NSMutableData *data; @property (nonatomic, assign) NSInteger expectedSize; @property (nonatomic, assign) UIBackgroundTaskIdentifier taskID; 
@property (nonatomic, assign) NSTimeInterval lastProgressiveDecodeTimestamp; @property (nonatomic, strong) YYImageDecoder *progressiveDecoder; @property (nonatomic, assign) BOOL progressiveIgnored; @property (nonatomic, assign) BOOL progressiveDetected; @property (nonatomic, assign) NSUInteger progressiveScanedLength; @property (nonatomic, assign) NSUInteger progressiveDisplayCount; @property (nonatomic, copy) YYWebImageProgressBlock progress; @property (nonatomic, copy) YYWebImageTransformBlock transform; @property (nonatomic, copy) YYWebImageCompletionBlock completion; @end @implementation YYWebImageOperation @synthesize executing = _executing; @synthesize finished = _finished; @synthesize cancelled = _cancelled; /// Network thread entry point. + (void)_networkThreadMain:(id)object { @autoreleasepool { [[NSThread currentThread] setName:@"com.ibireme.webimage.request"]; NSRunLoop *runLoop = [NSRunLoop currentRunLoop]; [runLoop addPort:[NSMachPort port] forMode:NSDefaultRunLoopMode]; [runLoop run]; } } /// Global image request network thread, used by NSURLConnection delegate. + (NSThread *)_networkThread { static NSThread *thread = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ thread = [[NSThread alloc] initWithTarget:self selector:@selector(_networkThreadMain:) object:nil]; if ([thread respondsToSelector:@selector(setQualityOfService:)]) { thread.qualityOfService = NSQualityOfServiceBackground; } [thread start]; }); return thread; } /// Global image queue, used for image reading and decoding. + (dispatch_queue_t)_imageQueue { #define MAX_QUEUE_COUNT 16 static int queueCount; static dispatch_queue_t queues[MAX_QUEUE_COUNT]; static dispatch_once_t onceToken; static int32_t counter = 0; dispatch_once(&onceToken, ^{ queueCount = (int)[NSProcessInfo processInfo].activeProcessorCount; queueCount = queueCount < 1 ? 1 : queueCount > MAX_QUEUE_COUNT ? 
MAX_QUEUE_COUNT : queueCount; if ([UIDevice currentDevice].systemVersion.floatValue >= 8.0) { for (NSUInteger i = 0; i < queueCount; i++) { dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_UTILITY, 0); queues[i] = dispatch_queue_create("com.ibireme.image.decode", attr); } } else { for (NSUInteger i = 0; i < queueCount; i++) { queues[i] = dispatch_queue_create("com.ibireme.image.decode", DISPATCH_QUEUE_SERIAL); dispatch_set_target_queue(queues[i], dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0)); } } }); int32_t cur = OSAtomicIncrement32(&counter); if (cur < 0) cur = -cur; return queues[(cur) % queueCount]; #undef MAX_QUEUE_COUNT } - (instancetype)init { @throw [NSException exceptionWithName:@"YYWebImageOperation init error" reason:@"YYWebImageOperation must be initialized with a request. Use the designated initializer to init." userInfo:nil]; return [self initWithRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@""]] options:0 cache:nil cacheKey:nil progress:nil transform:nil completion:nil]; } - (instancetype)initWithRequest:(NSURLRequest *)request options:(YYWebImageOptions)options cache:(YYImageCache *)cache cacheKey:(NSString *)cacheKey progress:(YYWebImageProgressBlock)progress transform:(YYWebImageTransformBlock)transform completion:(YYWebImageCompletionBlock)completion { self = [super init]; if (!self) return nil; if (!request) return nil; _request = request; _options = options; _cache = cache; _cacheKey = cacheKey ? 
cacheKey : request.URL.absoluteString; _shouldUseCredentialStorage = YES; _progress = progress; _transform = transform; _completion = completion; _executing = NO; _finished = NO; _cancelled = NO; _taskID = UIBackgroundTaskInvalid; _lock = [NSRecursiveLock new]; return self; } - (void)dealloc { [_lock lock]; if (_taskID != UIBackgroundTaskInvalid) { [_YYSharedApplication() endBackgroundTask:_taskID]; _taskID = UIBackgroundTaskInvalid; } if ([self isExecuting]) { self.cancelled = YES; self.finished = YES; if (_connection) { [_connection cancel]; if (![_request.URL isFileURL] && (_options & YYWebImageOptionShowNetworkActivity)) { [YYWebImageManager decrementNetworkActivityCount]; } } if (_completion) { @autoreleasepool { _completion(nil, _request.URL, YYWebImageFromNone, YYWebImageStageCancelled, nil); } } } [_lock unlock]; } - (void)_endBackgroundTask { [_lock lock]; if (_taskID != UIBackgroundTaskInvalid) { [_YYSharedApplication() endBackgroundTask:_taskID]; _taskID = UIBackgroundTaskInvalid; } [_lock unlock]; } #pragma mark - Runs in operation thread - (void)_finish { self.executing = NO; self.finished = YES; [self _endBackgroundTask]; } // runs on network thread - (void)_startOperation { if ([self isCancelled]) return; @autoreleasepool { // get image from cache if (_cache && !(_options & YYWebImageOptionUseNSURLCache) && !(_options & YYWebImageOptionRefreshImageCache)) { UIImage *image = [_cache getImageForKey:_cacheKey withType:YYImageCacheTypeMemory]; if (image) { [_lock lock]; if (![self isCancelled]) { if (_completion) _completion(image, _request.URL, YYWebImageFromMemoryCache, YYWebImageStageFinished, nil); } [self _finish]; [_lock unlock]; return; } if (!(_options & YYWebImageOptionIgnoreDiskCache)) { __weak typeof(self) _self = self; dispatch_async([self.class _imageQueue], ^{ __strong typeof(_self) self = _self; if (!self || [self isCancelled]) return; UIImage *image = [self.cache getImageForKey:self.cacheKey withType:YYImageCacheTypeDisk]; if (image) { 
[self.cache setImage:image imageData:nil forKey:self.cacheKey withType:YYImageCacheTypeMemory]; [self performSelector:@selector(_didReceiveImageFromDiskCache:) onThread:[self.class _networkThread] withObject:image waitUntilDone:NO]; } else { [self performSelector:@selector(_startRequest:) onThread:[self.class _networkThread] withObject:nil waitUntilDone:NO]; } }); return; } } } [self performSelector:@selector(_startRequest:) onThread:[self.class _networkThread] withObject:nil waitUntilDone:NO]; } // runs on network thread - (void)_startRequest:(id)object { if ([self isCancelled]) return; @autoreleasepool { if ((_options & YYWebImageOptionIgnoreFailedURL) && URLBlackListContains(_request.URL)) { NSError *error = [NSError errorWithDomain:NSURLErrorDomain code:NSURLErrorFileDoesNotExist userInfo:@{ NSLocalizedDescriptionKey : @"Failed to load URL, blacklisted." }]; [_lock lock]; if (![self isCancelled]) { if (_completion) _completion(nil, _request.URL, YYWebImageFromNone, YYWebImageStageFinished, error); } [self _finish]; [_lock unlock]; return; } if (_request.URL.isFileURL) { NSArray *keys = @[NSURLFileSizeKey]; NSDictionary *attr = [_request.URL resourceValuesForKeys:keys error:nil]; NSNumber *fileSize = attr[NSURLFileSizeKey]; _expectedSize = fileSize ? 
fileSize.unsignedIntegerValue : -1; } // request image from web [_lock lock]; if (![self isCancelled]) { _connection = [[NSURLConnection alloc] initWithRequest:_request delegate:[_YYWebImageWeakProxy proxyWithTarget:self]]; if (![_request.URL isFileURL] && (_options & YYWebImageOptionShowNetworkActivity)) { [YYWebImageManager incrementNetworkActivityCount]; } } [_lock unlock]; } } // runs on network thread, called from outer "cancel" - (void)_cancelOperation { @autoreleasepool { if (_connection) { if (![_request.URL isFileURL] && (_options & YYWebImageOptionShowNetworkActivity)) { [YYWebImageManager decrementNetworkActivityCount]; } } [_connection cancel]; _connection = nil; if (_completion) _completion(nil, _request.URL, YYWebImageFromNone, YYWebImageStageCancelled, nil); [self _endBackgroundTask]; } } // runs on network thread - (void)_didReceiveImageFromDiskCache:(UIImage *)image { @autoreleasepool { [_lock lock]; if (![self isCancelled]) { if (image) { if (_completion) _completion(image, _request.URL, YYWebImageFromDiskCache, YYWebImageStageFinished, nil); [self _finish]; } else { [self _startRequest:nil]; } } [_lock unlock]; } } - (void)_didReceiveImageFromWeb:(UIImage *)image { @autoreleasepool { [_lock lock]; if (![self isCancelled]) { if (_cache) { if (image || (_options & YYWebImageOptionRefreshImageCache)) { NSData *data = _data; dispatch_async([YYWebImageOperation _imageQueue], ^{ [_cache setImage:image imageData:data forKey:_cacheKey withType:YYImageCacheTypeAll]; }); } } _data = nil; NSError *error = nil; if (!image) { error = [NSError errorWithDomain:@"com.ibireme.image" code:-1 userInfo:@{ NSLocalizedDescriptionKey : @"Web image decode fail." }]; if (_options & YYWebImageOptionIgnoreFailedURL) { if (URLBlackListContains(_request.URL)) { error = [NSError errorWithDomain:NSURLErrorDomain code:NSURLErrorFileDoesNotExist userInfo:@{ NSLocalizedDescriptionKey : @"Failed to load URL, blacklisted." 
}]; } else { URLInBlackListAdd(_request.URL); } } } if (_completion) _completion(image, _request.URL, YYWebImageFromRemote, YYWebImageStageFinished, error); [self _finish]; } [_lock unlock]; } } #pragma mark - NSURLConnectionDelegate runs in operation thread - (BOOL)connectionShouldUseCredentialStorage:(NSURLConnection *)connection { return _shouldUseCredentialStorage; } - (void)connection:(NSURLConnection *)connection willSendRequestForAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge { @autoreleasepool { if ([challenge.protectionSpace.authenticationMethod isEqualToString:NSURLAuthenticationMethodServerTrust]) { if (!(_options & YYWebImageOptionAllowInvalidSSLCertificates) && [challenge.sender respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)]) { [challenge.sender performDefaultHandlingForAuthenticationChallenge:challenge]; } else { NSURLCredential *credential = [NSURLCredential credentialForTrust:challenge.protectionSpace.serverTrust]; [[challenge sender] useCredential:credential forAuthenticationChallenge:challenge]; } } else { if ([challenge previousFailureCount] == 0) { if (_credential) { [[challenge sender] useCredential:_credential forAuthenticationChallenge:challenge]; } else { [[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge]; } } else { [[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge]; } } } } - (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse { if (!cachedResponse) return cachedResponse; if (_options & YYWebImageOptionUseNSURLCache) { return cachedResponse; } else { // ignore NSURLCache return nil; } } - (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response { @autoreleasepool { NSError *error = nil; if ([response isKindOfClass:[NSHTTPURLResponse class]]) { NSHTTPURLResponse *httpResponse = (id) response; NSInteger statusCode = 
httpResponse.statusCode; if (statusCode >= 400 || statusCode == 304) { error = [NSError errorWithDomain:NSURLErrorDomain code:statusCode userInfo:nil]; } } if (error) { [_connection cancel]; [self connection:_connection didFailWithError:error]; } else { if (response.expectedContentLength) { _expectedSize = (NSInteger)response.expectedContentLength; if (_expectedSize < 0) _expectedSize = -1; } _data = [NSMutableData dataWithCapacity:_expectedSize > 0 ? _expectedSize : 0]; if (_progress) { [_lock lock]; if (![self isCancelled]) _progress(0, _expectedSize); [_lock unlock]; } } } } - (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data { @autoreleasepool { [_lock lock]; BOOL canceled = [self isCancelled]; [_lock unlock]; if (canceled) return; if (data) [_data appendData:data]; if (_progress) { [_lock lock]; if (![self isCancelled]) { _progress(_data.length, _expectedSize); } [_lock unlock]; } /*--------------------------- progressive ----------------------------*/ BOOL progressive = (_options & YYWebImageOptionProgressive) > 0; BOOL progressiveBlur = (_options & YYWebImageOptionProgressiveBlur) > 0; if (!_completion || !(progressive || progressiveBlur)) return; if (data.length <= 16) return; if (_expectedSize > 0 && data.length >= _expectedSize * 0.99) return; if (_progressiveIgnored) return; NSTimeInterval min = progressiveBlur ? 
MIN_PROGRESSIVE_BLUR_TIME_INTERVAL : MIN_PROGRESSIVE_TIME_INTERVAL; NSTimeInterval now = CACurrentMediaTime(); if (now - _lastProgressiveDecodeTimestamp < min) return; if (!_progressiveDecoder) { _progressiveDecoder = [[YYImageDecoder alloc] initWithScale:[UIScreen mainScreen].scale]; } [_progressiveDecoder updateData:_data final:NO]; if ([self isCancelled]) return; if (_progressiveDecoder.type == YYImageTypeUnknown || _progressiveDecoder.type == YYImageTypeWebP || _progressiveDecoder.type == YYImageTypeOther) { _progressiveDecoder = nil; _progressiveIgnored = YES; return; } if (progressiveBlur) { // only support progressive JPEG and interlaced PNG if (_progressiveDecoder.type != YYImageTypeJPEG && _progressiveDecoder.type != YYImageTypePNG) { _progressiveDecoder = nil; _progressiveIgnored = YES; return; } } if (_progressiveDecoder.frameCount == 0) return; if (!progressiveBlur) { YYImageFrame *frame = [_progressiveDecoder frameAtIndex:0 decodeForDisplay:YES]; if (frame.image) { [_lock lock]; if (![self isCancelled]) { _completion(frame.image, _request.URL, YYWebImageFromRemote, YYWebImageStageProgress, nil); _lastProgressiveDecodeTimestamp = now; } [_lock unlock]; } return; } else { if (_progressiveDecoder.type == YYImageTypeJPEG) { if (!_progressiveDetected) { NSDictionary *dic = [_progressiveDecoder framePropertiesAtIndex:0]; NSDictionary *jpeg = dic[(id)kCGImagePropertyJFIFDictionary]; NSNumber *isProg = jpeg[(id)kCGImagePropertyJFIFIsProgressive]; if (!isProg.boolValue) { _progressiveIgnored = YES; _progressiveDecoder = nil; return; } _progressiveDetected = YES; } NSInteger scanLength = (NSInteger)_data.length - (NSInteger)_progressiveScanedLength - 4; if (scanLength <= 2) return; NSRange scanRange = NSMakeRange(_progressiveScanedLength, scanLength); NSRange markerRange = [_data rangeOfData:JPEGSOSMarker() options:kNilOptions range:scanRange]; _progressiveScanedLength = _data.length; if (markerRange.location == NSNotFound) return; if ([self isCancelled]) 
return; } else if (_progressiveDecoder.type == YYImageTypePNG) { if (!_progressiveDetected) { NSDictionary *dic = [_progressiveDecoder framePropertiesAtIndex:0]; NSDictionary *png = dic[(id)kCGImagePropertyPNGDictionary]; NSNumber *isProg = png[(id)kCGImagePropertyPNGInterlaceType]; if (!isProg.boolValue) { _progressiveIgnored = YES; _progressiveDecoder = nil; return; } _progressiveDetected = YES; } } YYImageFrame *frame = [_progressiveDecoder frameAtIndex:0 decodeForDisplay:YES]; UIImage *image = frame.image; if (!image) return; if ([self isCancelled]) return; if (!YYCGImageLastPixelFilled(image.CGImage)) return; _progressiveDisplayCount++; CGFloat radius = 32; if (_expectedSize > 0) { radius *= 1.0 / (3 * _data.length / (CGFloat)_expectedSize + 0.6) - 0.25; } else { radius /= (_progressiveDisplayCount); } image = [image yy_imageByBlurRadius:radius tintColor:nil tintMode:0 saturation:1 maskImage:nil]; if (image) { [_lock lock]; if (![self isCancelled]) { _completion(image, _request.URL, YYWebImageFromRemote, YYWebImageStageProgress, nil); _lastProgressiveDecodeTimestamp = now; } [_lock unlock]; } } } } - (void)connectionDidFinishLoading:(NSURLConnection *)connection { @autoreleasepool { [_lock lock]; _connection = nil; if (![self isCancelled]) { __weak typeof(self) _self = self; dispatch_async([self.class _imageQueue], ^{ __strong typeof(_self) self = _self; if (!self) return; BOOL shouldDecode = (self.options & YYWebImageOptionIgnoreImageDecoding) == 0; BOOL allowAnimation = (self.options & YYWebImageOptionIgnoreAnimatedImage) == 0; UIImage *image; BOOL hasAnimation = NO; if (allowAnimation) { image = [[YYImage alloc] initWithData:self.data scale:[UIScreen mainScreen].scale]; if (shouldDecode) image = [image yy_imageByDecoded]; if ([((YYImage *)image) animatedImageFrameCount] > 1) { hasAnimation = YES; } } else { YYImageDecoder *decoder = [YYImageDecoder decoderWithData:self.data scale:[UIScreen mainScreen].scale]; image = [decoder frameAtIndex:0 
decodeForDisplay:shouldDecode].image; } /* If the image has animation, save the original image data to disk cache. If the image is not PNG or JPEG, re-encode the image to PNG or JPEG for better decoding performance. */ YYImageType imageType = YYImageDetectType((__bridge CFDataRef)self.data); switch (imageType) { case YYImageTypeJPEG: case YYImageTypeGIF: case YYImageTypePNG: case YYImageTypeWebP: { // save to disk cache if (!hasAnimation) { if (imageType == YYImageTypeGIF || imageType == YYImageTypeWebP) { self.data = nil; // clear the data, re-encode for disk cache } } } break; default: { self.data = nil; // clear the data, re-encode for disk cache } break; } if ([self isCancelled]) return; if (self.transform && image) { UIImage *newImage = self.transform(image, self.request.URL); if (newImage != image) { self.data = nil; } image = newImage; if ([self isCancelled]) return; } [self performSelector:@selector(_didReceiveImageFromWeb:) onThread:[self.class _networkThread] withObject:image waitUntilDone:NO]; }); if (![self.request.URL isFileURL] && (self.options & YYWebImageOptionShowNetworkActivity)) { [YYWebImageManager decrementNetworkActivityCount]; } } [_lock unlock]; } } - (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error { @autoreleasepool { [_lock lock]; if (![self isCancelled]) { if (_completion) { _completion(nil, _request.URL, YYWebImageFromNone, YYWebImageStageFinished, error); } _connection = nil; _data = nil; if (![_request.URL isFileURL] && (_options & YYWebImageOptionShowNetworkActivity)) { [YYWebImageManager decrementNetworkActivityCount]; } [self _finish]; if (_options & YYWebImageOptionIgnoreFailedURL) { if (error.code != NSURLErrorNotConnectedToInternet && error.code != NSURLErrorCancelled && error.code != NSURLErrorTimedOut && error.code != NSURLErrorUserCancelledAuthentication) { URLInBlackListAdd(_request.URL); } } } [_lock unlock]; } } #pragma mark - Override NSOperation - (void)start { @autoreleasepool { [_lock 
lock]; self.started = YES; if ([self isCancelled]) { [self performSelector:@selector(_cancelOperation) onThread:[[self class] _networkThread] withObject:nil waitUntilDone:NO modes:@[NSDefaultRunLoopMode]]; self.finished = YES; } else if ([self isReady] && ![self isFinished] && ![self isExecuting]) { if (!_request) { self.finished = YES; if (_completion) { NSError *error = [NSError errorWithDomain:NSURLErrorDomain code:NSURLErrorFileDoesNotExist userInfo:@{NSLocalizedDescriptionKey:@"request in nil"}]; _completion(nil, _request.URL, YYWebImageFromNone, YYWebImageStageFinished, error); } } else { self.executing = YES; [self performSelector:@selector(_startOperation) onThread:[[self class] _networkThread] withObject:nil waitUntilDone:NO modes:@[NSDefaultRunLoopMode]]; if ((_options & YYWebImageOptionAllowBackgroundTask) && _YYSharedApplication()) { __weak __typeof__ (self) _self = self; if (_taskID == UIBackgroundTaskInvalid) { _taskID = [_YYSharedApplication() beginBackgroundTaskWithExpirationHandler:^{ __strong __typeof (_self) self = _self; if (self) { [self cancel]; self.finished = YES; } }]; } } } } [_lock unlock]; } } - (void)cancel { [_lock lock]; if (![self isCancelled]) { [super cancel]; self.cancelled = YES; if ([self isExecuting]) { self.executing = NO; [self performSelector:@selector(_cancelOperation) onThread:[[self class] _networkThread] withObject:nil waitUntilDone:NO modes:@[NSDefaultRunLoopMode]]; } if (self.started) { self.finished = YES; } } [_lock unlock]; } - (void)setExecuting:(BOOL)executing { [_lock lock]; if (_executing != executing) { [self willChangeValueForKey:@"isExecuting"]; _executing = executing; [self didChangeValueForKey:@"isExecuting"]; } [_lock unlock]; } - (BOOL)isExecuting { [_lock lock]; BOOL executing = _executing; [_lock unlock]; return executing; } - (void)setFinished:(BOOL)finished { [_lock lock]; if (_finished != finished) { [self willChangeValueForKey:@"isFinished"]; _finished = finished; [self 
didChangeValueForKey:@"isFinished"]; } [_lock unlock]; } - (BOOL)isFinished { [_lock lock]; BOOL finished = _finished; [_lock unlock]; return finished; } - (void)setCancelled:(BOOL)cancelled { [_lock lock]; if (_cancelled != cancelled) { [self willChangeValueForKey:@"isCancelled"]; _cancelled = cancelled; [self didChangeValueForKey:@"isCancelled"]; } [_lock unlock]; } - (BOOL)isCancelled { [_lock lock]; BOOL cancelled = _cancelled; [_lock unlock]; return cancelled; } - (BOOL)isConcurrent { return YES; } - (BOOL)isAsynchronous { return YES; } + (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key { if ([key isEqualToString:@"isExecuting"] || [key isEqualToString:@"isFinished"] || [key isEqualToString:@"isCancelled"]) { return NO; } return [super automaticallyNotifiesObserversForKey:key]; } - (NSString *)description { NSMutableString *string = [NSMutableString stringWithFormat:@"<%@: %p ",self.class, self]; [string appendFormat:@" executing:%@", [self isExecuting] ? @"YES" : @"NO"]; [string appendFormat:@" finished:%@", [self isFinished] ? @"YES" : @"NO"]; [string appendFormat:@" cancelled:%@", [self isCancelled] ? @"YES" : @"NO"]; [string appendString:@">"]; return string; } @end ================================================ FILE: README.md ================================================ # [废弃]MFPictureBrowser 类似于<即刻>应用的图片浏览器效果(支持PNG, JPG, GIF, WebP). 
![DUB](https://img.shields.io/dub/l/vibe-d.svg)
![Total-downloads](https://img.shields.io/cocoapods/dt/MFPictureBrowser.svg)
![Version](https://img.shields.io/cocoapods/v/MFPictureBrowser.svg?style=flat)
![Platform](https://img.shields.io/cocoapods/p/MFPictureBrowser.svg?style=flat)
![Language](https://img.shields.io/badge/language-objectivec-blue.svg)

## 效果图

## 集成方式

- CocoaPods

```
pod 'MFPictureBrowser'
```

## 使用方式

- 导入

```objc
#import <MFPictureBrowser/MFPictureBrowser.h>
```

- 初始化并设置代理

```objc
MFPictureBrowser *browser = [[MFPictureBrowser alloc] init];
browser.delegate = self;
```

- 展示

```objc
//展示图片
[browser showImageFromView:(fromView) picturesCount:(picturesCount) currentPictureIndex:(currentPictureIndex)];
```

- 实现代理方法

```objc
- (FLAnimatedImageView *)pictureView:(MFPictureBrowser *)pictureBrowser imageViewAtIndex:(NSInteger)index {
    ...
}
- (id)pictureBrowser:(MFPictureBrowser *)pictureBrowser pictureModelAtIndex:(NSInteger)index {
    ...
}
```

具体使用方式参见 Demo

## 感谢

[ESPictureBrowser](https://github.com/EnjoySR/ESPictureBrowser)

[YYWebImage](https://github.com/ibireme/YYWebImage)