[
  {
    "path": "BlackCamera/SCBlackCameraDetector.h",
    "content": "//\n//  SCBlackCameraDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 24/01/2018.\n//\n\n#import \"SCBlackCameraReporter.h\"\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n@class SCBlackCameraNoOutputDetector;\n\n@interface SCBlackCameraDetector : NSObject\n\n@property (nonatomic, strong) SCBlackCameraNoOutputDetector *blackCameraNoOutputDetector;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;\n\n// CameraView visible/invisible\n- (void)onCameraViewVisible:(BOOL)visible;\n\n- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)touch;\n\n// Call this when [AVCaptureSession startRunning] is called\n- (void)sessionWillCallStartRunning;\n- (void)sessionDidCallStartRunning;\n\n// Call this when [AVCaptureSession stopRunning] is called\n- (void)sessionWillCallStopRunning;\n- (void)sessionDidCallStopRunning;\n\n// Call this when [AVCaptureSession commitConfiguration] is called\n- (void)sessionWillCommitConfiguration;\n- (void)sessionDidCommitConfiguration;\n\n- (void)sessionDidChangeIsRunning:(BOOL)running;\n\n// For CapturePreview visibility detector\n- (void)capturePreviewDidBecomeVisible:(BOOL)visible;\n\n/**\n Marks the start of recreating the capture session.\n When we fix black camera by creating a new session, some detectors may report black camera because\n [AVCaptureSession stopRunning] is called on the old AVCaptureSession. Telling the detectors that the session is\n being recreated lets them treat that stopRunning call as expected.\n */\n- (void)sessionWillRecreate;\n/**\n Marks the end of recreating the capture session.\n */\n- (void)sessionDidRecreate;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraDetector.m",
    "content": "//\n//  SCBlackCameraDetector.m\n//  Snapchat\n//\n//  Created by Derek Wang on 24/01/2018.\n//\n\n#import \"SCBlackCameraDetector.h\"\n\n#import \"SCBlackCameraNoOutputDetector.h\"\n#import \"SCBlackCameraPreviewDetector.h\"\n#import \"SCBlackCameraRunningDetector.h\"\n#import \"SCBlackCameraSessionBlockDetector.h\"\n#import \"SCBlackCameraViewDetector.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n#if !TARGET_IPHONE_SIMULATOR\nstatic char *const kSCBlackCameraDetectorQueueLabel = \"com.snapchat.black-camera-detector\";\n#endif\n@interface SCBlackCameraDetector () {\n    BOOL _sessionIsRunning;\n    BOOL _cameraIsVisible;\n    BOOL _previewIsVisible;\n}\n@property (nonatomic, strong) SCQueuePerformer *queuePerformer;\n@property (nonatomic, strong) SCBlackCameraViewDetector *cameraViewDetector;\n@property (nonatomic, strong) SCBlackCameraRunningDetector *sessionRunningDetector;\n@property (nonatomic, strong) SCBlackCameraPreviewDetector *previewDetector;\n@property (nonatomic, strong) SCBlackCameraSessionBlockDetector *sessionBlockDetector;\n\n@end\n\n@implementation SCBlackCameraDetector\n\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator\n{\n#if !TARGET_IPHONE_SIMULATOR\n\n    self = [super init];\n    if (self) {\n        _queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCBlackCameraDetectorQueueLabel\n                                                 qualityOfService:QOS_CLASS_BACKGROUND\n                                                        queueType:DISPATCH_QUEUE_SERIAL\n                                                          context:SCQueuePerformerContextCamera];\n\n        SCBlackCameraReporter *reporter = [[SCBlackCameraReporter alloc] initWithTicketCreator:ticketCreator];\n        _cameraViewDetector = [[SCBlackCameraViewDetector alloc] initWithPerformer:_queuePerformer reporter:reporter];\n        _sessionRunningDetector =\n            [[SCBlackCameraRunningDetector alloc] 
initWithPerformer:_queuePerformer reporter:reporter];\n        _previewDetector = [[SCBlackCameraPreviewDetector alloc] initWithPerformer:_queuePerformer reporter:reporter];\n        _sessionBlockDetector = [[SCBlackCameraSessionBlockDetector alloc] initWithReporter:reporter];\n        _blackCameraNoOutputDetector = [[SCBlackCameraNoOutputDetector alloc] initWithReporter:reporter];\n    }\n    return self;\n#else\n    return nil;\n#endif\n}\n\n#pragma mark - Camera view visibility detector\n- (void)onCameraViewVisible:(BOOL)visible\n{\n    SC_GUARD_ELSE_RETURN(visible != _cameraIsVisible);\n    _cameraIsVisible = visible;\n    [_cameraViewDetector onCameraViewVisible:visible];\n}\n\n- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)gesture\n{\n    [_cameraViewDetector onCameraViewVisibleWithTouch:gesture];\n}\n\n#pragma mark - Track [AVCaptureSession startRunning] call\n- (void)sessionWillCallStartRunning\n{\n    [_cameraViewDetector sessionWillCallStartRunning];\n    [_sessionBlockDetector sessionWillCallStartRunning];\n}\n\n- (void)sessionDidCallStartRunning\n{\n    [_sessionRunningDetector sessionDidCallStartRunning];\n    [_sessionBlockDetector sessionDidCallStartRunning];\n}\n\n#pragma mark - Track [AVCaptureSession stopRunning] call\n- (void)sessionWillCallStopRunning\n{\n    [_cameraViewDetector sessionWillCallStopRunning];\n    [_sessionRunningDetector sessionWillCallStopRunning];\n}\n\n- (void)sessionDidCallStopRunning\n{\n}\n\n- (void)sessionDidChangeIsRunning:(BOOL)running\n{\n    SC_GUARD_ELSE_RETURN(running != _sessionIsRunning);\n    _sessionIsRunning = running;\n    [_sessionRunningDetector sessionDidChangeIsRunning:running];\n    [_previewDetector sessionDidChangeIsRunning:running];\n}\n\n#pragma mark - Capture preview visibility detector\n- (void)capturePreviewDidBecomeVisible:(BOOL)visible\n{\n    SC_GUARD_ELSE_RETURN(visible != _previewIsVisible);\n    _previewIsVisible = visible;\n    [_previewDetector 
capturePreviewDidBecomeVisible:visible];\n}\n\n#pragma mark - AVCaptureSession block detector\n- (void)sessionWillCommitConfiguration\n{\n    [_sessionBlockDetector sessionWillCommitConfiguration];\n}\n\n- (void)sessionDidCommitConfiguration\n{\n    [_sessionBlockDetector sessionDidCommitConfiguration];\n}\n\n- (void)sessionWillRecreate\n{\n    [_cameraViewDetector sessionWillRecreate];\n}\n\n- (void)sessionDidRecreate\n{\n    [_cameraViewDetector sessionDidRecreate];\n}\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraNoOutputDetector.h",
    "content": "//\n//  SCBlackCameraNoOutputDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 05/12/2017.\n//\n\n#import \"SCManagedCapturerListener.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCBlackCameraNoOutputDetector, SCBlackCameraReporter;\n@protocol SCManiphestTicketCreator;\n\n@protocol SCBlackCameraDetectorDelegate\n- (void)detector:(SCBlackCameraNoOutputDetector *)detector didDetectBlackCamera:(id<SCCapturer>)capture;\n@end\n\n@interface SCBlackCameraNoOutputDetector : NSObject <SCManagedVideoDataSourceListener, SCManagedCapturerListener>\n\n@property (nonatomic, weak) id<SCBlackCameraDetectorDelegate> delegate;\n- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraNoOutputDetector.m",
    "content": "//\n//  SCBlackCameraDetectorNoOutput.m\n//  Snapchat\n//\n//  Created by Derek Wang on 05/12/2017.\n//\n//  This detector is used to detect the case that session is running, but there is no sample buffer output\n\n#import \"SCBlackCameraNoOutputDetector.h\"\n\n#import \"SCBlackCameraReporter.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\nstatic CGFloat const kShortCheckingDelay = 0.5f;\nstatic CGFloat const kLongCheckingDelay = 3.0f;\nstatic char *const kSCBlackCameraDetectorQueueLabel = \"com.snapchat.black-camera-detector\";\n\n@interface SCBlackCameraNoOutputDetector () {\n    BOOL _sampleBufferReceived;\n    BOOL _blackCameraDetected;\n    // Whether we receive first frame after we detected black camera, that's maybe because the checking delay is too\n    // short, and we will switch to kLongCheckingDelay next time we do the checking\n    BOOL _blackCameraRecovered;\n    // Whether checking is scheduled, to avoid duplicated checking\n    BOOL _checkingScheduled;\n    // Whether AVCaptureSession is stopped, if stopped, we don't need to check black camera any more\n    // It is set on main thread, read on background queue\n    BOOL _sessionStoppedRunning;\n}\n@property (nonatomic) SCQueuePerformer *queuePerformer;\n@property (nonatomic) SCBlackCameraReporter *reporter;\n@end\n\n@implementation SCBlackCameraNoOutputDetector\n\n- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter\n{\n    self = [super init];\n    if (self) {\n        _queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCBlackCameraDetectorQueueLabel\n                                                 qualityOfService:QOS_CLASS_BACKGROUND\n                                                        
queueType:DISPATCH_QUEUE_SERIAL\n                                                          context:SCQueuePerformerContextCamera];\n        _reporter = reporter;\n    }\n    return self;\n}\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    // The block is very light-weight\n    [self.queuePerformer perform:^{\n        if (_blackCameraDetected) {\n            // Detected a black camera case\n            _blackCameraDetected = NO;\n            _blackCameraRecovered = YES;\n            SCLogCoreCameraInfo(@\"[BlackCamera] Black camera recovered\");\n            if (SCExperimentWithBlackCameraReporting()) {\n                [[SCLogger sharedInstance] logUnsampledEvent:KSCCameraBlackCamera\n                                                  parameters:@{\n                                                      @\"type\" : @\"RECOVERED\"\n                                                  }\n                                            secretParameters:nil\n                                                     metrics:nil];\n            }\n        }\n\n        // Received buffer!\n        _sampleBufferReceived = YES;\n    }];\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state\n{\n    SCAssertMainThread();\n    if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {\n        SCLogCoreCameraInfo(@\"[BlackCamera] In background, skip checking\");\n        return;\n    }\n    _sessionStoppedRunning = NO;\n    [self.queuePerformer perform:^{\n        SCTraceODPCompatibleStart(2);\n        if (_checkingScheduled) {\n            SCLogCoreCameraInfo(@\"[BlackCamera] Checking is scheduled, skip\");\n            return;\n        }\n        if (_sessionStoppedRunning) {\n            
SCLogCoreCameraInfo(@\"[BlackCamera] AVCaptureSession stopped, should not check\");\n            return;\n        }\n        _sampleBufferReceived = NO;\n        if (_blackCameraRecovered) {\n            SCLogCoreCameraInfo(@\"[BlackCamera] Last black camera recovered, let's wait longer to check this time\");\n        }\n        SCLogCoreCameraInfo(@\"[BlackCamera] Schedule black camera checking\");\n        [self.queuePerformer perform:^{\n            SCTraceODPCompatibleStart(2);\n            if (!_sessionStoppedRunning) {\n                if (!_sampleBufferReceived) {\n                    _blackCameraDetected = YES;\n                    [_reporter reportBlackCameraWithCause:SCBlackCameraNoOutputData];\n                    [self.delegate detector:self didDetectBlackCamera:managedCapturer];\n                } else {\n                    SCLogCoreCameraInfo(@\"[BlackCamera] No black camera\");\n                    _blackCameraDetected = NO;\n                }\n            } else {\n                SCLogCoreCameraInfo(@\"[BlackCamera] AVCaptureSession stopped\");\n                _blackCameraDetected = NO;\n            }\n            _blackCameraRecovered = NO;\n            _checkingScheduled = NO;\n        }\n                               after:_blackCameraRecovered ? kLongCheckingDelay : kShortCheckingDelay];\n        _checkingScheduled = YES;\n    }];\n}\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state\n{\n    SCAssertMainThread();\n    _sessionStoppedRunning = YES;\n    [self.queuePerformer perform:^{\n        SCTraceODPCompatibleStart(2);\n        _sampleBufferReceived = NO;\n    }];\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraPreviewDetector.h",
    "content": "//\n//  SCBlackCameraPreviewDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 25/01/2018.\n//\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer, SCBlackCameraReporter;\n@protocol SCManiphestTicketCreator;\n\n@interface SCBlackCameraPreviewDetector : NSObject\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;\n\n- (void)sessionDidChangeIsRunning:(BOOL)running;\n- (void)capturePreviewDidBecomeVisible:(BOOL)visible;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraPreviewDetector.m",
    "content": "//\n//  SCBlackCameraPreviewDetector.m\n//  Snapchat\n//\n//  Created by Derek Wang on 25/01/2018.\n//\n\n#import \"SCBlackCameraPreviewDetector.h\"\n\n#import \"SCBlackCameraReporter.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCCrashLogger/SCCrashLogger.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n\n// Check whether preview is visible when AVCaptureSession is running\nstatic CGFloat const kSCBlackCameraCheckingDelay = 0.5;\n\n@interface SCBlackCameraPreviewDetector () {\n    BOOL _previewVisible;\n    dispatch_block_t _checkingBlock;\n}\n@property (nonatomic) SCQueuePerformer *queuePerformer;\n@property (nonatomic) SCBlackCameraReporter *reporter;\n\n@end\n\n@implementation SCBlackCameraPreviewDetector\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter\n{\n    self = [super init];\n    if (self) {\n        _queuePerformer = performer;\n        _reporter = reporter;\n    }\n    return self;\n}\n\n- (void)capturePreviewDidBecomeVisible:(BOOL)visible\n{\n    [_queuePerformer perform:^{\n        _previewVisible = visible;\n    }];\n}\n\n- (void)sessionDidChangeIsRunning:(BOOL)running\n{\n    if (running) {\n        [self _scheduleCheck];\n    } else {\n        [_queuePerformer perform:^{\n            if (_checkingBlock) {\n                dispatch_block_cancel(_checkingBlock);\n                _checkingBlock = nil;\n            }\n        }];\n    }\n}\n\n- (void)_scheduleCheck\n{\n    [_queuePerformer perform:^{\n        @weakify(self);\n        _checkingBlock = dispatch_block_create(0, ^{\n            @strongify(self);\n            SC_GUARD_ELSE_RETURN(self);\n            self->_checkingBlock = nil;\n            [self _checkPreviewState];\n        });\n        [_queuePerformer perform:_checkingBlock after:kSCBlackCameraCheckingDelay];\n    }];\n}\n\n- (void)_checkPreviewState\n{\n    if 
(!_previewVisible) {\n        runOnMainThreadAsynchronously(^{\n            // Make sure the app is in foreground\n            SC_GUARD_ELSE_RETURN([UIApplication sharedApplication].applicationState == UIApplicationStateActive);\n\n            SCBlackCameraCause cause =\n                SCDeviceSupportsMetal() ? SCBlackCameraRenderingPaused : SCBlackCameraPreviewIsHidden;\n            [_reporter reportBlackCameraWithCause:cause];\n            [_reporter fileShakeTicketWithCause:cause];\n        });\n    }\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraReporter.h",
    "content": "//\n//  SCBlackCameraReporter.h\n//  Snapchat\n//\n//  Created by Derek Wang on 09/01/2018.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\ntypedef NS_ENUM(NSInteger, SCBlackCameraCause) {\n    SCBlackCameraStartRunningNotCalled,       // 1. View is visible, but session startRunning is not called\n    SCBlackCameraSessionNotRunning,           // 2. Session startRunning is called, but isRunning is still false\n    SCBlackCameraRenderingPaused,             // 3.1 View is visible, but capture preview rendering is paused\n    SCBlackCameraPreviewIsHidden,             // 3.2 For non-metal devices, capture preview is hidden\n    SCBlackCameraSessionStartRunningBlocked,  // 4.1 AVCaptureSession is blocked at startRunning\n    SCBlackCameraSessionConfigurationBlocked, // 4.2 AVCaptureSession is blocked at commitConfiguration\n\n    SCBlackCameraNoOutputData, // 5. Session is running, but no data output\n};\n\n@protocol SCManiphestTicketCreator;\n\n@interface SCBlackCameraReporter : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;\n\n- (NSString *)causeNameFor:(SCBlackCameraCause)cause;\n\n- (void)reportBlackCameraWithCause:(SCBlackCameraCause)cause;\n- (void)fileShakeTicketWithCause:(SCBlackCameraCause)cause;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraReporter.m",
    "content": "//\n//  SCBlackCameraReporter.m\n//  Snapchat\n//\n//  Created by Derek Wang on 09/01/2018.\n//\n\n#import \"SCBlackCameraReporter.h\"\n\n#import \"SCManiphestTicketCreator.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCLogHelper.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\n@interface SCBlackCameraReporter ()\n\n@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;\n\n@end\n\n@implementation SCBlackCameraReporter\n\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator\n{\n    if (self = [super init]) {\n        _ticketCreator = ticketCreator;\n    }\n    return self;\n}\n\n- (NSString *)causeNameFor:(SCBlackCameraCause)cause\n{\n    switch (cause) {\n    case SCBlackCameraStartRunningNotCalled:\n        return @\"StartRunningNotCalled\";\n    case SCBlackCameraSessionNotRunning:\n        return @\"SessionNotRunning\";\n    case SCBlackCameraRenderingPaused:\n        return @\"RenderingPause\";\n    case SCBlackCameraPreviewIsHidden:\n        return @\"PreviewIsHidden\";\n    case SCBlackCameraSessionStartRunningBlocked:\n        return @\"SessionStartRunningBlocked\";\n    case SCBlackCameraSessionConfigurationBlocked:\n        return @\"SessionConfigurationBlocked\";\n    case SCBlackCameraNoOutputData:\n        return @\"NoOutputData\";\n    default:\n        SCAssert(NO, @\"illegate cause\");\n        break;\n    }\n    return nil;\n}\n\n- (void)reportBlackCameraWithCause:(SCBlackCameraCause)cause\n{\n    NSString *causeStr = [self causeNameFor:cause];\n    SCLogCoreCameraError(@\"[BlackCamera] Detected black camera, cause: %@\", causeStr);\n\n    NSDictionary *parameters = @{ @\"type\" : @\"DETECTED\", @\"cause\" : causeStr };\n\n    [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];\n\n    if 
(SCExperimentWithBlackCameraReporting()) {\n        [[SCLogger sharedInstance] logUnsampledEvent:KSCCameraBlackCamera\n                                          parameters:parameters\n                                    secretParameters:nil\n                                             metrics:nil];\n    }\n}\n\n- (void)fileShakeTicketWithCause:(SCBlackCameraCause)cause\n{\n    if (SCExperimentWithBlackCameraExceptionLogging()) {\n        // Log exception with auto S2R\n        NSString *errMsg =\n            [NSString sc_stringWithFormat:@\"[BlackCamera] Detected black camera, cause: %@\", [self causeNameFor:cause]];\n        [_ticketCreator createAndFile:nil creationTime:0 description:errMsg email:nil project:@\"Camera\" subproject:nil];\n    }\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraRunningDetector.h",
    "content": "//\n//  SCBlackCameraRunningDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 30/01/2018.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer, SCBlackCameraReporter;\n@protocol SCManiphestTicketCreator;\n\n@interface SCBlackCameraRunningDetector : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;\n\n// When session isRunning changed\n- (void)sessionDidChangeIsRunning:(BOOL)running;\n// Call this after [AVCaptureSession startRunning] is called\n- (void)sessionDidCallStartRunning;\n// Call this before [AVCaptureSession stopRunning] is called\n- (void)sessionWillCallStopRunning;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraRunningDetector.m",
    "content": "//\n//  SCBlackCameraRunningDetector.m\n//  Snapchat\n//\n//  Created by Derek Wang on 30/01/2018.\n//\n\n#import \"SCBlackCameraRunningDetector.h\"\n\n#import \"SCBlackCameraReporter.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCLogger/SCCameraMetrics.h>\n\n// Check whether we called AVCaptureSession isRunning within this period\nstatic CGFloat const kSCBlackCameraCheckingDelay = 5;\n\n@interface SCBlackCameraRunningDetector () {\n    BOOL _isSessionRunning;\n    dispatch_block_t _checkSessionBlock;\n}\n@property (nonatomic) SCQueuePerformer *queuePerformer;\n@property (nonatomic) SCBlackCameraReporter *reporter;\n@end\n\n@implementation SCBlackCameraRunningDetector\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter\n{\n    self = [super init];\n    if (self) {\n        _queuePerformer = performer;\n        _reporter = reporter;\n    }\n    return self;\n}\n\n- (void)sessionDidChangeIsRunning:(BOOL)running\n{\n    [_queuePerformer perform:^{\n        _isSessionRunning = running;\n    }];\n}\n\n- (void)sessionDidCallStartRunning\n{\n    [self _scheduleCheck];\n}\n\n- (void)sessionWillCallStopRunning\n{\n    [_queuePerformer perform:^{\n        if (_checkSessionBlock) {\n            dispatch_block_cancel(_checkSessionBlock);\n            _checkSessionBlock = nil;\n        }\n    }];\n}\n\n- (void)_scheduleCheck\n{\n    [_queuePerformer perform:^{\n        @weakify(self);\n        _checkSessionBlock = dispatch_block_create(0, ^{\n            @strongify(self);\n            SC_GUARD_ELSE_RETURN(self);\n            self->_checkSessionBlock = nil;\n            [self _checkSessionState];\n        });\n\n        [_queuePerformer perform:_checkSessionBlock after:kSCBlackCameraCheckingDelay];\n    }];\n}\n\n- (void)_checkSessionState\n{\n    if (!_isSessionRunning) {\n        [_reporter 
reportBlackCameraWithCause:SCBlackCameraSessionNotRunning];\n    }\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraSessionBlockDetector.h",
    "content": "//\n//  SCBlackCameraSessionBlockDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 25/01/2018.\n//\n\n#import \"SCBlackCameraReporter.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCBlackCameraSessionBlockDetector : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter;\n\n- (void)sessionWillCallStartRunning;\n- (void)sessionDidCallStartRunning;\n\n- (void)sessionWillCommitConfiguration;\n- (void)sessionDidCommitConfiguration;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraSessionBlockDetector.m",
    "content": "//\n//  SCBlackCameraSessionBlockDetector.m\n//  Snapchat\n//\n//  Created by Derek Wang on 25/01/2018.\n//\n\n#import \"SCBlackCameraSessionBlockDetector.h\"\n\n#import \"SCBlackCameraReporter.h\"\n\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\n@import CoreGraphics;\n\n// Longer than 5 seconds is considerred as black camera\nstatic CGFloat const kSCBlackCameraBlockingThreshold = 5;\n// Will report if session blocks longer than 1 second\nstatic CGFloat const kSCSessionBlockingLogThreshold = 1;\n\n@interface SCBlackCameraSessionBlockDetector () {\n    NSTimeInterval _startTime;\n}\n@property (nonatomic) SCBlackCameraReporter *reporter;\n\n@end\n\n@implementation SCBlackCameraSessionBlockDetector\n\n- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter\n{\n    if (self = [super init]) {\n        _reporter = reporter;\n    }\n    return self;\n}\n\n- (void)sessionWillCallStartRunning\n{\n    _startTime = [NSDate timeIntervalSinceReferenceDate];\n}\n\n- (void)sessionDidCallStartRunning\n{\n    [self _reportBlackCameraIfNeededWithCause:SCBlackCameraSessionStartRunningBlocked];\n    [self _reportBlockingIfNeededWithCause:SCBlackCameraSessionStartRunningBlocked];\n}\n\n- (void)sessionWillCommitConfiguration\n{\n    _startTime = [NSDate timeIntervalSinceReferenceDate];\n}\n\n- (void)sessionDidCommitConfiguration\n{\n    [self _reportBlackCameraIfNeededWithCause:SCBlackCameraSessionConfigurationBlocked];\n    [self _reportBlockingIfNeededWithCause:SCBlackCameraSessionConfigurationBlocked];\n}\n\n- (void)_reportBlockingIfNeededWithCause:(SCBlackCameraCause)cause\n{\n    NSTimeInterval duration = [NSDate timeIntervalSinceReferenceDate] - _startTime;\n    if (duration >= kSCSessionBlockingLogThreshold) {\n        NSString *causeStr = [_reporter causeNameFor:cause];\n        [[SCLogger sharedInstance] logEvent:KSCCameraCaptureSessionBlocked\n                                 parameters:@{\n                                    
 @\"cause\" : causeStr,\n                                     @\"duration\" : @(duration)\n                                 }];\n    }\n}\n\n- (void)_reportBlackCameraIfNeededWithCause:(SCBlackCameraCause)cause\n{\n    NSTimeInterval endTime = [NSDate timeIntervalSinceReferenceDate];\n    if (endTime - _startTime >= kSCBlackCameraBlockingThreshold) {\n        [_reporter reportBlackCameraWithCause:cause];\n    }\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraViewDetector.h",
    "content": "//\n//  SCBlackCameraViewDetector.h\n//  Snapchat\n//\n//  Created by Derek Wang on 24/01/2018.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n@class SCQueuePerformer, SCBlackCameraReporter;\n@protocol SCManiphestTicketCreator;\n\n@interface SCBlackCameraViewDetector : NSObject\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;\n\n// CameraView visible/invisible\n- (void)onCameraViewVisible:(BOOL)visible;\n\n- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)gesture;\n\n// Call this when [AVCaptureSession startRunning] is called\n- (void)sessionWillCallStartRunning;\n// Call this when [AVCaptureSession stopRunning] is called\n- (void)sessionWillCallStopRunning;\n\n- (void)sessionWillRecreate;\n- (void)sessionDidRecreate;\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCBlackCameraViewDetector.m",
    "content": "//\n//  SCBlackCameraDetectorCameraView.m\n//  Snapchat\n//\n//  Created by Derek Wang on 24/01/2018.\n//\n\n#import \"SCBlackCameraViewDetector.h\"\n\n#import \"SCBlackCameraReporter.h\"\n#import \"SCCaptureDeviceAuthorization.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCLogger/SCCameraMetrics.h>\n\n// Check whether we called [AVCaptureSession startRunning] within this period\nstatic CGFloat const kSCBlackCameraCheckingDelay = 0.5;\n\n@interface SCBlackCameraViewDetector () {\n    BOOL _startRunningCalled;\n    BOOL _sessionIsRecreating;\n    dispatch_block_t _checkSessionBlock;\n}\n@property (nonatomic) SCQueuePerformer *queuePerformer;\n@property (nonatomic) SCBlackCameraReporter *reporter;\n@property (nonatomic, weak) UIGestureRecognizer *cameraViewGesture;\n@end\n\n@implementation SCBlackCameraViewDetector\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter\n{\n    self = [super init];\n    if (self) {\n        _queuePerformer = performer;\n        _reporter = reporter;\n    }\n    return self;\n}\n\n#pragma mark - Camera view visibility change trigger\n- (void)onCameraViewVisible:(BOOL)visible\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCoreCameraInfo(@\"[BlackCamera] onCameraViewVisible: %d\", visible);\n    BOOL firstTimeAccess = [SCCaptureDeviceAuthorization notDeterminedForVideoCapture];\n    if (firstTimeAccess) {\n        // We don't want to check black camera for firstTimeAccess\n        return;\n    }\n    // Visible and application is active\n    if (visible && [UIApplication sharedApplication].applicationState == UIApplicationStateActive) {\n        // Since this method is usually called before the view is actually visible, leave some margin to check\n        [self _scheduleCheckDelayed:YES];\n    } else {\n        [_queuePerformer 
perform:^{\n            if (_checkSessionBlock) {\n                dispatch_block_cancel(_checkSessionBlock);\n                _checkSessionBlock = nil;\n            }\n        }];\n    }\n}\n\n// Call this when [AVCaptureSession startRunning] is called\n- (void)sessionWillCallStartRunning\n{\n    [_queuePerformer perform:^{\n        _startRunningCalled = YES;\n    }];\n}\n\n- (void)sessionWillCallStopRunning\n{\n    [_queuePerformer perform:^{\n        _startRunningCalled = NO;\n    }];\n}\n\n- (void)_scheduleCheckDelayed:(BOOL)delay\n{\n    [_queuePerformer perform:^{\n        SC_GUARD_ELSE_RETURN(!_checkSessionBlock);\n        @weakify(self);\n        _checkSessionBlock = dispatch_block_create(0, ^{\n            @strongify(self);\n            SC_GUARD_ELSE_RETURN(self);\n            self->_checkSessionBlock = nil;\n            [self _checkSessionState];\n        });\n\n        if (delay) {\n            [_queuePerformer perform:_checkSessionBlock after:kSCBlackCameraCheckingDelay];\n        } else {\n            [_queuePerformer perform:_checkSessionBlock];\n        }\n    }];\n}\n\n- (void)_checkSessionState\n{\n    SCLogCoreCameraInfo(@\"[BlackCamera] checkSessionState startRunning: %d, sessionIsRecreating: %d\",\n                        _startRunningCalled, _sessionIsRecreating);\n    if (!_startRunningCalled && !_sessionIsRecreating) {\n        [_reporter reportBlackCameraWithCause:SCBlackCameraStartRunningNotCalled];\n        [_reporter fileShakeTicketWithCause:SCBlackCameraStartRunningNotCalled];\n    }\n}\n\n- (void)sessionWillRecreate\n{\n    [_queuePerformer perform:^{\n        _sessionIsRecreating = YES;\n    }];\n}\n\n- (void)sessionDidRecreate\n{\n    [_queuePerformer perform:^{\n        _sessionIsRecreating = NO;\n    }];\n}\n\n- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)gesture\n{\n    if (gesture != _cameraViewGesture) {\n        // Skip repeating gesture\n        self.cameraViewGesture = gesture;\n        [self 
_scheduleCheckDelayed:NO];\n    }\n}\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCCaptureSessionFixer.h",
    "content": "//\n//  SCCaptureSessionFixer.h\n//  Snapchat\n//\n//  Created by Derek Wang on 05/12/2017.\n//\n\n#import \"SCBlackCameraNoOutputDetector.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureSessionFixer : NSObject <SCBlackCameraDetectorDelegate>\n\n@end\n"
  },
  {
    "path": "BlackCamera/SCCaptureSessionFixer.m",
    "content": "//\n//  SCCaptureSessionFixer.m\n//  Snapchat\n//\n//  Created by Derek Wang on 05/12/2017.\n//\n\n#import \"SCCaptureSessionFixer.h\"\n\n#import \"SCCameraTweaks.h\"\n\n@implementation SCCaptureSessionFixer\n\n- (void)detector:(SCBlackCameraNoOutputDetector *)detector didDetectBlackCamera:(id<SCCapturer>)capture\n{\n    if (SCCameraTweaksBlackCameraRecoveryEnabled()) {\n        [capture recreateAVCaptureSession];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ContextAwareTaskManagement/OWNERS",
    "content": "--- !OWNERS\n\nversion: 2\n\ndefault:\n  jira_project: CCAM\n  owners:\n    num_required_reviewers: 1\n    teams:\n    - Snapchat/core-camera-ios\n    users:\n    - cjiang\n    - ljia\n"
  },
  {
    "path": "ContextAwareTaskManagement/Requests/SCContextAwareSnapCreationThrottleRequest.h",
    "content": "//\n//  SCContextAwareSnapCreationThrottleRequest.h\n//  SCCamera\n//\n//  Created by Cheng Jiang on 4/24/18.\n//\n\n#import <SCFoundation/SCContextAwareThrottleRequester.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCContextAwareSnapCreationThrottleRequest : NSObject <SCContextAwareThrottleRequest>\n\n- (instancetype)init;\n\n@end\n"
  },
  {
    "path": "ContextAwareTaskManagement/Requests/SCContextAwareSnapCreationThrottleRequest.m",
    "content": "//\n//  SCContextAwareSnapCreationThrottleRequest.m\n//  SCCamera\n//\n//  Created by Cheng Jiang on 4/24/18.\n//\n\n#import \"SCContextAwareSnapCreationThrottleRequest.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCContextAwareTaskManagementResourceProvider.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n\n#import <Tweaks/FBTweakInline.h>\n\nBOOL SCCATMSnapCreationEnabled(void)\n{\n    static dispatch_once_t capturingOnceToken;\n    static BOOL capturingImprovementEnabled;\n    dispatch_once(&capturingOnceToken, ^{\n        BOOL enabledWithAB = SCExperimentWithContextAwareTaskManagementCapturingImprovementEnabled();\n        NSInteger tweakOption = [FBTweakValue(@\"CATM\", @\"Performance Improvement\", @\"Capturing\", (id) @0,\n                                              (@{ @0 : @\"Respect A/B\",\n                                                  @1 : @\"YES\",\n                                                  @2 : @\"NO\" })) integerValue];\n        switch (tweakOption) {\n        case 0:\n            capturingImprovementEnabled = enabledWithAB;\n            break;\n        case 1:\n            capturingImprovementEnabled = YES;\n            break;\n        case 2:\n            capturingImprovementEnabled = NO;\n            break;\n        default:\n            SCCAssertFail(@\"Illegal option\");\n        }\n    });\n    return capturingImprovementEnabled;\n}\n\n@implementation SCContextAwareSnapCreationThrottleRequest {\n    NSString *_requestID;\n}\n\n- (instancetype)init\n{\n    if (self = [super init]) {\n        _requestID = @\"SCContextAwareSnapCreationThrottleRequest\";\n    }\n    return self;\n}\n\n- (BOOL)shouldThrottle:(SCApplicationContextState)context\n{\n    return SCCATMSnapCreationEnabled() && context != SCApplicationContextStateCamera;\n}\n\n- (NSString *)requestID\n{\n    return _requestID;\n}\n\n- (BOOL)isEqual:(id<SCContextAwareThrottleRequest>)object\n{\n    return [[object requestID] 
isEqualToString:_requestID];\n}\n\n@end\n"
  },
  {
    "path": "ContextAwareTaskManagement/Triggers/SCSnapCreationTriggers.h",
    "content": "//\n//  SCSnapCreationTriggers.h\n//  Snapchat\n//\n//  Created by Cheng Jiang on 4/1/18.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface SCSnapCreationTriggers : NSObject\n\n- (void)markSnapCreationStart;\n\n- (void)markSnapCreationPreviewAnimationFinish;\n\n- (void)markSnapCreationPreviewImageSetupFinish;\n\n- (void)markSnapCreationPreviewVideoFirstFrameRenderFinish;\n\n- (void)markSnapCreationEndWithContext:(NSString *)context;\n\n@end\n"
  },
  {
    "path": "ContextAwareTaskManagement/Triggers/SCSnapCreationTriggers.m",
    "content": "//\n//  SCSnapCreationTriggers.m\n//  Snapchat\n//\n//  Created by Cheng Jiang on 3/30/18.\n//\n\n#import \"SCSnapCreationTriggers.h\"\n\n#import \"SCContextAwareSnapCreationThrottleRequest.h\"\n\n#import <SCBase/SCMacros.h>\n#import <SCFoundation/SCContextAwareThrottleRequester.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@implementation SCSnapCreationTriggers {\n    BOOL _snapCreationStarted;\n    BOOL _previewAnimationFinished;\n    BOOL _previewImageSetupFinished;\n    BOOL _previewVideoFirstFrameRendered;\n}\n\n- (void)markSnapCreationStart\n{\n    SC_GUARD_ELSE_RUN_AND_RETURN(\n        !_snapCreationStarted,\n        SCLogCoreCameraWarning(@\"markSnapCreationStart skipped because previous SnapCreation session is not complete\"));\n    @synchronized(self)\n    {\n        _snapCreationStarted = YES;\n    }\n    [[SCContextAwareThrottleRequester shared] submitSuspendRequest:[SCContextAwareSnapCreationThrottleRequest new]];\n}\n\n- (void)markSnapCreationPreviewAnimationFinish\n{\n    @synchronized(self)\n    {\n        _previewAnimationFinished = YES;\n        if (_previewImageSetupFinished || _previewVideoFirstFrameRendered) {\n            [self markSnapCreationEndWithContext:@\"markSnapCreationPreviewAnimationFinish\"];\n        }\n    }\n}\n\n- (void)markSnapCreationPreviewImageSetupFinish\n{\n    @synchronized(self)\n    {\n        _previewImageSetupFinished = YES;\n        if (_previewAnimationFinished) {\n            [self markSnapCreationEndWithContext:@\"markSnapCreationPreviewImageSetupFinish\"];\n        }\n    }\n}\n\n- (void)markSnapCreationPreviewVideoFirstFrameRenderFinish\n{\n    @synchronized(self)\n    {\n        _previewVideoFirstFrameRendered = YES;\n        if (_previewAnimationFinished) {\n            [self markSnapCreationEndWithContext:@\"markSnapCreationPreviewVideoFirstFrameRenderFinish\"];\n        }\n    }\n}\n\n- (void)markSnapCreationEndWithContext:(NSString *)context\n{\n    
@synchronized(self)\n    {\n        SC_GUARD_ELSE_RETURN(_snapCreationStarted);\n        SCLogCoreCameraInfo(@\"markSnapCreationEnd triggered with context: %@\", context);\n        _snapCreationStarted = NO;\n        _previewAnimationFinished = NO;\n        _previewImageSetupFinished = NO;\n        _previewVideoFirstFrameRendered = NO;\n    }\n    [[SCContextAwareThrottleRequester shared] submitResumeRequest:[SCContextAwareSnapCreationThrottleRequest new]];\n}\n\n@end\n"
  },
  {
    "path": "Features/Core/SCFeature.h",
    "content": "//\n//  SCFeature.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 1/4/18.\n//\n\n#import <UIKit/UIKit.h>\n\n/**\n * Top level protocol for UI features\n */\n#define SCLogCameraFeatureInfo(fmt, ...) SCLogCoreCameraInfo(@\"[SCFeature] \" fmt, ##__VA_ARGS__)\n@protocol SCFeatureContainerView;\n@protocol SCFeature <NSObject>\n\n@optional\n- (void)configureWithView:(UIView<SCFeatureContainerView> *)view;\n- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)gestureRecognizer;\n- (void)forwardPanGesture:(UIPanGestureRecognizer *)gestureRecognizer;\n- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point;\n\n@end\n"
  },
  {
    "path": "Features/Core/SCFeatureContainerView.h",
    "content": "//\n//  SCFeatureContainerView.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 4/17/18.\n//\n\n#import <UIKit/UIKit.h>\n\n@protocol SCFeatureContainerView\n- (BOOL)isTapGestureRecognizer:(UIGestureRecognizer *)gestureRecognizer;\n- (CGRect)initialCameraTimerFrame;\n@end\n"
  },
  {
    "path": "Features/Core/SCFeatureCoordinator.h",
    "content": "//\n//  SCFeatureCoordinator.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 1/4/18.\n//\n\n#import \"SCFeature.h\"\n\n#import <SCBase/SCMacros.h>\n\n@protocol SCFeatureProvider;\n@class SCCameraOverlayView;\n\n/**\n * Handles creation of SCFeatures and communication between owner and features.\n */\n@interface SCFeatureCoordinator : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n- (instancetype)initWithFeatureContainerView:(SCCameraOverlayView *)containerView\n                                    provider:(id<SCFeatureProvider>)provider;\n\n/**\n * Asks provider for features with given featureTypes specified in initializer.\n */\n- (void)reloadFeatures;\n\n/**\n * Eventually won't need this, but in order to use new framework w/ existing architecture, need a way to forward\n * gestures to individual features.\n */\n- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer;\n- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)recognizer;\n- (void)forwardPanGesture:(UIPanGestureRecognizer *)recognizer;\n/**\n * To prevent gestures on AVCameraViewController from triggering at the same time as feature controls, need to provide a\n * way for features to indicate that they will block a touch with given point.\n */\n- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point;\n\n@end\n"
  },
  {
    "path": "Features/Core/SCFeatureCoordinator.m",
    "content": "//\n//  SCFeatureCoordinator.m\n//  SCCamera\n//\n//  Created by Kristian Bauer on 1/4/18.\n//\n\n#import \"SCFeatureCoordinator.h\"\n\n#import \"SCFeature.h\"\n#import \"SCFeatureProvider.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\ntypedef NSString SCFeatureDictionaryKey;\n\n@interface SCFeatureCoordinator ()\n@property (nonatomic, weak) UIView<SCFeatureContainerView> *containerView;\n@property (nonatomic, strong) id<SCFeatureProvider> provider;\n@end\n\n@implementation SCFeatureCoordinator\n\n- (instancetype)initWithFeatureContainerView:(UIView<SCFeatureContainerView> *)containerView\n                                    provider:(id<SCFeatureProvider>)provider\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert(containerView, @\"SCFeatureCoordinator containerView must be non-nil\");\n    SCAssert(provider, @\"SCFeatureCoordinator provider must be non-nil\");\n    self = [super init];\n    if (self) {\n        _containerView = containerView;\n        _provider = provider;\n        [self reloadFeatures];\n    }\n    return self;\n}\n\n- (void)reloadFeatures\n{\n    SCTraceODPCompatibleStart(2);\n    [_provider resetInstances];\n    NSMutableArray *features = [NSMutableArray array];\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(configureWithView:)]) {\n            [feature configureWithView:_containerView];\n        }\n        if (feature) {\n            [features addObject:feature];\n        }\n    }\n}\n\n- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(forwardCameraTimerGesture:)]) {\n            [feature forwardCameraTimerGesture:gestureRecognizer];\n        }\n    }\n}\n\n- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer 
*)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(forwardCameraOverlayTapGesture:)]) {\n            [feature forwardCameraOverlayTapGesture:gestureRecognizer];\n        }\n    }\n}\n\n- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(forwardLongPressGesture:)]) {\n            [feature forwardLongPressGesture:gestureRecognizer];\n        }\n    }\n}\n\n- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(forwardPinchGesture:)]) {\n            [feature forwardPinchGesture:gestureRecognizer];\n        }\n    }\n}\n\n- (void)forwardPanGesture:(UIPanGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(forwardPanGesture:)]) {\n            [feature forwardPanGesture:gestureRecognizer];\n        }\n    }\n}\n\n- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeature> feature in _provider.supportedFeatures) {\n        if ([feature respondsToSelector:@selector(shouldBlockTouchAtPoint:)] &&\n            [feature shouldBlockTouchAtPoint:point]) {\n            return YES;\n        }\n    }\n    return NO;\n}\n\n@end\n"
  },
  {
    "path": "Features/Core/SCFeatureProvider.h",
    "content": "//\n//  SCFeatureProvider.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 1/4/18.\n//\n\n#import <SCCamera/AVCameraViewEnums.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCFeatureSettingsManager, SCCapturerToken, SCUserSession;\n\n@protocol SCFeature\n, SCCapturer, SCFeatureFlash, SCFeatureHandsFree, SCFeatureLensSideButton, SCFeatureLensButtonZ, SCFeatureMemories,\n    SCFeatureNightMode, SCFeatureSnapKit, SCFeatureTapToFocusAndExposure, SCFeatureToggleCamera, SCFeatureShazam,\n    SCFeatureImageCapture, SCFeatureScanning, SCFeatureZooming;\n\n/**\n * Provides single location for creating and configuring SCFeatures.\n */\n@protocol SCFeatureProvider <NSObject>\n\n@property (nonatomic) AVCameraViewType cameraViewType;\n\n@property (nonatomic, readonly) id<SCCapturer> capturer;\n@property (nonatomic, strong, readwrite) SCCapturerToken *token;\n@property (nonatomic, readonly) SCUserSession *userSession;\n// TODO: We should not be reusing AVCameraViewController so eventually the\n// context should be removed.\n@property (nonatomic, readonly) AVCameraViewControllerContext context;\n@property (nonatomic) id<SCFeatureHandsFree> handsFreeRecording;\n@property (nonatomic) id<SCFeatureSnapKit> snapKit;\n@property (nonatomic) id<SCFeatureTapToFocusAndExposure> tapToFocusAndExposure;\n@property (nonatomic) id<SCFeatureMemories> memories;\n@property (nonatomic) id<SCFeatureFlash> flash;\n@property (nonatomic) id<SCFeatureLensSideButton> lensSideButton;\n@property (nonatomic) id<SCFeatureLensButtonZ> lensZButton;\n@property (nonatomic) id<SCFeatureNightMode> nightMode;\n@property (nonatomic) id<SCFeatureToggleCamera> toggleCamera;\n@property (nonatomic) id<SCFeatureShazam> shazam;\n@property (nonatomic) id<SCFeatureScanning> scanning;\n@property (nonatomic) id<SCFeatureImageCapture> imageCapture;\n@property (nonatomic) id<SCFeatureZooming> zooming;\n\n@property (nonatomic, readonly) NSArray<id<SCFeature>> *supportedFeatures;\n\n- 
(void)resetInstances;\n\n@end\n"
  },
  {
    "path": "Features/Flash/SCFeatureFlash.h",
    "content": "//\n//  SCFeatureFlash.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 3/27/18.\n//\n\n#import \"SCFeature.h\"\n\n@class SCNavigationBarButtonItem;\n\n/**\n * Public interface for interacting with camera flash feature.\n */\n@protocol SCFeatureFlash <SCFeature>\n@property (nonatomic, readonly) SCNavigationBarButtonItem *navigationBarButtonItem;\n\n- (void)interruptGestures;\n\n@end\n"
  },
  {
    "path": "Features/Flash/SCFeatureFlashImpl.h",
    "content": "//\n//  SCFeatureFlashImpl.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 3/27/18.\n//\n\n#import \"SCFeatureFlash.h\"\n\n#import <SCBase/SCMacros.h>\n\n@class SCLogger;\n@protocol SCCapturer;\n\n/**\n * Interface for camera flash feature. Handles enabling/disabling of camera flash via SCCapturer and UI for displaying\n * flash button.\n * Should only expose initializer. All other vars and methods should be declared in SCFeatureFlash protocol.\n */\n@interface SCFeatureFlashImpl : NSObject <SCFeatureFlash>\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer logger:(SCLogger *)logger NS_DESIGNATED_INITIALIZER;\n@end\n"
  },
  {
    "path": "Features/Flash/SCFeatureFlashImpl.m",
    "content": "//\n//  SCFeatureFlashImpl.m\n//  SCCamera\n//\n//  Created by Kristian Bauer on 3/27/18.\n//\n\n#import \"SCFeatureFlashImpl.h\"\n\n#import \"SCCapturer.h\"\n#import \"SCFlashButton.h\"\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCLocale.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCLogger/SCLogger.h>\n#import <SCUIKit/SCNavigationBarButtonItem.h>\n\nstatic CGFloat const kSCFlashButtonInsets = -2.f;\nstatic CGRect const kSCFlashButtonFrame = {0, 0, 36, 44};\n\nstatic NSString *const kSCFlashEventName = @\"TOGGLE_CAMERA_FLASH_BUTTON\";\nstatic NSString *const kSCFlashEventParameterFlashName = @\"flash_on\";\nstatic NSString *const kSCFlashEventParameterCameraName = @\"front_facing_camera_on\";\n\n@interface SCFeatureFlashImpl ()\n@property (nonatomic, strong, readwrite) id<SCCapturer> capturer;\n@property (nonatomic, strong, readwrite) SCLogger *logger;\n@property (nonatomic, strong, readwrite) SCFlashButton *flashButton;\n@property (nonatomic, weak, readwrite) UIView<SCFeatureContainerView> *containerView;\n@property (nonatomic, strong, readwrite) SCManagedCapturerState *managedCapturerState;\n@property (nonatomic, assign, readwrite) BOOL canEnable;\n@end\n\n@interface SCFeatureFlashImpl (SCManagedCapturerListener) <SCManagedCapturerListener>\n@end\n\n@implementation SCFeatureFlashImpl\n@synthesize navigationBarButtonItem = _navigationBarButtonItem;\n\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer logger:(SCLogger *)logger\n{\n    SCTraceODPCompatibleStart(2);\n    self = [super init];\n    if (self) {\n        _capturer = capturer;\n        [_capturer addListener:self];\n        _logger = logger;\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    SCTraceODPCompatibleStart(2);\n    [_capturer removeListener:self];\n}\n\n#pragma mark - SCFeature\n\n- (void)configureWithView:(UIView<SCFeatureContainerView> *)view\n{\n    
SCTraceODPCompatibleStart(2);\n    _containerView = view;\n}\n\n- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN_VALUE(_flashButton.userInteractionEnabled && !_flashButton.hidden, NO);\n    CGPoint convertedPoint = [_flashButton convertPoint:point fromView:_containerView];\n    return [_flashButton pointInside:convertedPoint withEvent:nil];\n}\n\n#pragma mark - SCFeatureFlash\n\n- (void)interruptGestures\n{\n    SCTraceODPCompatibleStart(2);\n    [_flashButton interruptGestures];\n}\n\n- (SCNavigationBarButtonItem *)navigationBarButtonItem\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN_VALUE(!_navigationBarButtonItem, _navigationBarButtonItem);\n    _navigationBarButtonItem = [[SCNavigationBarButtonItem alloc] initWithCustomView:self.flashButton];\n    return _navigationBarButtonItem;\n}\n\n#pragma mark - Getters\n\n- (SCFlashButton *)flashButton\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN_VALUE(!_flashButton, _flashButton);\n    _flashButton = [[SCFlashButton alloc] initWithFrame:kSCFlashButtonFrame];\n    _flashButton.layer.sublayerTransform = CATransform3DMakeTranslation(kSCFlashButtonInsets, 0, 0);\n    _flashButton.buttonState = SCFlashButtonStateOff;\n    _flashButton.maximumScale = 1.1111f;\n    [_flashButton addTarget:self action:@selector(_flashTapped)];\n\n    _flashButton.accessibilityIdentifier = @\"flash\";\n    _flashButton.accessibilityLabel = SCLocalizedString(@\"flash\", 0);\n    return _flashButton;\n}\n\n#pragma mark - Setters\n\n- (void)setCanEnable:(BOOL)canEnable\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCameraFeatureInfo(@\"[%@] setCanEnable new: %@ old: %@\", NSStringFromClass([self class]),\n                           canEnable ? @\"YES\" : @\"NO\", _canEnable ? 
@\"YES\" : @\"NO\");\n    _canEnable = canEnable;\n    self.flashButton.userInteractionEnabled = canEnable;\n}\n\n#pragma mark - Internal Helpers\n\n- (void)_flashTapped\n{\n    SCTraceODPCompatibleStart(2);\n    BOOL flashActive = !_managedCapturerState.flashActive;\n\n    SCLogCameraFeatureInfo(@\"[%@] _flashTapped flashActive new: %@ old: %@\", NSStringFromClass([self class]),\n                           flashActive ? @\"YES\" : @\"NO\", !flashActive ? @\"YES\" : @\"NO\");\n    _containerView.userInteractionEnabled = NO;\n    @weakify(self);\n    [_capturer setFlashActive:flashActive\n            completionHandler:^{\n                @strongify(self);\n                SCLogCameraFeatureInfo(@\"[%@] _flashTapped setFlashActive completion\", NSStringFromClass([self class]));\n                self.containerView.userInteractionEnabled = YES;\n            }\n                      context:SCCapturerContext];\n\n    NSDictionary *loggingParameters = @{\n        kSCFlashEventParameterFlashName : @(flashActive),\n        kSCFlashEventParameterCameraName :\n            @(_managedCapturerState.devicePosition == SCManagedCaptureDevicePositionFront)\n    };\n    [_logger logEvent:kSCFlashEventName parameters:loggingParameters];\n}\n\n- (BOOL)_shouldHideForState:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    return (!state.flashSupported && !state.torchSupported &&\n            state.devicePosition != SCManagedCaptureDevicePositionFront) ||\n           state.arSessionActive;\n}\n\n@end\n\n@implementation SCFeatureFlashImpl (SCManagedCapturerListener)\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCameraFeatureInfo(@\"[%@] didChangeFlashActive flashActive: %@\", NSStringFromClass([self class]),\n                           state.flashActive ? 
SCFlashButtonStateOn : SCFlashButtonStateOff;\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCameraFeatureInfo(\n        @\"[%@] didChangeFlashSupportedAndTorchSupported flashSupported: %@ torchSupported: %@ devicePosition: %@\",\n        NSStringFromClass([self class]), state.flashSupported ? @\"YES\" : @\"NO\", state.torchSupported ? @\"YES\" : @\"NO\",\n        state.devicePosition == SCManagedCaptureDevicePositionFront ? @\"front\" : @\"back\");\n    self.flashButton.hidden = [self _shouldHideForState:state];\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    _managedCapturerState = [state copy];\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCameraFeatureInfo(@\"[%@] didChangeARSessionActive: %@\", NSStringFromClass([self class]),\n                           state.arSessionActive ? 
@\"YES\" : @\"NO\");\n    self.flashButton.hidden = [self _shouldHideForState:state];\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n didBeginVideoRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n{\n    SCTraceODPCompatibleStart(2);\n    self.canEnable = NO;\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didFinishRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n          recordedVideo:(SCManagedRecordedVideo *)recordedVideo\n{\n    SCTraceODPCompatibleStart(2);\n    self.canEnable = YES;\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n       didFailRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n                  error:(NSError *)error\n{\n    SCTraceODPCompatibleStart(2);\n    self.canEnable = YES;\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didCancelRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n{\n    SCTraceODPCompatibleStart(2);\n    self.canEnable = YES;\n}\n\n@end\n"
  },
  {
    "path": "Features/Flash/SCFlashButton.h",
    "content": "//\n//  SCFlashButton.h\n//  SCCamera\n//\n//  Created by Will Wu on 2/13/14.\n//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCUIKit/SCGrowingButton.h>\n\ntypedef NS_ENUM(NSInteger, SCFlashButtonState) { SCFlashButtonStateOn = 0, SCFlashButtonStateOff = 1 };\n\n@interface SCFlashButton : SCGrowingButton\n@property (nonatomic, assign) SCFlashButtonState buttonState;\n@end\n"
  },
  {
    "path": "Features/Flash/SCFlashButton.m",
    "content": "//\n//  SCFlashButton.m\n//  SCCamera\n//\n//  Created by Will Wu on 2/13/14.\n//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCFlashButton.h\"\n\n#import <SCUIKit/SCPixelRounding.h>\n\n@implementation SCFlashButton\n\n- (void)setButtonState:(SCFlashButtonState)buttonState\n{\n    // Don't reset flash button state if it doesn't change.\n    if (_buttonState == buttonState) {\n        return;\n    }\n    _buttonState = buttonState;\n\n    if (buttonState == SCFlashButtonStateOn) {\n        self.image = [UIImage imageNamed:@\"camera_flash_on_v10\"];\n        self.accessibilityValue = @\"on\";\n    } else {\n        self.image = [UIImage imageNamed:@\"camera_flash_off_v10\"];\n        self.accessibilityValue = @\"off\";\n    }\n\n    self.imageInset = SCRoundSizeToPixels(CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2,\n                                                     (CGRectGetHeight(self.bounds) - self.image.size.height) / 2));\n}\n\n@end\n"
  },
  {
    "path": "Features/HandsFree/SCFeatureHandsFree.h",
    "content": "//\n//  SCFeatureHandsFree.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 2/26/18.\n//\n\n#import \"SCFeature.h\"\n\n#import <SCCamera/AVCameraViewEnums.h>\n\n@class SCLongPressGestureRecognizer, SCPreviewPresenter;\n\n@protocol SCFeatureHandsFree <SCFeature>\n@property (nonatomic, weak) SCPreviewPresenter *previewPresenter;\n@property (nonatomic, strong, readonly) SCLongPressGestureRecognizer *longPressGestureRecognizer;\n\n/**\n * Whether the feature is enabled or not.\n */\n@property (nonatomic) BOOL enabled;\n- (void)setupRecordLifecycleEventsWithMethod:(SCCameraRecordingMethod)method;\n- (BOOL)shouldDisplayMultiSnapTooltip;\n\n/**\n * Block called when user cancels hands-free recording via X button.\n */\n- (void)setCancelBlock:(dispatch_block_t)cancelBlock;\n\n@end\n"
  },
  {
    "path": "Features/ImageCapture/SCFeatureImageCapture.h",
    "content": "//\n//  SCFeatureImageCapture.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 4/18/18.\n//\n\n#import \"SCFeature.h\"\n\n#import <SCFoundation/SCFuture.h>\n\n@protocol SCFeatureImageCapture;\n\n@protocol SCFeatureImageCaptureDelegate <NSObject>\n- (void)featureImageCapture:(id<SCFeatureImageCapture>)featureImageCapture willCompleteWithImage:(UIImage *)image;\n- (void)featureImageCapture:(id<SCFeatureImageCapture>)featureImageCapture didCompleteWithError:(NSError *)error;\n- (void)featureImageCapturedDidComplete:(id<SCFeatureImageCapture>)featureImageCapture;\n@end\n\n/**\n SCFeature protocol for capturing an image.\n */\n@protocol SCFeatureImageCapture <SCFeature>\n@property (nonatomic, weak, readwrite) id<SCFeatureImageCaptureDelegate> delegate;\n@property (nonatomic, strong, readonly) SCPromise<UIImage *> *imagePromise;\n- (void)captureImage:(NSString *)captureSessionID;\n@end\n"
  },
  {
    "path": "Features/ImageCapture/SCFeatureImageCaptureImpl.h",
    "content": "//\n//  SCFeatureImageCaptureImpl.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 4/18/18.\n//\n\n#import \"AVCameraViewEnums.h\"\n#import \"SCFeatureImageCapture.h\"\n\n#import <SCBase/SCMacros.h>\n\n@protocol SCCapturer;\n@class SCLogger;\n\n@interface SCFeatureImageCaptureImpl : NSObject <SCFeatureImageCapture>\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer\n                          logger:(SCLogger *)logger\n                  cameraViewType:(AVCameraViewType)cameraViewType NS_DESIGNATED_INITIALIZER;\n@end\n"
  },
  {
    "path": "Features/ImageCapture/SCFeatureImageCaptureImpl.m",
    "content": "//\n//  SCFeatureImageCaptureImpl.m\n//  SCCamera\n//\n//  Created by Kristian Bauer on 4/18/18.\n//\n\n#import \"SCFeatureImageCaptureImpl.h\"\n\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCManagedCapturerLensAPI.h\"\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedCapturerUtils.h\"\n#import \"SCManagedStillImageCapturer.h\"\n\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n\n@interface SCFeatureImageCaptureImpl ()\n@property (nonatomic, strong, readwrite) id<SCCapturer> capturer;\n@property (nonatomic, strong, readwrite) SCLogger *logger;\n@property (nonatomic, assign) AVCameraViewType cameraViewType;\n@property (nonatomic, strong, readwrite) SCManagedCapturerState *managedCapturerState;\n\n/**\n * Whether user has attempted image capture in current session. 
Reset on foreground of app.\n */\n@property (nonatomic, assign) BOOL hasTriedCapturing;\n@end\n\n@interface SCFeatureImageCaptureImpl (SCManagedCapturerListener) <SCManagedCapturerListener>\n@end\n\n@implementation SCFeatureImageCaptureImpl\n@synthesize delegate = _delegate;\n@synthesize imagePromise = _imagePromise;\n\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer\n                          logger:(SCLogger *)logger\n                  cameraViewType:(AVCameraViewType)cameraViewType\n{\n    SCTraceODPCompatibleStart(2);\n    self = [super init];\n    if (self) {\n        _capturer = capturer;\n        [_capturer addListener:self];\n        _logger = logger;\n        _cameraViewType = cameraViewType;\n\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_viewWillEnterForeground)\n                                                     name:UIApplicationWillEnterForegroundNotification\n                                                   object:nil];\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    [_capturer removeListener:self];\n}\n\n#pragma mark - SCFeatureImageCapture\n\n- (void)captureImage:(NSString *)captureSessionID\n{\n    SCTraceODPCompatibleStart(2);\n    [_logger logTimedEventStart:kSCCameraMetricsRecordingDelay uniqueId:@\"IMAGE\" isUniqueEvent:NO];\n    BOOL asyncCaptureEnabled = [self _asynchronousCaptureEnabled:_managedCapturerState];\n    SCLogCameraFeatureInfo(@\"[%@] takeImage begin async: %@\", NSStringFromClass([self class]),\n                           asyncCaptureEnabled ? 
@\"YES\" : @\"NO\");\n\n    if (asyncCaptureEnabled) {\n        SCQueuePerformer *performer = [[SCQueuePerformer alloc] initWithLabel:\"com.snapchat.image-capture-promise\"\n                                                             qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                                    queueType:DISPATCH_QUEUE_SERIAL\n                                                                      context:SCQueuePerformerContextCoreCamera];\n        _imagePromise = [[SCPromise alloc] initWithPerformer:performer];\n    }\n\n    @weakify(self);\n    [_capturer captureStillImageAsynchronouslyWithAspectRatio:SCManagedCapturedImageAndVideoAspectRatio()\n                                             captureSessionID:captureSessionID\n                                            completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata,\n                                                                NSError *error, SCManagedCapturerState *state) {\n                                                @strongify(self);\n                                                SC_GUARD_ELSE_RETURN(self);\n                                                [self _takeImageCallback:fullScreenImage\n                                                                metadata:metadata\n                                                                   error:error\n                                                                   state:state];\n                                            }\n                                                      context:SCCapturerContext];\n    [_logger logCameraCaptureFinishedWithDuration:0];\n}\n\n#pragma mark - Private\n\n- (void)_viewWillEnterForeground\n{\n    SCTraceODPCompatibleStart(2);\n    _hasTriedCapturing = NO;\n}\n\n- (void)_takeImageCallback:(UIImage *)image\n                  metadata:(NSDictionary *)metadata\n                     error:(NSError *)error\n                     
state:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    [self _logCaptureComplete:state];\n\n    if (image) {\n        [_delegate featureImageCapture:self willCompleteWithImage:image];\n        if (_imagePromise) {\n            [_imagePromise completeWithValue:image];\n        }\n    } else {\n        if (_imagePromise) {\n            [_imagePromise completeWithError:[NSError errorWithDomain:@\"\" code:-1 userInfo:nil]];\n        }\n        [_delegate featureImageCapture:self didCompleteWithError:error];\n    }\n    _imagePromise = nil;\n    [_delegate featureImageCapturedDidComplete:self];\n}\n\n- (BOOL)_asynchronousCaptureEnabled:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    BOOL shouldCaptureImageFromVideoBuffer =\n        [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];\n    // Fast image capture is disabled in following cases\n    // (1) flash is on;\n    // (2) lenses are active;\n    // (3) SCPhotoCapturer is not supported;\n    // (4) not main camera for iPhoneX;\n    return !state.flashActive && !state.lensesActive && !_capturer.lensProcessingCore.appliedLens &&\n           (SCPhotoCapturerIsEnabled() || shouldCaptureImageFromVideoBuffer) &&\n           (![SCDeviceName isIphoneX] || (_cameraViewType == AVCameraViewNoReply));\n}\n\n- (void)_logCaptureComplete:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    NSDictionary *params = @{\n        @\"type\" : @\"image\",\n        @\"lenses_active\" : @(state.lensesActive),\n        @\"is_back_camera\" : @(state.devicePosition != SCManagedCaptureDevicePositionFront),\n        @\"is_main_camera\" : @(_cameraViewType == AVCameraViewNoReply),\n        @\"is_first_attempt_after_app_startup\" : @(!_hasTriedCapturing),\n        @\"app_startup_type\" : SCLaunchType(),\n        @\"app_startup_time\" : @(SCAppStartupTimeMicros() / 1000.0),\n        @\"time_elapse_after_app_startup\" : 
@(SCTimeElapseAfterAppStartupMicros() / 1000.0),\n    };\n    [_logger logTimedEventEnd:kSCCameraMetricsRecordingDelay uniqueId:@\"IMAGE\" parameters:params];\n    _hasTriedCapturing = YES;\n}\n\n@end\n\n@implementation SCFeatureImageCaptureImpl (SCManagedCapturerListener)\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    _managedCapturerState = [state copy];\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state\n{\n    SCTraceODPCompatibleStart(2);\n    if (_imagePromise) {\n        [[SCManagedCapturePreviewLayerController sharedInstance] pause];\n    }\n}\n\n@end\n"
  },
  {
    "path": "Features/NightMode/SCFeatureNightMode.h",
    "content": "//\n//  SCFeatureNightMode.h\n//  SCCamera\n//\n//  Created by Kristian Bauer on 4/9/18.\n//\n\n#import \"SCFeature.h\"\n\n@class SCNavigationBarButtonItem, SCPreviewPresenter;\n\n/**\n * Public interface for interacting with camera night mode feature.\n * User spec: https://snapchat.quip.com/w4h4ArzcmXCS\n */\n@protocol SCFeatureNightMode <SCFeature>\n@property (nonatomic, weak, readwrite) SCPreviewPresenter *previewPresenter;\n@property (nonatomic, readonly) SCNavigationBarButtonItem *navigationBarButtonItem;\n\n- (void)interruptGestures;\n- (void)hideWithDelayIfNeeded;\n@end\n"
  },
  {
    "path": "Features/NightMode/SCNightModeButton.h",
    "content": "//\n//  SCNightModeButton.h\n//  SCCamera\n//\n//  Created by Liu Liu on 3/19/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCBase/SCMacros.h>\n#import <SCUIKit/SCGrowingButton.h>\n\n@interface SCNightModeButton : SCGrowingButton\n@property (nonatomic, assign, getter=isSelected) BOOL selected;\nSC_INIT_AND_NEW_UNAVAILABLE\n- (void)show;\n- (void)hideWithDelay:(BOOL)delay;\n- (BOOL)willHideAfterDelay;\n@end\n"
  },
  {
    "path": "Features/NightMode/SCNightModeButton.m",
    "content": "//\n//  SCNightModeButton.m\n//  SCCamera\n//\n//  Created by Liu Liu on 3/19/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCNightModeButton.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n\nstatic NSTimeInterval const kSCNightModeButtonHiddenDelay = 2.5;\n\n@implementation SCNightModeButton {\n    dispatch_block_t _delayedHideBlock;\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        self.image = [UIImage imageNamed:@\"camera_nightmode_off_v10\"];\n        self.imageInset = CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2,\n                                     (CGRectGetHeight(self.bounds) - self.image.size.height) / 2);\n    }\n    return self;\n}\n\n- (void)setSelected:(BOOL)selected\n{\n    SC_GUARD_ELSE_RETURN(_selected != selected);\n    if (selected) {\n        [self _cancelDelayedHideAnimation];\n        self.image = [UIImage imageNamed:@\"camera_nightmode_on_v10\"];\n    } else {\n        self.image = [UIImage imageNamed:@\"camera_nightmode_off_v10\"];\n    }\n    self.imageInset = CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2,\n                                 (CGRectGetHeight(self.bounds) - self.image.size.height) / 2);\n    _selected = selected;\n}\n\n- (void)show\n{\n    SC_GUARD_ELSE_RETURN(self.hidden);\n    SCAssertMainThread();\n    [self _cancelDelayedHideAnimation];\n    self.hidden = NO;\n    [self animate];\n}\n\n- (void)hideWithDelay:(BOOL)delay\n{\n    SC_GUARD_ELSE_RETURN(!self.hidden);\n    SCAssertMainThread();\n    [self _cancelDelayedHideAnimation];\n    if (delay) {\n        @weakify(self);\n        _delayedHideBlock = dispatch_block_create(0, ^{\n            @strongify(self);\n            SC_GUARD_ELSE_RETURN(self);\n            [UIView animateWithDuration:0.3\n                animations:^{\n                    self.alpha = 0;\n                }\n                
completion:^(BOOL finished) {\n                    self.alpha = 1;\n                    self.hidden = YES;\n                    _delayedHideBlock = nil;\n                }];\n        });\n        dispatch_time_t delayTime =\n            dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCNightModeButtonHiddenDelay * NSEC_PER_SEC));\n        dispatch_after(delayTime, dispatch_get_main_queue(), _delayedHideBlock);\n    } else {\n        self.hidden = YES;\n    }\n}\n\n- (BOOL)willHideAfterDelay\n{\n    return _delayedHideBlock != nil;\n}\n\n#pragma mark - Private\n\n- (void)_cancelDelayedHideAnimation\n{\n    SC_GUARD_ELSE_RETURN(_delayedHideBlock);\n    dispatch_cancel(_delayedHideBlock);\n    _delayedHideBlock = nil;\n}\n\n@end\n"
  },
  {
    "path": "Features/Scanning/SCFeatureScanning.h",
    "content": "//\n//  SCFeatureScanning.h\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 2018/4/19.\n//\n\n#import \"SCFeature.h\"\n\n@protocol SCFeatureScanning;\n\n@protocol SCFeatureScanningDelegate <NSObject>\n- (void)featureScanning:(id<SCFeatureScanning>)featureScanning didFinishWithResult:(NSObject *)resultObject;\n@end\n\n/**\n This SCFeature allows the user to long press on the screen to scan a snapcode.\n */\n@protocol SCFeatureScanning <SCFeature>\n@property (nonatomic, weak) id<SCFeatureScanningDelegate> delegate;\n@property (nonatomic, assign) NSTimeInterval lastSuccessfulScanTime;\n- (void)startScanning;\n- (void)stopScanning;\n\n- (void)stopSearch;\n@end\n"
  },
  {
    "path": "Features/Shazam/SCFeatureShazam.h",
    "content": "//\n//  SCFeatureShazam.h\n//  SCCamera\n//\n//  Created by Xiaokang Liu on 2018/4/18.\n//\n\n#import \"SCFeature.h\"\n\n@class SCLens;\n@protocol SCFeatureShazam;\n\n@protocol SCFeatureShazamDelegate <NSObject>\n- (void)featureShazam:(id<SCFeatureShazam>)featureShazam didFinishWithResult:(NSObject *)result;\n- (void)featureShazamDidSubmitSearchRequest:(id<SCFeatureShazam>)featureShazam;\n- (SCLens *)filterLensForFeatureShazam:(id<SCFeatureShazam>)featureShazam;\n@end\n\n@protocol SCFeatureShazam <SCFeature>\n@property (nonatomic, weak) id<SCFeatureShazamDelegate> delegate;\n- (void)stopAudioRecordingAsynchronously;\n- (void)resetInfo;\n@end\n"
  },
  {
    "path": "Features/SnapKit/SCFeatureSnapKit.h",
    "content": "//\n//  SCFeatureSnapKit.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 3/19/18.\n//\n\n#import \"SCFeature.h\"\n\n@class SCCameraDeepLinkMetadata;\n\n@protocol SCFeatureSnapKit <SCFeature>\n- (void)setDeepLinkMetadata:(SCCameraDeepLinkMetadata *)metadata;\n@end\n"
  },
  {
    "path": "Features/TapToFocus/SCFeatureTapToFocusAndExposure.h",
    "content": "//\n//  SCFeatureTapToFocusAndExposure.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/5/18.\n//\n\n#import \"SCFeature.h\"\n\n/**\n This SCFeature allows the user to tap on the screen to adjust focus and exposure.\n */\n@protocol SCFeatureTapToFocusAndExposure <SCFeature>\n\n- (void)reset;\n\n@end\n"
  },
  {
    "path": "Features/TapToFocus/SCFeatureTapToFocusAndExposureImpl.h",
    "content": "//\n//  SCFeatureTapToFocusAndExposureImpl.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/5/18.\n//\n\n#import \"SCFeatureTapToFocusAndExposure.h\"\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@protocol SCCapturer;\n\n/**\n Protocol describing unique camera commands to run when the user taps on screen. These could be focus, exposure or tap\n to portrait mode.\n */\n@protocol SCFeatureCameraTapCommand <NSObject>\n- (void)execute:(CGPoint)pointOfInterest capturer:(id<SCCapturer>)capturer;\n@end\n\n/**\n This is the default implementation of SCFeatureTapToFocusAndExposure allowing the user to tap on the camera overlay\n view in order to adjust focus and exposure.\n */\n@interface SCFeatureTapToFocusAndExposureImpl : NSObject <SCFeatureTapToFocusAndExposure>\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer commands:(NSArray<id<SCFeatureCameraTapCommand>> *)commands;\n@end\n\n/**\n Adjust focus on tap.\n */\n@interface SCFeatureCameraFocusTapCommand : NSObject <SCFeatureCameraTapCommand>\n@end\n\n/**\n Adjust exposure on tap.\n */\n@interface SCFeatureCameraExposureTapCommand : NSObject <SCFeatureCameraTapCommand>\n@end\n\n/**\n Adjust portrait mode point of interest on tap.\n */\n@interface SCFeatureCameraPortraitTapCommand : NSObject <SCFeatureCameraTapCommand>\n@end\n"
  },
  {
    "path": "Features/TapToFocus/SCFeatureTapToFocusAndExposureImpl.m",
    "content": "//\n//  SCFeatureTapToFocusImpl.m\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/5/18.\n//\n\n#import \"SCFeatureTapToFocusAndExposureImpl.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCCapturer.h\"\n#import \"SCFeatureContainerView.h\"\n#import \"SCTapAnimationView.h\"\n\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCFeatureTapToFocusAndExposureImpl ()\n@property (nonatomic, weak) id<SCCapturer> capturer;\n@property (nonatomic, weak) UIView<SCFeatureContainerView> *containerView;\n@property (nonatomic) BOOL userTappedToFocusAndExposure;\n@property (nonatomic) NSArray<id<SCFeatureCameraTapCommand>> *commands;\n@end\n\n@implementation SCFeatureTapToFocusAndExposureImpl\n\n- (instancetype)initWithCapturer:(id<SCCapturer>)capturer commands:(NSArray<id<SCFeatureCameraTapCommand>> *)commands\n{\n    if (self = [super init]) {\n        _capturer = capturer;\n        _commands = commands;\n    }\n    return self;\n}\n\n- (void)reset\n{\n    SC_GUARD_ELSE_RETURN(_userTappedToFocusAndExposure);\n    _userTappedToFocusAndExposure = NO;\n    [_capturer continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:nil context:SCCapturerContext];\n}\n\n#pragma mark - SCFeature\n\n- (void)configureWithView:(UIView<SCFeatureContainerView> *)view\n{\n    SCTraceODPCompatibleStart(2);\n    _containerView = view;\n}\n\n- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    CGPoint point = [gestureRecognizer locationInView:gestureRecognizer.view];\n    @weakify(self);\n    [_capturer convertViewCoordinates:[gestureRecognizer locationInView:_containerView]\n                    completionHandler:^(CGPoint pointOfInterest) {\n                        @strongify(self);\n                        SC_GUARD_ELSE_RETURN(self);\n                        SCLogCameraFeatureInfo(@\"Tapped to focus: %@\", NSStringFromCGPoint(pointOfInterest));\n            
            [self _applyTapCommands:pointOfInterest];\n                        [self _showTapAnimationAtPoint:point forGesture:gestureRecognizer];\n                    }\n                              context:SCCapturerContext];\n}\n\n#pragma mark - Private helpers\n\n- (void)_applyTapCommands:(CGPoint)pointOfInterest\n{\n    SCTraceODPCompatibleStart(2);\n    for (id<SCFeatureCameraTapCommand> command in _commands) {\n        [command execute:pointOfInterest capturer:_capturer];\n    }\n    self.userTappedToFocusAndExposure = YES;\n}\n\n- (void)_showTapAnimationAtPoint:(CGPoint)point forGesture:(UIGestureRecognizer *)gestureRecognizer\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN([self.containerView isTapGestureRecognizer:gestureRecognizer])\n    SCTapAnimationView *tapAnimationView = [SCTapAnimationView tapAnimationView];\n    [_containerView addSubview:tapAnimationView];\n    tapAnimationView.center = point;\n    [tapAnimationView showWithCompletion:^(SCTapAnimationView *view) {\n        [view removeFromSuperview];\n    }];\n}\n\n@end\n\n@implementation SCFeatureCameraFocusTapCommand\n- (void)execute:(CGPoint)pointOfInterest capturer:(id<SCCapturer>)capturer\n{\n    [capturer setAutofocusPointOfInterestAsynchronously:pointOfInterest\n                                      completionHandler:nil\n                                                context:SCCapturerContext];\n}\n@end\n\n@implementation SCFeatureCameraExposureTapCommand\n- (void)execute:(CGPoint)pointOfInterest capturer:(id<SCCapturer>)capturer\n{\n    [capturer setExposurePointOfInterestAsynchronously:pointOfInterest\n                                              fromUser:YES\n                                     completionHandler:nil\n                                               context:SCCapturerContext];\n}\n@end\n\n@implementation SCFeatureCameraPortraitTapCommand\n- (void)execute:(CGPoint)pointOfInterest capturer:(id<SCCapturer>)capturer\n{\n    [capturer 
setPortraitModePointOfInterestAsynchronously:pointOfInterest\n                                         completionHandler:nil\n                                                   context:SCCapturerContext];\n}\n@end\n"
  },
  {
    "path": "Features/TapToFocus/SCTapAnimationView.h",
    "content": "//\n//  SCTapAnimationView.h\n//  SCCamera\n//\n//  Created by Alexander Grytsiuk on 8/26/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@class SCTapAnimationView;\n\ntypedef void (^SCTapAnimationViewCompletion)(SCTapAnimationView *);\n\n@interface SCTapAnimationView : UIView\n\n+ (instancetype)tapAnimationView;\n\n- (void)showWithCompletion:(SCTapAnimationViewCompletion)completion;\n\n@end\n"
  },
  {
    "path": "Features/TapToFocus/SCTapAnimationView.m",
    "content": "//\n//  SCTapAnimationView.m\n//  SCCamera\n//\n//  Created by Alexander Grytsiuk on 8/26/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCTapAnimationView.h\"\n\n#import <SCBase/SCMacros.h>\n\n@import QuartzCore;\n\nstatic const CGFloat kSCAnimationStep = 0.167;\nstatic const CGFloat kSCInnerCirclePadding = 2.5;\nstatic const CGFloat kSCTapAnimationViewWidth = 55;\nstatic const CGFloat kSCOuterRingBorderWidth = 1;\n\nstatic NSString *const kSCOpacityAnimationKey = @\"opacity\";\nstatic NSString *const kSCScaleAnimationKey = @\"scale\";\n\n@implementation SCTapAnimationView {\n    CALayer *_outerRing;\n    CALayer *_innerCircle;\n}\n\n#pragma mark Class Methods\n\n+ (instancetype)tapAnimationView\n{\n    return [[self alloc] initWithFrame:CGRectMake(0, 0, kSCTapAnimationViewWidth, kSCTapAnimationViewWidth)];\n}\n\n#pragma mark Life Cycle\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        self.userInteractionEnabled = NO;\n        _outerRing = [CALayer layer];\n        _outerRing.backgroundColor = [UIColor clearColor].CGColor;\n        _outerRing.borderColor = [UIColor whiteColor].CGColor;\n        _outerRing.borderWidth = kSCOuterRingBorderWidth;\n        _outerRing.shadowColor = [UIColor blackColor].CGColor;\n        _outerRing.shadowOpacity = 0.4;\n        _outerRing.shadowOffset = CGSizeMake(0.5, 0.5);\n        _outerRing.opacity = 0.0;\n        _outerRing.frame = self.bounds;\n        _outerRing.cornerRadius = CGRectGetMidX(_outerRing.bounds);\n        [self.layer addSublayer:_outerRing];\n\n        _innerCircle = [CALayer layer];\n        _innerCircle.backgroundColor = [UIColor whiteColor].CGColor;\n        _innerCircle.opacity = 0.0;\n        _innerCircle.frame = CGRectInset(self.bounds, kSCInnerCirclePadding, kSCInnerCirclePadding);\n        _innerCircle.cornerRadius = CGRectGetMidX(_innerCircle.bounds);\n        [self.layer 
addSublayer:_innerCircle];\n    }\n    return self;\n}\n\n#pragma mark Public\n\n- (void)showWithCompletion:(SCTapAnimationViewCompletion)completion\n{\n    [_outerRing removeAllAnimations];\n    [_innerCircle removeAllAnimations];\n\n    [CATransaction begin];\n    [CATransaction setCompletionBlock:^{\n        if (completion) {\n            completion(self);\n        }\n    }];\n    [self addOuterRingOpacityAnimation];\n    [self addOuterRingScaleAnimation];\n    [self addInnerCircleOpacityAnimation];\n    [self addInnerCircleScaleAnimation];\n    [CATransaction commit];\n}\n\n#pragma mark Private\n\n- (CAKeyframeAnimation *)keyFrameAnimationWithKeyPath:(NSString *)keyPath\n                                             duration:(CGFloat)duration\n                                               values:(NSArray *)values\n                                             keyTimes:(NSArray *)keyTimes\n                                      timingFunctions:(NSArray *)timingFunctions\n{\n    CAKeyframeAnimation *keyframeAnimation = [CAKeyframeAnimation animationWithKeyPath:keyPath];\n    keyframeAnimation.duration = duration;\n    keyframeAnimation.values = values;\n    keyframeAnimation.keyTimes = keyTimes;\n    keyframeAnimation.timingFunctions = timingFunctions;\n    keyframeAnimation.fillMode = kCAFillModeForwards;\n    keyframeAnimation.removedOnCompletion = NO;\n\n    return keyframeAnimation;\n}\n\n- (CABasicAnimation *)animationWithKeyPath:(NSString *)keyPath\n                                  duration:(CGFloat)duration\n                                 fromValue:(NSValue *)fromValue\n                                   toValue:(NSValue *)toValue\n                            timingFunction:(CAMediaTimingFunction *)timingFunction\n{\n    CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:keyPath];\n    animation.duration = duration;\n    animation.fromValue = fromValue;\n    animation.toValue = toValue;\n    animation.timingFunction = timingFunction;\n   
 animation.fillMode = kCAFillModeForwards;\n    animation.removedOnCompletion = NO;\n\n    return animation;\n}\n\n- (void)addOuterRingOpacityAnimation\n{\n    CAKeyframeAnimation *animation =\n        [self keyFrameAnimationWithKeyPath:@keypath(_outerRing, opacity)\n                                  duration:kSCAnimationStep * 5\n                                    values:@[ @0.0, @1.0, @1.0, @0.0 ]\n                                  keyTimes:@[ @0.0, @0.2, @0.8, @1.0 ]\n                           timingFunctions:@[\n                               [CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0],\n                               [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear],\n                               [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut],\n                           ]];\n    [_outerRing addAnimation:animation forKey:kSCOpacityAnimationKey];\n}\n\n- (void)addOuterRingScaleAnimation\n{\n    CAKeyframeAnimation *animation =\n        [self keyFrameAnimationWithKeyPath:@keypath(_innerCircle, transform)\n                                  duration:kSCAnimationStep * 3\n                                    values:@[\n                                        [NSValue valueWithCATransform3D:CATransform3DMakeScale(0.50, 0.50, 1.0)],\n                                        [NSValue valueWithCATransform3D:CATransform3DIdentity],\n                                        [NSValue valueWithCATransform3D:CATransform3DMakeScale(0.83, 0.83, 1.0)],\n                                    ]\n                                  keyTimes:@[ @0.0, @0.66, @1.0 ]\n                           timingFunctions:@[\n                               [CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0],\n                               [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut],\n                           ]];\n    [_outerRing addAnimation:animation 
forKey:kSCScaleAnimationKey];\n}\n\n- (void)addInnerCircleOpacityAnimation\n{\n    CAKeyframeAnimation *animation =\n        [self keyFrameAnimationWithKeyPath:@keypath(_innerCircle, opacity)\n                                  duration:kSCAnimationStep * 3\n                                    values:@[ @0.0, @0.40, @0.0 ]\n                                  keyTimes:@[ @0.0, @0.33, @1.0 ]\n                           timingFunctions:@[\n                               [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseIn],\n                               [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut],\n                           ]];\n    [_innerCircle addAnimation:animation forKey:kSCOpacityAnimationKey];\n}\n\n- (void)addInnerCircleScaleAnimation\n{\n    CABasicAnimation *animation =\n        [self animationWithKeyPath:@keypath(_innerCircle, transform)\n                          duration:kSCAnimationStep * 2\n                         fromValue:[NSValue valueWithCATransform3D:CATransform3DMakeScale(0.0, 0.0, 1.0)]\n                           toValue:[NSValue valueWithCATransform3D:CATransform3DIdentity]\n                    timingFunction:[CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0]];\n    [_innerCircle addAnimation:animation forKey:kSCScaleAnimationKey];\n}\n\n@end\n"
  },
  {
    "path": "Features/ToggleCamera/SCFeatureToggleCamera.h",
    "content": "//\n//  SCFeatureToggleCamera.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/17/18.\n//\n\n#import <SCCamera/SCFeature.h>\n#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>\n\n@protocol SCCapturer\n, SCFeatureToggleCamera, SCLensCameraScreenDataProviderProtocol;\n\n@protocol SCFeatureToggleCameraDelegate <NSObject>\n\n- (void)featureToggleCamera:(id<SCFeatureToggleCamera>)feature\n willToggleToDevicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n- (void)featureToggleCamera:(id<SCFeatureToggleCamera>)feature\n  didToggleToDevicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n\n@end\n\n/**\n SCFeature protocol for toggling the camera.\n */\n@protocol SCFeatureToggleCamera <SCFeature>\n\n@property (nonatomic, weak) id<SCFeatureToggleCameraDelegate> delegate;\n\n- (void)toggleCameraWithRecording:(BOOL)isRecording\n                    takingPicture:(BOOL)isTakingPicture\n                 lensDataProvider:(id<SCLensCameraScreenDataProviderProtocol>)lensDataProvider\n                       completion:(void (^)(BOOL success))completion;\n\n- (void)reset;\n\n@end\n"
  },
  {
    "path": "Features/Zooming/SCFeatureZooming.h",
    "content": "//\n//  SCFeatureZooming.h\n//  SCCamera\n//\n//  Created by Xiaokang Liu on 2018/4/19.\n//\n\n#import \"SCFeature.h\"\n\n#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>\n#import <SCSearch/SCSearchSnapZoomLevelProviding.h>\n\n@class SCPreviewPresenter;\n@protocol SCFeatureZooming;\n\n@protocol SCFeatureZoomingDelegate <NSObject>\n- (void)featureZoomingForceTouchedWhileRecording:(id<SCFeatureZooming>)featureZooming;\n- (BOOL)featureZoomingIsInitiatedRecording:(id<SCFeatureZooming>)featureZooming;\n@end\n\n@protocol SCFeatureZooming <SCFeature, SCSearchSnapZoomLevelProviding>\n@property (nonatomic, weak) id<SCFeatureZoomingDelegate> delegate;\n@property (nonatomic, weak) SCPreviewPresenter *previewPresenter;\n\n- (void)resetOffset;\n- (void)resetScale;\n\n- (void)cancelPreview;\n- (void)flipOffset;\n\n- (void)resetBeginningScale;\n- (void)toggleCameraForReset:(SCManagedCaptureDevicePosition)devicePosition;\n- (void)recordCurrentZoomStateForReset;\n@end\n"
  },
  {
    "path": "Lens/SCManagedCapturerARImageCaptureProvider.h",
    "content": "//\n//  SCManagedCapturerARImageCaptureProvider.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/11/18.\n//\n\n#import <Foundation/Foundation.h>\n\n@class SCManagedStillImageCapturer;\n@protocol SCManagedCapturerLensAPI\n, SCPerforming;\n\n/**\n Bridging protocol providing the ARImageCapturer subclass of SCManagedStillImageCapturer\n to capture core.\n */\n@protocol SCManagedCapturerARImageCaptureProvider <NSObject>\n\n- (SCManagedStillImageCapturer *)arImageCapturerWith:(id<SCPerforming>)performer\n                                  lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore;\n\n@end\n"
  },
  {
    "path": "Lens/SCManagedCapturerGLViewManagerAPI.h",
    "content": "//\n//  SCManagedCapturerGLViewManagerAPI.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/11/18.\n//\n\n#import <Looksery/LSAGLView.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n/**\n Bridging protocol for providing a glViewManager to capture core.\n */\n@protocol SCManagedCapturerGLViewManagerAPI <NSObject>\n\n@property (nonatomic, readonly, strong) LSAGLView *view;\n\n- (void)configureWithCaptureResource:(SCCaptureResource *)captureResource;\n\n- (void)setLensesActive:(BOOL)active;\n\n- (void)prepareViewIfNecessary;\n\n@end\n"
  },
  {
    "path": "Lens/SCManagedCapturerLSAComponentTrackerAPI.h",
    "content": "//\n//  SCManagedCapturerLSAComponentTrackerAPI.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/11/18.\n//\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n/**\n SCCamera protocol providing LSA tracking logic.\n */\n@protocol SCManagedCapturerLSAComponentTrackerAPI <NSObject>\n\n- (void)configureWithCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "Lens/SCManagedCapturerLensAPI.h",
    "content": "//\n//  SCManagedCapturerLensAPI.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/11/18.\n//\n\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedVideoARDataSource.h\"\n\n#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>\n#import <SCLenses/SCLens.h>\n\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedAudioDataSourceListener\n, SCManagedVideoARDataSource;\n@class LSAComponentManager;\n\n/**\n Encapsulation of LensesProcessingCore for use in SCCamera.\n */\n@protocol SCManagedCapturerLensAPI <SCManagedCapturerListener>\n\n@property (nonatomic, strong, readonly) LSAComponentManager *componentManager;\n@property (nonatomic, strong) NSString *activeLensId;\n@property (nonatomic, readonly) BOOL isLensApplied;\n@property (nonatomic, strong, readonly)\n    id<SCManagedAudioDataSourceListener, SCManagedVideoDataSourceListener> capturerListener;\n\ntypedef void (^SCManagedCapturerLensAPIPointOfInterestCompletion)(SCLensCategory *category, NSInteger categoriesCount);\n\n- (void)setAspectRatio:(BOOL)isLiveStreaming;\n\n- (SCLens *)appliedLens;\n\n- (void)setFieldOfView:(float)fieldOfView;\n\n- (void)setAsFieldOfViewListenerForDevice:(SCManagedCaptureDevice *)captureDevice;\n\n- (void)setAsFieldOfViewListenerForARDataSource:(id<SCManagedVideoARDataSource>)arDataSource NS_AVAILABLE_IOS(11_0);\n\n- (void)removeFieldOfViewListener;\n\n- (void)setModifySource:(BOOL)modifySource;\n\n- (void)setLensesActive:(BOOL)lensesActive\n       videoOrientation:(AVCaptureVideoOrientation)videoOrientation\n          filterFactory:(SCLookseryFilterFactory *)filterFactory;\n\n- (void)detectLensCategoryOnNextFrame:(CGPoint)point\n                     videoOrientation:(AVCaptureVideoOrientation)videoOrientation\n                               lenses:(NSArray<SCLens *> *)lenses\n                           completion:(SCManagedCapturerLensAPIPointOfInterestCompletion)completion;\n\n- (void)setShouldMuteAllSounds:(BOOL)shouldMuteAllSounds;\n\n- 
(UIImage *)processImage:(UIImage *)image\n             maxPixelSize:(NSInteger)maxPixelSize\n           devicePosition:(SCManagedCaptureDevicePosition)position\n              fieldOfView:(float)fieldOfView;\n\n- (void)setShouldProcessARFrames:(BOOL)shouldProcessARFrames;\n\n- (NSInteger)maxPixelSize;\n\n@end\n"
  },
  {
    "path": "Lens/SCManagedCapturerLensAPIProvider.h",
    "content": "//\n//  SCManagedCapturerLensAPIProvider.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/12/18.\n//\n\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedCapturerLensAPI;\n@class SCCaptureResource;\n\n/**\n Provider for creating new instances of SCManagedCapturerLensAPI within SCCamera.\n */\n@protocol SCManagedCapturerLensAPIProvider <NSObject>\n\n- (id<SCManagedCapturerLensAPI>)lensAPIForCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "Logging/SCCoreCameraLogger.h",
    "content": "//\n//  SCCoreCameraLogger.h\n//  Snapchat\n//\n//  Created by Chao Pang on 3/6/18.\n//\n\n#import <Foundation/Foundation.h>\n\n/**\n *  CAMERA_CREATION_DELAY event\n */\nextern NSString *const kSCCameraCreationDelayEventStartTimeKey;\nextern NSString *const kSCCameraCreationDelayEventStartTimeAdjustmentKey;\nextern NSString *const kSCCameraCreationDelayEventEndTimeKey;\nextern NSString *const kSCCameraCreationDelayEventCaptureSessionIdKey;\nextern NSString *const kSCCameraCreationDelayEventFilterLensIdKey;\nextern NSString *const kSCCameraCreationDelayEventNightModeDetectedKey;\nextern NSString *const kSCCameraCreationDelayEventNightModeActiveKey;\nextern NSString *const kSCCameraCreationDelayEventCameraApiKey;\nextern NSString *const kSCCameraCreationDelayEventCameraLevelKey;\nextern NSString *const kSCCameraCreationDelayEventCameraPositionKey;\nextern NSString *const kSCCameraCreationDelayEventCameraOpenSourceKey;\nextern NSString *const kSCCameraCreationDelayEventContentDurationKey;\nextern NSString *const kSCCameraCreationDelayEventMediaTypeKey;\nextern NSString *const kSCCameraCreationDelayEventStartTypeKey;\nextern NSString *const kSCCameraCreationDelayEventStartSubTypeKey;\nextern NSString *const kSCCameraCreationDelayEventAnalyticsVersion;\n\n@interface SCCoreCameraLogger : NSObject\n\n+ (instancetype)sharedInstance;\n\n/**\n *  CAMERA_CREATION_DELAY event\n */\n- (void)logCameraCreationDelayEventStartWithCaptureSessionId:(NSString *)captureSessionId\n                                                filterLensId:(NSString *)filterLensId\n                                      underLowLightCondition:(BOOL)underLowLightCondition\n                                           isNightModeActive:(BOOL)isNightModeActive\n                                                isBackCamera:(BOOL)isBackCamera\n                                                isMainCamera:(BOOL)isMainCamera;\n\n- 
(void)logCameraCreationDelaySplitPointRecordingGestureFinished;\n\n- (void)logCameraCreationDelaySplitPointStillImageCaptureApi:(NSString *)api;\n\n- (void)logCameraCreationDelaySplitPointPreCaptureOperationRequested;\n\n- (void)logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:(CFTimeInterval)time;\n\n- (void)updatedCameraCreationDelayWithContentDuration:(CFTimeInterval)duration;\n\n- (void)logCameraCreationDelaySplitPointCameraCaptureContentReady;\n\n- (void)logCameraCreationDelaySplitPointPreviewFinishedPreparation;\n\n- (void)logCameraCreationDelaySplitPointPreviewDisplayedForImage:(BOOL)isImage;\n\n- (void)logCameraCreationDelaySplitPointPreviewAnimationComplete:(BOOL)isImage;\n\n- (void)logCameraCreationDelaySplitPointPreviewFirstFramePlayed:(BOOL)isImage;\n\n- (void)cancelCameraCreationDelayEvent;\n\n@end\n"
  },
  {
    "path": "Logging/SCCoreCameraLogger.m",
    "content": "//\n//  SCCoreCameraLogger.m\n//  Snapchat\n//\n//  Created by Chao Pang on 3/6/18.\n//\n\n#import \"SCCoreCameraLogger.h\"\n\n#import <BlizzardSchema/SCAEvents.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n#import <SCLogger/SCCameraMetrics+CameraCreationDelay.h>\n\nstatic const char *kSCCoreCameraLoggerQueueLabel = \"com.snapchat.core-camera-logger-queue\";\n\nNSString *const kSCCameraCreationDelayEventStartTimeKey = @\"start_time\";\nNSString *const kSCCameraCreationDelayEventStartTimeAdjustmentKey = @\"start_time_adjustment\";\nNSString *const kSCCameraCreationDelayEventEndTimeKey = @\"end_time\";\nNSString *const kSCCameraCreationDelayEventCaptureSessionIdKey = @\"capture_session_id\";\nNSString *const kSCCameraCreationDelayEventFilterLensIdKey = @\"filter_lens_id\";\nNSString *const kSCCameraCreationDelayEventNightModeDetectedKey = @\"night_mode_detected\";\nNSString *const kSCCameraCreationDelayEventNightModeActiveKey = @\"night_mode_active\";\nNSString *const kSCCameraCreationDelayEventCameraApiKey = @\"camera_api\";\nNSString *const kSCCameraCreationDelayEventCameraLevelKey = @\"camera_level\";\nNSString *const kSCCameraCreationDelayEventCameraPositionKey = @\"camera_position\";\nNSString *const kSCCameraCreationDelayEventCameraOpenSourceKey = @\"camera_open_source\";\nNSString *const kSCCameraCreationDelayEventContentDurationKey = @\"content_duration\";\nNSString *const kSCCameraCreationDelayEventMediaTypeKey = @\"media_type\";\nNSString *const kSCCameraCreationDelayEventStartTypeKey = @\"start_type\";\nNSString *const kSCCameraCreationDelayEventStartSubTypeKey = @\"start_sub_type\";\nNSString *const kSCCameraCreationDelayEventAnalyticsVersion = @\"ios_v1\";\n\nstatic inline NSUInteger SCTimeToMS(CFTimeInterval time)\n{\n    return (NSUInteger)(time * 1000);\n}\n\nstatic NSString *SCDictionaryToJSONString(NSDictionary *dictionary)\n{\n    NSData *dictData = [NSJSONSerialization 
dataWithJSONObject:dictionary options:0 error:nil];\n    return [[NSString alloc] initWithData:dictData encoding:NSUTF8StringEncoding];\n}\n\n@implementation SCCoreCameraLogger {\n    SCQueuePerformer *_performer;\n    NSMutableDictionary *_cameraCreationDelayParameters;\n    NSMutableDictionary *_cameraCreationDelaySplits;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        _cameraCreationDelayParameters = [NSMutableDictionary dictionary];\n        _cameraCreationDelaySplits = [NSMutableDictionary dictionary];\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCCoreCameraLoggerQueueLabel\n                                            qualityOfService:QOS_CLASS_UNSPECIFIED\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCoreCamera];\n    }\n    return self;\n}\n\n+ (instancetype)sharedInstance\n{\n    static SCCoreCameraLogger *sharedInstance;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        sharedInstance = [[SCCoreCameraLogger alloc] init];\n    });\n    return sharedInstance;\n}\n\n// Camera creation delay metrics\n\n- (void)logCameraCreationDelayEventStartWithCaptureSessionId:(NSString *)captureSessionId\n                                                filterLensId:(NSString *)filterLensId\n                                      underLowLightCondition:(BOOL)underLowLightCondition\n                                           isNightModeActive:(BOOL)isNightModeActive\n                                                isBackCamera:(BOOL)isBackCamera\n                                                isMainCamera:(BOOL)isMainCamera\n{\n    CFTimeInterval startTime = CACurrentMediaTime();\n    [_performer perform:^{\n        [_cameraCreationDelayParameters removeAllObjects];\n        [_cameraCreationDelaySplits removeAllObjects];\n        
_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] = @(startTime);\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey] = captureSessionId ?: @\"null\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventFilterLensIdKey] = filterLensId ?: @\"null\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] = @(underLowLightCondition);\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey] = @(isNightModeActive);\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraPositionKey] =\n            isBackCamera ? @\"back\" : @\"front\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraOpenSourceKey] =\n            isMainCamera ? @\"main_camera\" : @\"reply_camera\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTypeKey] = SCLaunchType() ?: @\"null\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartSubTypeKey] = SCLaunchSubType() ?: @\"null\";\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointRecordingGestureFinished\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        CFTimeInterval endRecordingTimeOffset =\n            time - [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue];\n        NSNumber *recordStartTimeMillis =\n            (NSNumber *)_cameraCreationDelaySplits[kSCCameraSubmetricsPreCaptureOperationFinished];\n        if (recordStartTimeMillis) {\n            CFTimeInterval timeDisplacement = ([recordStartTimeMillis doubleValue] / 1000.0) - endRecordingTimeOffset;\n            _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] = @(timeDisplacement);\n        }\n        [self _addSplitPointForKey:kSCCameraSubmetricsRecordingGestureFinished atTime:time];\n    }];\n}\n\n- 
(void)logCameraCreationDelaySplitPointStillImageCaptureApi:(NSString *)api\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        if (api) {\n            _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraApiKey] = api;\n        }\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationRequested atTime:time];\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreCaptureOperationRequested\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationRequested atTime:time];\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:(CFTimeInterval)time\n{\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationFinished atTime:time];\n    }];\n}\n\n- (void)updatedCameraCreationDelayWithContentDuration:(CFTimeInterval)duration\n{\n    [_performer perform:^{\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey] = @(SCTimeToMS(duration));\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointCameraCaptureContentReady\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsCameraCaptureContentReady atTime:time];\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreviewFinishedPreparation\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsCameraCaptureContentReady atTime:time];\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreviewDisplayedForImage:(BOOL)isImage\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewLayoutReady atTime:time];\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreviewAnimationComplete:(BOOL)isImage\n{\n    
CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewAnimationFinish atTime:time];\n        if (_cameraCreationDelaySplits[kSCCameraSubmetricsPreviewPlayerReady]) {\n            [self _completeLogCameraCreationDelayEventWithIsImage:isImage atTime:time];\n        }\n    }];\n}\n\n- (void)logCameraCreationDelaySplitPointPreviewFirstFramePlayed:(BOOL)isImage\n{\n    CFTimeInterval time = CACurrentMediaTime();\n    [_performer perform:^{\n        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewPlayerReady atTime:time];\n        if (_cameraCreationDelaySplits[kSCCameraSubmetricsPreviewAnimationFinish]) {\n            [self _completeLogCameraCreationDelayEventWithIsImage:isImage atTime:time];\n        }\n    }];\n}\n\n- (void)cancelCameraCreationDelayEvent\n{\n    [_performer perform:^{\n        [_cameraCreationDelayParameters removeAllObjects];\n        [_cameraCreationDelaySplits removeAllObjects];\n    }];\n}\n\n#pragma - Private methods\n\n- (void)_completeLogCameraCreationDelayEventWithIsImage:(BOOL)isImage atTime:(CFTimeInterval)time\n{\n    SCAssertPerformer(_performer);\n    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey]) {\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventMediaTypeKey] = isImage ? 
@\"image\" : @\"video\";\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey] = @(time);\n        [self _logCameraCreationDelayBlizzardEvent];\n    }\n    [_cameraCreationDelayParameters removeAllObjects];\n    [_cameraCreationDelaySplits removeAllObjects];\n}\n\n- (void)_addSplitPointForKey:(NSString *)key atTime:(CFTimeInterval)time\n{\n    SCAssertPerformer(_performer);\n    if (key) {\n        CFTimeInterval timeOffset =\n            time - [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue];\n        NSNumber *timeAdjustment =\n            _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] ?: @(0);\n        _cameraCreationDelaySplits[key] = @(SCTimeToMS(timeOffset + [timeAdjustment doubleValue]));\n    }\n}\n\n- (void)_logCameraCreationDelayBlizzardEvent\n{\n    SCAssertPerformer(_performer);\n    SCASharedCameraMetricParams *sharedCameraMetricsParams = [[SCASharedCameraMetricParams alloc] init];\n    [sharedCameraMetricsParams setAnalyticsVersion:kSCCameraCreationDelayEventAnalyticsVersion];\n    NSString *mediaType = _cameraCreationDelayParameters[kSCCameraCreationDelayEventMediaTypeKey];\n    if (mediaType) {\n        if ([mediaType isEqualToString:@\"image\"]) {\n            [sharedCameraMetricsParams setMediaType:SCAMediaType_IMAGE];\n        } else if ([mediaType isEqualToString:@\"video\"]) {\n            [sharedCameraMetricsParams setMediaType:SCAMediaType_VIDEO];\n        }\n    }\n    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] &&\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey]) {\n        BOOL isNightModeDetected =\n            [_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] boolValue];\n        BOOL isNightModeActive =\n            [_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey] boolValue];\n        if 
(!isNightModeDetected) {\n            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_NOT_DETECTED];\n        } else if (!isNightModeActive) {\n            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_DETECTED];\n        } else if (isNightModeActive) {\n            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_ENABLED];\n        }\n    }\n\n    [sharedCameraMetricsParams setPowerMode:[[NSProcessInfo processInfo] isLowPowerModeEnabled]\n                                                ? @\"LOW_POWER_MODE_ENABLED\"\n                                                : @\"LOW_POWER_MODE_DISABLED\"];\n    [sharedCameraMetricsParams\n        setFilterLensId:_cameraCreationDelayParameters[kSCCameraCreationDelayEventFilterLensIdKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setCaptureSessionId:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setCameraApi:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraApiKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setCameraPosition:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraPositionKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setCameraOpenSource:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraOpenSourceKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setCameraLevel:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraLevelKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setStartType:_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTypeKey] ?: @\"null\"];\n    [sharedCameraMetricsParams\n        setStartSubType:_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartSubTypeKey] ?: @\"null\"];\n    [sharedCameraMetricsParams setSplits:SCDictionaryToJSONString(_cameraCreationDelaySplits)];\n\n    SCACameraSnapCreateDelay *creationDelay = 
[[SCACameraSnapCreateDelay alloc] init];\n    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] &&\n        _cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey]) {\n        double startTime = [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue];\n        double endTime = [_cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey] doubleValue];\n        NSNumber *timeAdjustment =\n            _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] ?: @(0);\n        [creationDelay setLatencyMillis:SCTimeToMS(endTime - startTime + [timeAdjustment doubleValue])];\n    } else {\n        [creationDelay setLatencyMillis:0];\n    }\n\n    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey]) {\n        [creationDelay\n            setContentDurationMillis:SCTimeToMS(\n                                         [_cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey]\n                                             doubleValue])];\n    } else {\n        [creationDelay setContentDurationMillis:0];\n    }\n    [creationDelay setSharedCameraMetricParams:sharedCameraMetricsParams];\n    [[SCLogger sharedInstance] logUserTrackedEvent:creationDelay];\n}\n\n@end\n"
  },
  {
    "path": "Logging/SCLogger+Camera.h",
    "content": "//\n//  SCLogger+Camera.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 5/8/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"AVCameraViewEnums.h\"\n\n#import <SCBase/SCSignPost.h>\n#import <SCLogger/SCLogger.h>\n\n#import <CoreMedia/CoreMedia.h>\n\ntypedef NS_ENUM(NSUInteger, CameraCreationDelayLoggingStatus) {\n    CAMERA_CREATION_DELAY_LOGGING_START,\n    CAMERA_CREATION_DELAY_LOGGINT_LAST_STEP,\n    CAMERA_CREATION_DELAY_LOGGING_END,\n};\n\n@interface SCLogger (Camera)\n\n@property (nonatomic, strong) NSNumber *cameraCreationDelayLoggingStatus;\n\n- (void)logCameraCreationStartWithMethod:(SCCameraRecordingMethod)method\n                           lensesEnabled:(BOOL)lensesEnabled\n                            activeLensId:(NSString *)activeLensId\n                        captureSessionId:(NSString *)captureSessionId;\n- (void)logStillImageCaptureApi:(NSString *)api;\n- (void)logPreCaptureOperationRequestedAt:(CFTimeInterval)requestTime;\n- (void)logPreCaptureOperationFinishedAt:(CFTimeInterval)time;\n- (void)logCameraCaptureRecordingGestureFinishedAtTime:(CFTimeInterval)endRecordingTime;\n- (void)logCameraCaptureFinishedWithDuration:(CFTimeInterval)duration;\n- (void)logCameraCaptureContentReady;\n- (void)logPreviewFinishedPreparation;\n- (void)logPreviewDisplayedForImage:(BOOL)isImage;\n- (void)logPreviewAnimationComplete:(BOOL)isImage;\n- (void)logPreviewFirstFramePlayed:(BOOL)isImage;\n- (void)cancelCameraCreationEvent;\n\n- (void)logRecordingMayBeTooShortWithMethod:(SCCameraRecordingMethod)method;\n- (void)logRecordingWasTooShortWithFirstFrame:(CMTime)firstFrame\n                            frontFacingCamera:(BOOL)isFrontFacing\n                                  cameraFlips:(NSInteger)cameraFlips;\n\n- (void)logManagedCapturerSettingFailure:(NSString *)settingTask error:(NSError *)error;\n- (void)logCameraExposureAdjustmentDelayStart;\n- (void)logCameraExposureAdjustmentDelayEndWithStrategy:(NSString 
*)strategy;\n- (void)logCameraCreationDelaySubMetricsStartWithSignCode:(kSCSignPostCodeEnum)signPostCode;\n- (void)logCameraCreationDelaySubMetricsEndWithSignCode:(kSCSignPostCodeEnum)signPostCod;\n\n@end\n"
  },
  {
    "path": "Logging/SCLogger+Camera.m",
    "content": "//\n//  SCLogger+Camera.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 5/8/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCLogger+Camera.h\"\n\n#import \"SCCameraTweaks.h\"\n\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCLogger/SCCameraMetrics+CameraCreationDelay.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n\n#import <objc/runtime.h>\n\n@implementation SCLogger (Camera)\n\n@dynamic cameraCreationDelayLoggingStatus;\n\n- (NSNumber *)cameraCreationDelayLoggingStatus\n{\n    return objc_getAssociatedObject(self, @selector(cameraCreationDelayLoggingStatus));\n}\n\n- (void)setCameraCreationDelayLoggingStatus:(NSNumber *)status\n{\n    objc_setAssociatedObject(self, @selector(cameraCreationDelayLoggingStatus), status,\n                             OBJC_ASSOCIATION_RETAIN_NONATOMIC);\n}\n\n- (BOOL)shouldLogCameraCreationDelay\n{\n    return [[self cameraCreationDelayLoggingStatus] intValue] != CAMERA_CREATION_DELAY_LOGGING_END;\n}\n\n- (void)logCameraCreationDelayEnd\n{\n    if ([[self cameraCreationDelayLoggingStatus] intValue] == CAMERA_CREATION_DELAY_LOGGINT_LAST_STEP) {\n        SCTraceSignPostEndForMetrics(kSCSignPostCameraCreationDelay, 0, 0, 0, 0);\n        [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGING_END)];\n    } else {\n        [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGINT_LAST_STEP)];\n    }\n}\n\n- (void)logCameraCreationStartWithMethod:(SCCameraRecordingMethod)method\n                           lensesEnabled:(BOOL)lensesEnabled\n                            activeLensId:(NSString *)activeLensId\n                        captureSessionId:(NSString *)captureSessionId\n{\n    NSMutableDictionary *parameters = [@{\n        @\"lens_ui_enabled\" : @(lensesEnabled),\n        @\"analytics_version\" : kSCCameraDelayEventVersion,\n        @\"method\" : @(method),\n    } 
mutableCopy];\n    if (lensesEnabled && activeLensId) {\n        [parameters setObject:activeLensId forKey:@\"lens_id\"];\n    }\n    if (captureSessionId) {\n        [parameters setObject:captureSessionId forKey:@\"capture_session_id\"];\n    }\n    [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGING_START)];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCreationDelay];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraRecordingGestureFinished];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationRequested];\n    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraCaptureDelayEvent\n                                         uniqueId:@\"\"\n                                    isUniqueEvent:NO\n                                       parameters:parameters\n                               shouldLogStartTime:YES];\n}\n\n- (void)logCameraExposureAdjustmentDelayStart\n{\n    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraExposureAdjustmentDelay\n                                         uniqueId:@\"\"\n                                    isUniqueEvent:NO\n                                       parameters:nil\n                               shouldLogStartTime:YES];\n}\n\n- (void)logCameraExposureAdjustmentDelayEndWithStrategy:(NSString *)strategy\n{\n    [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraExposureAdjustmentDelay\n                                       uniqueId:@\"\"\n                                     parameters:@{\n                                         @\"strategy\" : strategy\n                                     }];\n}\n\n- (void)logCameraCaptureRecordingGestureFinishedAtTime:(CFTimeInterval)endRecordingTime\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraRecordingGestureFinished];\n    [[SCLogger sharedInstance]\n        updateLogTimedEvent:kSCCameraCaptureDelayEvent\n               
    uniqueId:@\"\"\n                     update:^(NSMutableDictionary *startParameters) {\n                         NSMutableDictionary *eventParameters =\n                             startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                         NSNumber *recordStartTime =\n                             (NSNumber *)eventParameters[kSCCameraSubmetricsPreCaptureOperationFinished];\n                         CFTimeInterval endRecordingTimeOffset =\n                             endRecordingTime -\n                             [startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventTimeKey] doubleValue];\n                         if (recordStartTime) {\n                             CFTimeInterval timeDisplacement =\n                                 ([recordStartTime doubleValue] / 1000.0) - endRecordingTimeOffset;\n                             [eventParameters setObject:@(timeDisplacement)\n                                                 forKey:SCPerformanceMetricsKey.kSCLoggerStartEventTimeAdjustmentKey];\n                         }\n                         [self addSplitPoint:kSCCameraSubmetricsRecordingGestureFinished\n                                      atTime:endRecordingTime\n                                     toEvent:startParameters];\n                     }];\n}\n\n- (void)logStillImageCaptureApi:(NSString *)api\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationRequested];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationFinished];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCaptureContentReady];\n    CFTimeInterval requestTime = CACurrentMediaTime();\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                     uniqueId:@\"\"\n                       update:^(NSMutableDictionary *startParameters) {\n                           NSMutableDictionary *eventParameters =\n     
                          startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                           [eventParameters setObject:api forKey:@\"api_type\"];\n                           [eventParameters setObject:@(1) forKey:@\"camera_api_level\"];\n                           [self addSplitPoint:@\"PRE_CAPTURE_OPERATION_REQUESTED\"\n                                        atTime:requestTime\n                                       toEvent:startParameters];\n                       }];\n}\n\n- (void)logPreCaptureOperationRequestedAt:(CFTimeInterval)requestTime\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationRequested];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationFinished];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCaptureContentReady];\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                     uniqueId:@\"\"\n                   splitPoint:kSCCameraSubmetricsPreCaptureOperationRequested\n                         time:requestTime];\n}\n\n- (void)logPreCaptureOperationFinishedAt:(CFTimeInterval)time\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationFinished];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreviewPlayerReady];\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                     uniqueId:@\"\"\n                   splitPoint:kSCCameraSubmetricsPreCaptureOperationFinished\n                         time:time];\n}\n\n- (void)logCameraCaptureFinishedWithDuration:(CFTimeInterval)duration\n{\n    [[SCLogger sharedInstance]\n        updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                   uniqueId:@\"\"\n                     update:^(NSMutableDictionary *startParameters) {\n                         NSMutableDictionary *eventParameters =\n                             
startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                         [eventParameters setObject:@(SCTimeInMillisecond(duration)) forKey:@\"content_duration\"];\n                     }];\n}\n\n- (void)logCameraCaptureContentReady\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraCaptureContentReady];\n    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                                          uniqueId:@\"\"\n                                        splitPoint:kSCCameraSubmetricsCameraCaptureContentReady];\n}\n\n- (void)logPreviewFinishedPreparation\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewFinishPreparation];\n    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreviewAnimationFinish];\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                     uniqueId:@\"\"\n                   splitPoint:kSCCameraSubmetricsPreviewFinishPreparation];\n}\n\n- (void)logPreviewDisplayedForImage:(BOOL)isImage\n{\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewLayoutReady];\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@\"\" splitPoint:kSCCameraSubmetricsPreviewLayoutReady];\n}\n\n- (void)logPreviewAnimationComplete:(BOOL)isImage\n{\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent\n                     uniqueId:@\"\"\n                   splitPoint:kSCCameraSubmetricsPreviewAnimationFinish];\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewAnimationFinish];\n    [self logCameraCreationDelayEnd];\n    [self conditionallyLogTimedEventEnd:kSCCameraCaptureDelayEvent\n                               uniqueId:@\"\"\n                             parameters:@{\n                                 @\"type\" : isImage ? 
@\"image\" : @\"video\",\n                             }\n                              shouldLog:^BOOL(NSDictionary *startParameters) {\n                                  // For video, PREVIEW_PLAYER_READY and PREVIEW_ANIMATION_FINISH can happen in either\n                                  // order. So here we check for existence of this key, and end timer if the other\n                                  // event have happened.\n                                  NSMutableDictionary *eventParameters =\n                                      startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                                  return eventParameters[kSCCameraSubmetricsPreviewPlayerReady] != nil;\n                              }];\n}\n\n- (void)logPreviewFirstFramePlayed:(BOOL)isImage\n{\n    [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@\"\" splitPoint:kSCCameraSubmetricsPreviewPlayerReady];\n    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewPlayerReady];\n    [self logCameraCreationDelayEnd];\n    [self conditionallyLogTimedEventEnd:kSCCameraCaptureDelayEvent\n                               uniqueId:@\"\"\n                             parameters:@{\n                                 @\"type\" : isImage ? 
@\"image\" : @\"video\",\n                             }\n                              shouldLog:^BOOL(NSDictionary *startParameters) {\n                                  NSMutableDictionary *eventParameters =\n                                      startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                                  // See the comment above for PREVIEW_PLAYER_READY and PREVIEW_ANIMATION_FINISH.\n                                  return eventParameters[kSCCameraSubmetricsPreviewAnimationFinish] != nil;\n                              }];\n}\n\n- (void)cancelCameraCreationEvent\n{\n    [self cancelLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@\"\"];\n}\n\n- (void)logRecordingMayBeTooShortWithMethod:(SCCameraRecordingMethod)method\n{\n    [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraMetricsRecordingTooShort uniqueId:@\"\"];\n    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsRecordingTooShort\n                                         uniqueId:@\"\"\n                                    isUniqueEvent:NO\n                                       parameters:@{\n                                           @\"method\" : @(method),\n                                           @\"analytics_version\" : kSCCameraRecordingTooShortVersion,\n                                       }\n                               shouldLogStartTime:YES];\n}\n\n- (void)logRecordingWasTooShortWithFirstFrame:(CMTime)firstFrame\n                            frontFacingCamera:(BOOL)isFrontFacing\n                                  cameraFlips:(NSInteger)cameraFlips\n{\n    [self logTimedEventEnd:kSCCameraMetricsRecordingTooShort\n                  uniqueId:@\"\"\n                    update:^(NSDictionary *startParameters, CFTimeInterval eventEndTime, CFTimeInterval adjustedTime) {\n                        NSMutableDictionary *eventParameters =\n                            
startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];\n                        if (CMTIME_IS_VALID(firstFrame)) {\n                            CFTimeInterval startTime =\n                                [startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventTimeKey] doubleValue];\n                            CFTimeInterval firstFrameRelative = CMTimeGetSeconds(firstFrame) - startTime;\n                            [eventParameters setObject:@(firstFrameRelative) forKey:@\"first_frame_s\"];\n                        }\n                        [eventParameters setObject:@(isFrontFacing) forKey:@\"is_front_facing\"];\n                        if (cameraFlips) {\n                            [eventParameters setObject:@(cameraFlips > 0) forKey:@\"has_camera_been_flipped\"];\n                        }\n                    }];\n}\n\n- (void)logManagedCapturerSettingFailure:(NSString *)settingTask error:(NSError *)error\n{\n    NSMutableDictionary *parameters = [[NSMutableDictionary alloc] init];\n    parameters[@\"setting_task\"] = settingTask;\n    if (error) {\n        parameters[@\"setting error\"] = error;\n    }\n    [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraManagedCaptureSettingFailure\n                                       uniqueId:@\"\"\n                                     parameters:parameters];\n}\n\n- (void)logCameraCreationDelaySubMetricsStartWithSignCode:(kSCSignPostCodeEnum)signPostCode\n{\n    if ([self shouldLogCameraCreationDelay]) {\n        SCTraceSignPostStartForMetrics(signPostCode, 0, 0, 0, 0);\n    }\n}\n\n- (void)logCameraCreationDelaySubMetricsEndWithSignCode:(kSCSignPostCodeEnum)signPostCode\n{\n    if ([self shouldLogCameraCreationDelay]) {\n        SCTraceSignPostEndForMetrics(signPostCode, 0, 0, 0, 0);\n    }\n}\n\n@end\n"
  },
  {
    "path": "Logging/SCManiphestTicketCreator.h",
    "content": "//\n//  SCManiphestTicketCreator.h\n//  SCCamera\n//\n//  Created by Michel Loenngren on 4/16/18.\n//\n\n#import <Foundation/Foundation.h>\n\n/**\n Protocol for filing jira tickets and beta s2r.\n */\n@protocol SCManiphestTicketCreator\n\n- (void)createAndFile:(NSData *)image\n         creationTime:(long)reportCreationTime\n          description:(NSString *)bugDescription\n                email:(NSString *)otherEmail\n              project:(NSString *)projectName\n           subproject:(NSString *)subprojectName;\n\n- (void)createAndFileBetaReport:(NSString *)msg;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ARConfiguration+SCConfiguration.h",
    "content": "//\n//  ARConfiguration+SCConfiguration.h\n//  Snapchat\n//\n//  Created by Max Goedjen on 11/7/17.\n//\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import <ARKit/ARKit.h>\n\n@interface ARConfiguration (SCConfiguration)\n\n+ (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position;\n+ (ARConfiguration *_Nullable)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ARConfiguration+SCConfiguration.m",
    "content": "//\n//  ARConfiguration+SCConfiguration.m\n//  Snapchat\n//\n//  Created by Max Goedjen on 11/7/17.\n//\n\n#import \"ARConfiguration+SCConfiguration.h\"\n\n#import \"SCCapturerDefines.h\"\n\n@implementation ARConfiguration (SCConfiguration)\n\n+ (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position\n{\n    return [[[self sc_configurationForDevicePosition:position] class] isSupported];\n}\n\n+ (ARConfiguration *)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position\n{\n    if (@available(iOS 11.0, *)) {\n        if (position == SCManagedCaptureDevicePositionBack) {\n            ARWorldTrackingConfiguration *config = [[ARWorldTrackingConfiguration alloc] init];\n            config.planeDetection = ARPlaneDetectionHorizontal;\n            config.lightEstimationEnabled = NO;\n            return config;\n        } else {\n#ifdef SC_USE_ARKIT_FACE\n            return [[ARFaceTrackingConfiguration alloc] init];\n#endif\n        }\n    }\n    return nil;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/AVCaptureConnection+InputDevice.h",
    "content": "//\n//  AVCaptureConnection+InputDevice.h\n//  Snapchat\n//\n//  Created by William Morriss on 1/20/15\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface AVCaptureConnection (InputDevice)\n\n- (AVCaptureDevice *)inputDevice;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/AVCaptureConnection+InputDevice.m",
    "content": "//\n//  AVCaptureConnection+InputDevice.m\n//  Snapchat\n//\n//  Created by William Morriss on 1/20/15\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"AVCaptureConnection+InputDevice.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n\n@implementation AVCaptureConnection (InputDevice)\n\n- (AVCaptureDevice *)inputDevice\n{\n    NSArray *inputPorts = self.inputPorts;\n    AVCaptureInputPort *port = [inputPorts firstObject];\n    SCAssert([port.input isKindOfClass:[AVCaptureDeviceInput class]], @\"unexpected port\");\n    AVCaptureDeviceInput *deviceInput = (AVCaptureDeviceInput *)port.input;\n    AVCaptureDevice *device = deviceInput.device;\n    return device;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/AVCaptureDevice+ConfigurationLock.h",
    "content": "//\n//  AVCaptureDevice+ConfigurationLock.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/19/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@interface AVCaptureDevice (ConfigurationLock)\n\n/*\n The following method will lock this AVCaptureDevice, run the task, then unlock the device.\n The task is usually related to set AVCaptureDevice.\n It will return a boolean telling you whether or not your task ran successfully. You can use the boolean to adjust your\n strategy to handle this failure. For some cases, we don't have a good mechanism to handle the failure. E.g. if we want\n to re-focus, but failed to do so. What is next step? Pop up a alert view to user? If yes, it is intrusive, if not, user\n will get confused. Just because the error handling is difficulty, we would like to notify you if the task fails.\n If the task does not run successfully. We will log an event using SCLogger for better visibility.\n */\n- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task;\n\n/*\n The following method has the same function as the above one.\n The difference is that it retries the operation for certain times. Please give a number below or equal 2.\n When retry equals 0, we will only try to lock for once.\n When retry equals 1, we will retry once if the 1st try fails.\n ....\n */\n- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/AVCaptureDevice+ConfigurationLock.m",
    "content": "//\n//  AVCaptureDevice+ConfigurationLock.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/19/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n\n#import \"SCLogger+Camera.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCLogger/SCLogger.h>\n\n@implementation AVCaptureDevice (ConfigurationLock)\n\n- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task\n{\n    return [self runTask:taskName withLockedConfiguration:task retry:0];\n}\n\n- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes\n{\n    SCAssert(taskName, @\"camera logger taskString should not be empty\");\n    SCAssert(retryTimes <= 2 && retryTimes >= 0, @\"retry times should be equal to or below 2.\");\n    NSError *error = nil;\n    BOOL deviceLockSuccess = NO;\n    NSUInteger retryCounter = 0;\n    while (retryCounter <= retryTimes && !deviceLockSuccess) {\n        deviceLockSuccess = [self lockForConfiguration:&error];\n        retryCounter++;\n    }\n    if (deviceLockSuccess) {\n        task();\n        [self unlockForConfiguration];\n        SCLogCoreCameraInfo(@\"AVCapture Device setting success, task:%@ tryCount:%zu\", taskName,\n                            (unsigned long)retryCounter);\n    } else {\n        SCLogCoreCameraError(@\"AVCapture Device Encountered error when %@ %@\", taskName, error);\n        [[SCLogger sharedInstance] logManagedCapturerSettingFailure:taskName error:error];\n    }\n    return deviceLockSuccess;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.h",
    "content": "//\n//  SCCaptureConfiguration.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/3/17.\n//\n//\n\n#import \"SCCaptureConfigurationAnnouncer.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n#import <Looksery/LSAGLView.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n SCCaptureConfiguration is the configuration class which is going to be used for customer to configure camera. This is\n how to use it:\n\n SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];\n\n // Conduct the setting here.\n e.g:\n configuration.torchActive = YES;\n\n // Commit your configuration\n [captureConfigurator commitConfiguration:configuration\n                        completionHandler:handler]\n\n Here are several interesting facts about SCCaptureConfiguration:\n 1) Though SCCaptureConfiguration has so many parameters, you don't need to care the parameters which you do not intend\nto set. For example, if you only want to set night mode active, here is the code:\n\n SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];\n\n configuration.isNightModeActive = YES;\n\n [captureConfigurator commitConfiguration:configuration\n                        completionHandler:handler]\n\n That is it.\n\n 2) you can set multiple configuration settings, then commit, before you commit, nothing will happen, e.g.:\n\n SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];\n\n configuration.isNightModeActive = YES;\n configuration.zoomFactor = 5;\n configuration.lensesActive = YES;\n\n [captureConfigurator commitConfiguration:configuration\n                        completionHandler:handler]\n\n 3) commit a configuration means the configuration is gone. 
If you set parameters on configuration after it is committed,\nit will crash on debug build, and on other builds such as production, the setting will be ignored, e.g.:\n\n SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];\n\n configuration.isNightModeActive = YES;\n\n [captureConfigurator commitConfiguration:configuration\n                        completionHandler:handler]\n\n // The line below will crash on debug, and be ignored on other builds.\n configuration.zoomFactor = 5;\n\n 4) committing a configuration is an atomic action. That means all changes customers want to have on camera will happen\nin a group. If 2 customers commit at the same time, we will handle them one by one.\n\n 5) We are still figuring out what parameters should be in this configuration, parameters could be added or deleted\n later. In the end, the configuration is going to be the only way customers configure the camera.\n\n */\n\n@interface SCCaptureConfiguration : NSObject\n\n@property (nonatomic, assign) BOOL isRunning;\n\n@property (nonatomic, assign) BOOL isNightModeActive;\n\n@property (nonatomic, assign) BOOL lowLightCondition;\n\n@property (nonatomic, assign) BOOL adjustingExposure;\n\n@property (nonatomic, assign) SCManagedCaptureDevicePosition devicePosition;\n\n@property (nonatomic, assign) CGFloat zoomFactor;\n\n@property (nonatomic, assign) BOOL flashSupported;\n\n@property (nonatomic, assign) BOOL torchSupported;\n\n@property (nonatomic, assign) BOOL flashActive;\n\n@property (nonatomic, assign) BOOL torchActive;\n\n@property (nonatomic, assign) BOOL lensesActive;\n\n@property (nonatomic, assign) BOOL arSessionActive;\n\n@property (nonatomic, assign) BOOL liveVideoStreaming;\n\n@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;\n\n@property (nonatomic, strong) LSAGLView *videoPreviewGLView;\n\n@property (nonatomic, assign) SCVideoCaptureSessionInfo captureSessionInfo;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.m",
    "content": "//\n//  SCCaptureConfiguration.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/3/17.\n//\n//\n\n#import \"SCCaptureConfiguration.h\"\n#import \"SCCaptureConfiguration_Private.h\"\n\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCAssertWrapper.h>\n\n@interface SCCaptureConfiguration () {\n    BOOL _sealed;\n    NSMutableSet<SCCaptureConfigurationDirtyKey *> *_dirtyKeys;\n}\n@end\n\n@implementation SCCaptureConfiguration\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        _dirtyKeys = [[NSMutableSet<SCCaptureConfigurationDirtyKey *> alloc] init];\n        _sealed = NO;\n    }\n    return self;\n}\n\n- (void)setIsRunning:(BOOL)running\n{\n    if ([self _configurationSealed]) {\n        return;\n    }\n    _isRunning = running;\n    [_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsRunning)];\n}\n\n/*\n All set methods will be added later. They follow the format of setIsRunning.\n */\n\n@end\n\n@implementation SCCaptureConfiguration (privateMethods)\n\n- (NSArray *)dirtyKeys\n{\n    if (!_sealed && SCIsDebugBuild()) {\n        SCAssert(NO, @\"Configuration not sealed yet, setting is still happening!\");\n    }\n    return [_dirtyKeys allObjects];\n}\n\n- (void)seal\n{\n    _sealed = YES;\n}\n\n- (BOOL)_configurationSealed\n{\n    if (_sealed) {\n        if (SCIsDebugBuild()) {\n            SCAssert(NO, @\"Try to set property after commit configuration to configurator\");\n        }\n        return YES;\n    } else {\n        return NO;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.h",
    "content": "//\n//  SCCaptureConfigurationAnnouncer.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureConfigurationListener.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n All APIs are thread safe. Announcer will not retain your object. So even if customer forgets to call remove listener,\n it will not create zombie objects.\n */\n@interface SCCaptureConfigurationAnnouncer : NSObject\n\n/*\n When customer adds an object to be a listener, that object will receive an update of current truth. That is the chance\n for the object to do adjustment according to the current configuration of the camera.\n */\n- (void)addListener:(id<SCCaptureConfigurationListener>)listener;\n\n- (void)removeListener:(id<SCCaptureConfigurationListener>)listener;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.m",
    "content": "//\n//  SCCaptureConfigurationAnnouncer.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureConfigurationAnnouncer.h\"\n#import \"SCCaptureConfigurationAnnouncer_Private.h\"\n\n#import \"SCCaptureConfigurator.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCPerforming.h>\n\n@interface SCCaptureConfigurationAnnouncer () {\n    NSHashTable<id<SCCaptureConfigurationListener>> *_listeners;\n    SCQueuePerformer *_performer;\n    __weak SCCaptureConfigurator *_configurator;\n}\n@end\n\n@implementation SCCaptureConfigurationAnnouncer\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator\n{\n    self = [super init];\n    if (self) {\n        _listeners = [NSHashTable<id<SCCaptureConfigurationListener>> hashTableWithOptions:NSHashTableWeakMemory];\n        SCAssert(performer, @\"performer should not be nil\");\n        _performer = performer;\n        _configurator = configurator;\n    }\n    return self;\n}\n\n- (void)addListener:(id<SCCaptureConfigurationListener>)listener\n{\n    [_performer perform:^{\n        SCAssert(listener, @\"listener should not be nil\");\n        [_listeners addObject:listener];\n        [listener captureConfigurationDidChangeTo:_configurator.currentConfiguration];\n    }];\n}\n\n- (void)removeListener:(id<SCCaptureConfigurationListener>)listener\n{\n    [_performer perform:^{\n        SCAssert(listener, @\"listener should not be nil\");\n        [_listeners removeObject:listener];\n    }];\n}\n\n- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration\n{\n    SCAssertPerformer(_performer);\n    for (id<SCCaptureConfigurationListener> listener in _listeners) {\n        [listener captureConfigurationDidChangeTo:configuration];\n    }\n}\n\n- (void)dealloc\n{\n    [_listeners removeAllObjects];\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer_Private.h",
    "content": "//\n//  SCCaptureConfigurationAnnouncer_Private.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureConfigurationAnnouncer.h\"\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n@class SCCaptureConfigurator;\n\n/*\n This private header is only going to be used by SCCaptureConfigurator. Other customers should only use the public\n header.\n */\n@interface SCCaptureConfigurationAnnouncer ()\n/*\n The announcer is going to be instantiated by SCCaptureConfigurator. It will take in a queue performer. The design is\n that announcer and configurator is going to share the same serial queue to avoid racing. This is something we could\n change later.\n */\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator;\n\n/*\n The API below is called by configurator to notify listener that configuration has changed.\n */\n- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationListener.h",
    "content": "//\n//  SCCaptureConfigurationListener.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n\n#import \"SCManagedCapturerState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureConfiguration;\n\n/*\n As a listener to configuration of camera core, you will get an update whenever the configuration changes, and you will\n receive an immutable state object for the current truth.\n */\n\n@protocol SCCaptureConfigurationListener <NSObject>\n\n- (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration_Private.h",
    "content": "//\n//  SCCaptureConfiguration_Private.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/3/17.\n//\n//\n\n#import \"SCCaptureConfiguration_Private.h\"\n\ntypedef NSNumber SCCaptureConfigurationDirtyKey;\n\n/*\n The key values to identify dirty keys in SCCaptureConfiguration.\n Dirty key is defined as the key customer changes.\n\n e.g. if customer toggle device position. Dirty keys will have SCCaptureConfigurationKeyDevicePosition.\n\n It is not complete, and it is only a draft now. It\n will be gradually tuned while we work on the APIs.\n */\n\ntypedef NS_ENUM(NSUInteger, SCCaptureConfigurationKey) {\n    SCCaptureConfigurationKeyIsRunning,\n    SCCaptureConfigurationKeyIsNightModeActive,\n    SCCaptureConfigurationKeyLowLightCondition,\n    SCCaptureConfigurationKeyDevicePosition,\n    SCCaptureConfigurationKeyZoomFactor,\n    SCCaptureConfigurationKeyFlashActive,\n    SCCaptureConfigurationKeyTorchActive,\n    SCCaptureConfigurationKeyARSessionActive,\n    SCCaptureConfigurationKeyLensesActive,\n    SCCaptureConfigurationKeyVideoRecording,\n};\n\n@interface SCCaptureConfiguration (internalMethods)\n\n// Return dirtyKeys, which identify the parameters customer want to set.\n- (NSArray *)dirtyKeys;\n\n// Called by SCCaptureConfigurator to seal a configuration, so future changes are ignored.\n- (void)seal;\n\n- (BOOL)_configurationSealed;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.h",
    "content": "//\n//  SCCaptureConfigurator.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureConfiguration.h\"\n#import \"SCCaptureConfigurationAnnouncer.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n#import <Looksery/LSAGLView.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n SCCaptureConfigurator is the class you use to config the setting of the camera hardware. Such as setting the camera to\n be front or back, setting camera hardware to be certain resolution, or to activate night mode.\n\n You can use this class for many things:\n\n a) do 1 time poking to checkout the current camera configuration via the currentConfiguration.\n\n Note that we represent configuration via id<SCManagedCapturerState>. It is going to be an immutable object.\n\n b) register to be the listener of the configuration change via the announcer.\n Every time a camera configuration change, you will receive an update.\n\n c) set the configuration via commitConfiguration API. You convey your setting intention via SCCaptureConfiguration.\n\n You can register a completionHandler to be called after your configuration gets done.\n\n Inside the completionHandler, we will pass you an error if it happens, and there will be a boolean cameraChanged. 
If\n your configuration already equals the current configuration of the camera, we will not change the camera, and the boolean\n will be false.\n\n d) All APIs are thread safe.\n */\n\ntypedef void (^SCCaptureConfigurationCompletionHandler)(NSError *error, BOOL cameraChanged);\n\n@interface SCCaptureConfigurator : NSObject\n\n@property (nonatomic, strong, readonly) SCCaptureConfigurationAnnouncer *announcer;\n\n@property (nonatomic, strong, readonly) id<SCManagedCapturerState> currentConfiguration;\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer;\n\n- (void)commitConfiguration:(SCCaptureConfiguration *)configuration\n          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.m",
    "content": "//\n//  SCCaptureConfiguration.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureConfigurator.h\"\n\n#import \"SCCaptureConfigurationAnnouncer_Private.h\"\n#import \"SCCaptureConfiguration_Private.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n\n@interface SCCaptureConfigurator () {\n    SCQueuePerformer *_performer;\n}\n@end\n\n@implementation SCCaptureConfigurator\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n{\n    self = [super init];\n    if (self) {\n        _announcer = [[SCCaptureConfigurationAnnouncer alloc] initWithPerformer:performer configurator:self];\n        _performer = performer;\n        // TODO: initialize _currentConfiguration\n    }\n    return self;\n}\n\n- (void)commitConfiguration:(SCCaptureConfiguration *)configuration\n          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler\n{\n    [configuration seal];\n    [_performer perform:^() {\n        SCAssert(configuration, @\"Configuration must be a valid input parameter\");\n        NSArray<SCCaptureConfigurationDirtyKey *> *dirtyKeys = [configuration dirtyKeys];\n        for (SCCaptureConfigurationDirtyKey *key in dirtyKeys) {\n            [self _processKey:[key integerValue] configuration:configuration];\n        }\n        if (completionHandler) {\n            // TODO: passing in right parameters.\n            completionHandler(NULL, YES);\n        }\n    }];\n}\n\n- (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration\n{\n    // Tune the hardware depending on what key is dirty, and what is the value is inside configuration.\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Core/SCCaptureCore.h",
    "content": "//\n//  SCCaptureCore.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureStateMachineContext.h\"\n#import \"SCCapturer.h\"\n\n#import <SCFoundation/SCPerforming.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureConfigurator;\n\n/*\n SCCaptureCore abstracts away the hardware aspect of a camera. SCCaptureCore is the V2 version of the\n SCManagedCapturerV1.\n\n SCCaptureCore itself does very little things actually. Its main job is to expose APIs of camera hardware to outside\n customers. The actual heavy lifting is done via delegating the jobs to multiple worker classes.\n\n We generally categorize the operation of camera hardware into 2 categories:\n\n 1) make camera hardware do state transition. Such as what is shown in this graph:\n https://docs.google.com/presentation/d/1KWk-XSgO0wFAjBZXsl_OnHBGpi_pd9-ds6Wje8vX-0s/edit#slide=id.g2017e46295_1_10\n\n 2) config camera hardware setting, such as setting the camera to be front or back, such as setting camera hardware to\n be certain resolution, or to activate night mode.\n\n Indeed, we create 2 working classes to do the heavy lifting. Both of them are under construction. Feel free to checkout\n SCCaptureConfigurator, which is responsible for 2).\n\n */\n\n@interface SCCaptureCore : NSObject <SCCapturer>\n\n@property (nonatomic, strong, readonly) SCCaptureStateMachineContext *stateMachine;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/CapturerV2/Core/SCCaptureCore.m",
    "content": "//\n//  SCCaptureCore.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/2/17.\n//\n//\n\n#import \"SCCaptureCore.h\"\n\n#import \"SCCaptureDeviceAuthorizationChecker.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCManagedCapturerGLViewManagerAPI.h\"\n#import \"SCManagedCapturerLSAComponentTrackerAPI.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCAudio/SCAudioConfiguration.h>\n#import <SCFoundation/SCAssertWrapper.h>\n\nstatic const char *kSCCaptureDeviceAuthorizationManagerQueueLabel =\n    \"com.snapchat.capture_device_authorization_checker_queue\";\n\n@implementation SCCaptureCore {\n    SCManagedCapturerV1 *_managedCapturerV1;\n    SCQueuePerformer *_queuePerformer;\n    SCCaptureDeviceAuthorizationChecker *_authorizationChecker;\n}\n@synthesize blackCameraDetector = _blackCameraDetector;\n\n- (instancetype)init\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    self = [super init];\n    if (self) {\n        _managedCapturerV1 = [SCManagedCapturerV1 sharedInstance];\n        SCCaptureResource *resource = _managedCapturerV1.captureResource;\n        _queuePerformer = resource.queuePerformer;\n        _stateMachine = [[SCCaptureStateMachineContext alloc] initWithResource:resource];\n        SCQueuePerformer *authorizationCheckPerformer =\n            [[SCQueuePerformer alloc] initWithLabel:kSCCaptureDeviceAuthorizationManagerQueueLabel\n                                   qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                          queueType:DISPATCH_QUEUE_SERIAL\n                                            context:SCQueuePerformerContextCamera];\n        _authorizationChecker =\n            [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:authorizationCheckPerformer];\n    }\n    return self;\n}\n\n- (id<SCManagedCapturerLensAPI>)lensProcessingCore\n{\n    return 
_managedCapturerV1.lensProcessingCore;\n}\n\n// For APIs inside protocol SCCapture, if they are related to capture state machine, we delegate to state machine.\n- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                            completionHandler:(dispatch_block_t)completionHandler\n                                      context:(NSString *)context\n{\n    [_stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition\n                                                   completionHandler:completionHandler\n                                                             context:context];\n}\n\n- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                             context:(NSString *)context\n{\n    return [_stateMachine startRunningWithContext:context completionHandler:completionHandler];\n}\n\n#pragma mark - Recording / Capture\n\n- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                               context:(NSString *)context\n{\n    [_stateMachine captureStillImageAsynchronouslyWithAspectRatio:aspectRatio\n                                                 captureSessionID:captureSessionID\n                                                completionHandler:completionHandler\n                                                          context:context];\n}\n\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                          context:(NSString *)context\n{\n    [_stateMachine 
stopRunningWithCapturerToken:token completionHandler:completionHandler context:context];\n}\n\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                            after:(NSTimeInterval)delay\n                          context:(NSString *)context\n{\n    [_stateMachine stopRunningWithCapturerToken:token after:delay completionHandler:completionHandler context:context];\n}\n\n#pragma mark - Scanning\n\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context\n{\n    [_stateMachine startScanAsynchronouslyWithScanConfiguration:configuration context:context];\n}\n\n- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context\n{\n    [_stateMachine stopScanAsynchronouslyWithCompletionHandler:completionHandler context:context];\n}\n\n- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context\n                                  audioConfiguration:(SCAudioConfiguration *)configuration\n{\n    [_stateMachine prepareForRecordingAsynchronouslyWithAudioConfiguration:configuration context:context];\n}\n\n- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                                    audioConfiguration:(SCAudioConfiguration *)configuration\n                                           maxDuration:(NSTimeInterval)maxDuration\n                                               fileURL:(NSURL *)fileURL\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                                               context:(NSString *)context\n{\n    [_stateMachine 
startRecordingWithOutputSettings:outputSettings\n                                 audioConfiguration:configuration\n                                        maxDuration:maxDuration\n                                            fileURL:fileURL\n                                   captureSessionID:captureSessionID\n                                  completionHandler:completionHandler\n                                            context:context];\n}\n\n- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context\n{\n    [_stateMachine stopRecordingWithContext:context];\n}\n\n- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context\n{\n    [_stateMachine cancelRecordingWithContext:context];\n    [[self snapCreationTriggers] markSnapCreationEndWithContext:context];\n}\n\n#pragma mark -\n\n- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                  context:(NSString *)context\n{\n    [_managedCapturerV1 startStreamingAsynchronouslyWithCompletionHandler:completionHandler context:context];\n}\n- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController\n                                 context:(NSString *)context\n{\n    [_managedCapturerV1 addSampleBufferDisplayController:sampleBufferDisplayController context:context];\n}\n\n#pragma mark - Utilities\n\n- (void)convertViewCoordinates:(CGPoint)viewCoordinates\n             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler\n                       context:(NSString *)context\n{\n    [_managedCapturerV1 convertViewCoordinates:viewCoordinates completionHandler:completionHandler context:context];\n}\n\n- (void)detectLensCategoryOnNextFrame:(CGPoint)point\n                               lenses:(NSArray<SCLens *> *)lenses\n                           
completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion\n                              context:(NSString *)context\n{\n    [_managedCapturerV1 detectLensCategoryOnNextFrame:point lenses:lenses completion:completion context:context];\n}\n\n#pragma mark - Configurations\n\n- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                      completionHandler:(dispatch_block_t)completionHandler\n                                context:(NSString *)context\n{\n    [_managedCapturerV1 setDevicePositionAsynchronously:devicePosition\n                                      completionHandler:completionHandler\n                                                context:context];\n}\n\n- (void)setFlashActive:(BOOL)flashActive\n     completionHandler:(dispatch_block_t)completionHandler\n               context:(NSString *)context\n{\n    [_managedCapturerV1 setFlashActive:flashActive completionHandler:completionHandler context:context];\n}\n\n- (void)setLensesActive:(BOOL)lensesActive\n      completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context\n{\n    [_managedCapturerV1 setLensesActive:lensesActive completionHandler:completionHandler context:context];\n}\n\n- (void)setLensesActive:(BOOL)lensesActive\n          filterFactory:(SCLookseryFilterFactory *)filterFactory\n      completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context\n{\n    [_managedCapturerV1 setLensesActive:lensesActive\n                          filterFactory:filterFactory\n                      completionHandler:completionHandler\n                                context:context];\n}\n\n- (void)setLensesInTalkActive:(BOOL)lensesActive\n            completionHandler:(dispatch_block_t)completionHandler\n                      context:(NSString *)context\n{\n    [_managedCapturerV1 setLensesInTalkActive:lensesActive completionHandler:completionHandler 
context:context];\n}\n\n- (void)setTorchActiveAsynchronously:(BOOL)torchActive\n                   completionHandler:(dispatch_block_t)completionHandler\n                             context:(NSString *)context\n{\n    [_managedCapturerV1 setTorchActiveAsynchronously:torchActive completionHandler:completionHandler context:context];\n}\n\n- (void)setNightModeActiveAsynchronously:(BOOL)active\n                       completionHandler:(dispatch_block_t)completionHandler\n                                 context:(NSString *)context\n{\n    [_managedCapturerV1 setNightModeActiveAsynchronously:active completionHandler:completionHandler context:context];\n}\n\n- (void)lockZoomWithContext:(NSString *)context\n{\n    [_managedCapturerV1 lockZoomWithContext:context];\n}\n\n- (void)unlockZoomWithContext:(NSString *)context\n{\n    [_managedCapturerV1 unlockZoomWithContext:context];\n}\n\n- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context\n{\n    [_managedCapturerV1 setZoomFactorAsynchronously:zoomFactor context:context];\n}\n\n- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor\n                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                              context:(NSString *)context\n{\n    [_managedCapturerV1 resetZoomFactorAsynchronously:zoomFactor devicePosition:devicePosition context:context];\n}\n\n- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                        fromUser:(BOOL)fromUser\n                               completionHandler:(dispatch_block_t)completionHandler\n                                         context:(NSString *)context\n{\n    [_managedCapturerV1 setExposurePointOfInterestAsynchronously:pointOfInterest\n                                                        fromUser:fromUser\n                                               completionHandler:completionHandler\n                                                         
context:context];\n}\n\n- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                completionHandler:(dispatch_block_t)completionHandler\n                                          context:(NSString *)context\n{\n    [_managedCapturerV1 setAutofocusPointOfInterestAsynchronously:pointOfInterest\n                                                completionHandler:completionHandler\n                                                          context:context];\n}\n\n- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                   completionHandler:(dispatch_block_t)completionHandler\n                                             context:(NSString *)context\n{\n    [_managedCapturerV1 setPortraitModePointOfInterestAsynchronously:pointOfInterest\n                                                   completionHandler:completionHandler\n                                                             context:context];\n}\n\n- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                                  context:(NSString *)context\n{\n    [_managedCapturerV1 continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:completionHandler\n                                                                                  context:context];\n}\n\n// I need to call these three methods from SCAppDelegate explicitly so that I get the latest information.\n- (void)applicationDidEnterBackground\n{\n    [_managedCapturerV1 applicationDidEnterBackground];\n}\n\n- (void)applicationWillEnterForeground\n{\n    [_managedCapturerV1 applicationWillEnterForeground];\n}\n\n- (void)applicationDidBecomeActive\n{\n    [_managedCapturerV1 applicationDidBecomeActive];\n}\n- (void)applicationWillResignActive\n{\n    [_managedCapturerV1 applicationWillResignActive];\n}\n\n- (void)mediaServicesWereReset\n{\n    
[_managedCapturerV1 mediaServicesWereReset];\n}\n\n- (void)mediaServicesWereLost\n{\n    [_managedCapturerV1 mediaServicesWereLost];\n}\n\n#pragma mark - Add / Remove Listener\n\n- (void)addListener:(id<SCManagedCapturerListener>)listener\n{\n    [_managedCapturerV1 addListener:listener];\n}\n\n- (void)removeListener:(id<SCManagedCapturerListener>)listener\n{\n    [_managedCapturerV1 removeListener:listener];\n}\n\n- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener\n{\n    [_managedCapturerV1 addVideoDataSourceListener:listener];\n}\n\n- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener\n{\n    [_managedCapturerV1 removeVideoDataSourceListener:listener];\n}\n\n- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    [_managedCapturerV1 addDeviceCapacityAnalyzerListener:listener];\n}\n\n- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    [_managedCapturerV1 removeDeviceCapacityAnalyzerListener:listener];\n}\n\n- (NSString *)debugInfo\n{\n    return [_managedCapturerV1 debugInfo];\n}\n\n- (id<SCManagedVideoDataSource>)currentVideoDataSource\n{\n    return [_managedCapturerV1 currentVideoDataSource];\n}\n\n// For APIs inside protocol SCCapture, if they are not related to capture state machine, we directly delegate to V1.\n- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback\n{\n    [_managedCapturerV1 checkRestrictedCamera:callback];\n}\n\n- (void)recreateAVCaptureSession\n{\n    [_managedCapturerV1 recreateAVCaptureSession];\n}\n\n#pragma mark -\n- (CMTime)firstWrittenAudioBufferDelay\n{\n    return [SCCaptureWorker firstWrittenAudioBufferDelay:_managedCapturerV1.captureResource];\n}\n\n- (BOOL)audioQueueStarted\n{\n    return [SCCaptureWorker audioQueueStarted:_managedCapturerV1.captureResource];\n}\n\n- (BOOL)isLensApplied\n{\n    return [SCCaptureWorker 
isLensApplied:_managedCapturerV1.captureResource];\n}\n\n- (BOOL)isVideoMirrored\n{\n    return [SCCaptureWorker isVideoMirrored:_managedCapturerV1.captureResource];\n}\n\n- (SCVideoCaptureSessionInfo)activeSession\n{\n    return _managedCapturerV1.activeSession;\n}\n\n- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector\n                             deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider\n                                 fileInputDecider:(id<SCFileInputDecider>)fileInputDecider\n                           arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider\n                                    glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager\n                                  lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider\n                              lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker\n    managedCapturerPreviewLayerControllerDelegate:\n        (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate\n{\n    _managedCapturerV1.captureResource.blackCameraDetector = blackCameraDetector;\n    _managedCapturerV1.captureResource.deviceMotionProvider = deviceMotionProvider;\n    _managedCapturerV1.captureResource.fileInputDecider = fileInputDecider;\n    _managedCapturerV1.captureResource.arImageCaptureProvider = arImageCaptureProvider;\n    _managedCapturerV1.captureResource.videoPreviewGLViewManager = glViewManager;\n    [_managedCapturerV1.captureResource.videoPreviewGLViewManager\n        configureWithCaptureResource:_managedCapturerV1.captureResource];\n    _managedCapturerV1.captureResource.lensAPIProvider = lensAPIProvider;\n    _managedCapturerV1.captureResource.lsaTrackingComponentHandler = lsaComponentTracker;\n    [_managedCapturerV1.captureResource.lsaTrackingComponentHandler\n        configureWithCaptureResource:_managedCapturerV1.captureResource];\n 
   _managedCapturerV1.captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;\n    [SCManagedCapturePreviewLayerController sharedInstance].delegate =\n        _managedCapturerV1.captureResource.previewLayerControllerDelegate;\n}\n\n- (SCBlackCameraDetector *)blackCameraDetector\n{\n    return _managedCapturerV1.captureResource.blackCameraDetector;\n}\n\n- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:\n            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler\n                                                           context:(NSString *)context\n{\n    [_managedCapturerV1 captureSingleVideoFrameAsynchronouslyWithCompletionHandler:completionHandler context:context];\n}\n\n- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler\n                                 context:(NSString *)context\n{\n    [_managedCapturerV1 sampleFrameWithCompletionHandler:completionHandler context:context];\n}\n\n- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context\n{\n    [_managedCapturerV1 addTimedTask:task context:context];\n}\n\n- (void)clearTimedTasksWithContext:(NSString *)context\n{\n    [_managedCapturerV1 clearTimedTasksWithContext:context];\n}\n\n- (BOOL)authorizedForVideoCapture\n{\n    return [_authorizationChecker authorizedForVideoCapture];\n}\n\n- (void)preloadVideoCaptureAuthorization\n{\n    [_authorizationChecker preloadVideoCaptureAuthorization];\n}\n\n#pragma mark - Snap Creation triggers\n\n- (SCSnapCreationTriggers *)snapCreationTriggers\n{\n    return [_managedCapturerV1 snapCreationTriggers];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthBlurMetalModule.metal",
    "content": "//\n//  SCDepthBlurMetalModule.metal\n//  Snapchat\n//\n//  Created by Brian Ng on 10/31/17.\n//\n\n#include <metal_stdlib>\nusing namespace metal;\n\nstruct DepthBlurRenderData {\n    float depthRange;\n    float depthOffset;\n    float depthBlurForegroundThreshold;\n    float depthBlurBackgroundThreshold;\n};\n\nkernel void kernel_depth_blur(texture2d<float, access::read> sourceYTexture [[texture(0)]],\n                              texture2d<float, access::read> sourceUVTexture [[texture(1)]],\n                              texture2d<float, access::read> sourceDepthTexture[[texture(2)]],\n                              texture2d<float, access::read> sourceBlurredYTexture [[texture(3)]],\n                              texture2d<float, access::write> destinationYTexture [[texture(4)]],\n                              texture2d<float, access::write> destinationUVTexture [[texture(5)]],\n                              constant DepthBlurRenderData &renderData [[buffer(0)]],\n                              uint2 gid [[thread_position_in_grid]],\n                              uint2 size [[threads_per_grid]]) {\n    float2 valueUV = sourceUVTexture.read(gid).rg;\n    float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r;\n    float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;\n    float valueYUnblurred = sourceYTexture.read(gid).r;\n    float valueYBlurred = sourceBlurredYTexture.read(gid).r;\n    \n    float valueY = 0;\n    if (normalizedDepthValue > renderData.depthBlurForegroundThreshold) {\n        valueY = valueYUnblurred;\n    } else if (normalizedDepthValue < renderData.depthBlurBackgroundThreshold) {\n        valueY = valueYBlurred;\n    } else {\n        float blendRange = renderData.depthBlurForegroundThreshold - renderData.depthBlurBackgroundThreshold;\n        float normalizedBlendDepthValue = (normalizedDepthValue - renderData.depthBlurBackgroundThreshold) / blendRange;\n        valueY = 
valueYUnblurred * normalizedBlendDepthValue + valueYBlurred * (1 - normalizedBlendDepthValue);\n    }\n    \n    destinationYTexture.write(valueY, gid);\n    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);\n}\n\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.h",
    "content": "//\n//  SCDepthBlurMetalRenderCommand.h\n//  Snapchat\n//\n//  Created by Brian Ng on 11/8/17.\n//\n//\n\n#import \"SCMetalModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n @class SCDepthBlurMetalRenderCommand\n    Prepares the command buffer for the SCDepthBlurMetalModule.metal shader.\n */\n@interface SCDepthBlurMetalRenderCommand : NSObject <SCMetalRenderCommand>\n\n@property (nonatomic, readonly) NSString *functionName;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.m",
    "content": "//\n//  SCDepthBlurMetalRenderCommand.m\n//  Snapchat\n//\n//  Created by Brian Ng on 11/8/17.\n//\n//\n\n#import \"SCDepthBlurMetalRenderCommand.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n@import MetalPerformanceShaders;\n\n@implementation SCDepthBlurMetalRenderCommand\n\ntypedef struct DepthBlurRenderData {\n    float depthRange;\n    float depthOffset;\n    float depthBlurForegroundThreshold;\n    float depthBlurBackgroundThreshold;\n} DepthBlurRenderData;\n\n#pragma mark - SCMetalRenderCommand\n\n- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer\n                                     pipelineState:(id<MTLComputePipelineState>)pipelineState\n                                   textureResource:(SCMetalTextureResource *)textureResource\n{\n#if !TARGET_IPHONE_SIMULATOR\n    CGFloat depthBlurForegroundThreshold = textureResource.depthBlurForegroundThreshold;\n    CGFloat depthBlurBackgroundThreshold =\n        textureResource.depthBlurForegroundThreshold > SCCameraTweaksDepthBlurBackgroundThreshold()\n            ? 
SCCameraTweaksDepthBlurBackgroundThreshold()\n            : 0;\n    DepthBlurRenderData depthBlurRenderData = {\n        .depthRange = textureResource.depthRange,\n        .depthOffset = textureResource.depthOffset,\n        .depthBlurBackgroundThreshold = depthBlurBackgroundThreshold,\n        .depthBlurForegroundThreshold = depthBlurForegroundThreshold,\n    };\n    id<MTLBuffer> depthBlurRenderDataBuffer =\n        [textureResource.device newBufferWithLength:sizeof(DepthBlurRenderData)\n                                            options:MTLResourceOptionCPUCacheModeDefault];\n    memcpy(depthBlurRenderDataBuffer.contents, &depthBlurRenderData, sizeof(DepthBlurRenderData));\n\n    MPSImageGaussianBlur *kernel =\n        [[MPSImageGaussianBlur alloc] initWithDevice:textureResource.device sigma:SCCameraTweaksBlurSigma()];\n    [kernel encodeToCommandBuffer:commandBuffer\n                    sourceTexture:textureResource.sourceYTexture\n               destinationTexture:textureResource.sourceBlurredYTexture];\n\n    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];\n    [commandEncoder setComputePipelineState:pipelineState];\n\n    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];\n    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];\n    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:2];\n    [commandEncoder setTexture:textureResource.sourceBlurredYTexture atIndex:3];\n    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:4];\n    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:5];\n    [commandEncoder setBuffer:depthBlurRenderDataBuffer offset:0 atIndex:0];\n\n    return commandEncoder;\n#else\n    return nil;\n#endif\n}\n\n- (BOOL)requiresDepthData\n{\n    return YES;\n}\n\n#pragma mark - SCMetalModuleFunctionProvider\n\n- (NSString *)functionName\n{\n    return @\"kernel_depth_blur\";\n}\n\n- (NSString 
*)description\n{\n    return [NSString sc_stringWithFormat:@\"SCDepthBlurMetalRenderCommand (shader function = %@)\", self.functionName];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalModule.metal",
    "content": "//\n//  SCDepthToGrayscaleMetalModule.metal\n//  Snapchat\n//\n//  Created by Brian Ng on 12/7/17.\n//\n\n#include <metal_stdlib>\nusing namespace metal;\n\ntypedef struct DepthToGrayscaleRenderData {\n    float depthRange;\n    float depthOffset;\n} DepthToGrayscaleRenderData;\n\nkernel void kernel_depth_to_grayscale(texture2d<float, access::read> sourceDepthTexture[[texture(0)]],\n                                      texture2d<float, access::write> destinationYTexture [[texture(1)]],\n                                      texture2d<float, access::write> destinationUVTexture [[texture(2)]],\n                                      constant DepthToGrayscaleRenderData &renderData [[buffer(0)]],\n                                      uint2 gid [[thread_position_in_grid]],\n                                      uint2 size [[threads_per_grid]]) {\n    float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r;\n    float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;\n    \n    destinationYTexture.write(normalizedDepthValue, gid);\n    destinationUVTexture.write(float4(0.5, 0.5, 0, 0), gid);\n}\n\n\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.h",
    "content": "//\n//  SCDepthToGrayscaleMetalRenderCommand.h\n//  Snapchat\n//\n//  Created by Brian Ng on 12/7/17.\n//\n//\n\n#import \"SCMetalModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n @class SCDepthToGrayscaleMetalRenderCommand\n Prepares the command buffer for the SCDepthToGrayscaleMetalModule.metal shader.\n */\n@interface SCDepthToGrayscaleMetalRenderCommand : NSObject <SCMetalRenderCommand>\n\n@property (nonatomic, readonly) NSString *functionName;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.m",
    "content": "//\n//  SCDepthToGrayscaleMetalRenderCommand.m\n//  Snapchat\n//\n//  Created by Brian Ng on 12/7/17.\n//\n//\n\n#import \"SCDepthToGrayscaleMetalRenderCommand.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n@import MetalPerformanceShaders;\n\n@implementation SCDepthToGrayscaleMetalRenderCommand\n\ntypedef struct DepthToGrayscaleRenderData {\n    float depthRange;\n    float depthOffset;\n} DepthToGrayscaleRenderData;\n\n#pragma mark - SCMetalRenderCommand\n\n- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer\n                                     pipelineState:(id<MTLComputePipelineState>)pipelineState\n                                   textureResource:(SCMetalTextureResource *)textureResource\n{\n#if !TARGET_IPHONE_SIMULATOR\n    DepthToGrayscaleRenderData depthToGrayscaleRenderData = {\n        .depthRange = textureResource.depthRange, .depthOffset = textureResource.depthOffset,\n    };\n    id<MTLBuffer> depthToGrayscaleDataBuffer =\n        [textureResource.device newBufferWithLength:sizeof(DepthToGrayscaleRenderData)\n                                            options:MTLResourceOptionCPUCacheModeDefault];\n    memcpy(depthToGrayscaleDataBuffer.contents, &depthToGrayscaleRenderData, sizeof(DepthToGrayscaleRenderData));\n\n    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];\n    [commandEncoder setComputePipelineState:pipelineState];\n\n    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:0];\n    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:1];\n    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:2];\n    [commandEncoder setBuffer:depthToGrayscaleDataBuffer offset:0 atIndex:0];\n\n    return commandEncoder;\n#else\n    return nil;\n#endif\n}\n\n- (BOOL)requiresDepthData\n{\n    return YES;\n}\n\n#pragma mark - 
SCMetalModuleFunctionProvider\n\n- (NSString *)functionName\n{\n    return @\"kernel_depth_to_grayscale\";\n}\n\n- (NSString *)description\n{\n    return [NSString\n        sc_stringWithFormat:@\"SCDepthToGrayscaleMetalRenderCommand (shader function = %@)\", self.functionName];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.h",
    "content": "//\n//  SCDigitalExposureHandler.h\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/15/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@class SCExposureAdjustProcessingModule;\n\n/*\n @class SCDigitalExposureHandler\n    The SCDigitalExposureHandler will be built by the SCProcessingBuilder when the user indicates that he/she\n        wants to add SCExposureAdjustProcessingModule to the processing pipeline. The builder will take care\n        of initializing the handler by linking the processing module. Caller of the builder can then link up\n        the handler to the UI element (in this case, SCExposureSlider) so that user's control is hooked up to\n        the processing module.\n\n */\n@interface SCDigitalExposureHandler : NSObject\n\n- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule;\n- (void)setExposureParameter:(CGFloat)value;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.m",
    "content": "//\n//  SCDigitalExposureHandler.m\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/15/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCDigitalExposureHandler.h\"\n\n#import \"SCExposureAdjustProcessingModule.h\"\n\n@implementation SCDigitalExposureHandler {\n    __weak SCExposureAdjustProcessingModule *_processingModule;\n}\n\n- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule\n{\n    if (self = [super init]) {\n        _processingModule = processingModule;\n    }\n    return self;\n}\n\n- (void)setExposureParameter:(CGFloat)value\n{\n    [_processingModule setEVValue:value];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCExposureAdjustMetalModule.metal",
    "content": "//\n//  SCExposureAdjustMetalModule.metal\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/11/17.\n//\n//\n\n#include <metal_stdlib>\nusing namespace metal;\n\nkernel void kernel_exposure_adjust(texture2d<float, access::read> sourceYTexture [[texture(0)]],\n                                   texture2d<float, access::read> sourceUVTexture [[texture(1)]],\n                                   texture2d<float, access::write> destinationYTexture [[texture(2)]],\n                                   texture2d<float, access::write> destinationUVTexture [[texture(3)]],\n                                   uint2 gid [[thread_position_in_grid]],\n                                   uint2 size [[threads_per_grid]]) {\n    float valueY = sourceYTexture.read(gid).r;\n    float2 valueUV = sourceUVTexture.read(gid).rg;\n\n    float factor = 1.0 / pow(1.0 + valueY, 5) + 1.0;\n    valueY *= factor;\n    destinationYTexture.write(valueY, gid);\n    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);\n\n}\n\nkernel void kernel_exposure_adjust_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],\n                                   texture2d<float, access::read> sourceUVTexture [[texture(1)]],\n                                   texture2d<float, access::write> destinationYTexture [[texture(2)]],\n                                   texture2d<float, access::write> destinationUVTexture [[texture(3)]],\n                                   uint2 gid [[thread_position_in_grid]],\n                                   uint2 size [[threads_per_grid]]) {\n    float valueY = sourceYTexture.read(gid).r;\n    \n    float u =  0.5 - 0.368;\n    float v = 0.5 - 0.291;\n    \n    destinationYTexture.write(valueY, gid);\n    destinationUVTexture.write(float4(u, v, 0, 0), gid);\n    \n}\n\nkernel void kernel_exposure_adjust_inverted_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],\n                                              
 texture2d<float, access::read> sourceUVTexture [[texture(1)]],\n                                               texture2d<float, access::write> destinationYTexture [[texture(2)]],\n                                               texture2d<float, access::write> destinationUVTexture [[texture(3)]],\n                                               uint2 gid [[thread_position_in_grid]],\n                                               uint2 size [[threads_per_grid]]) {\n    float valueY = sourceYTexture.read(gid).r;\n    \n    valueY = 1.0 - valueY;\n    \n    float u =  0.5 - 0.368;\n    float v = 0.5 - 0.291;\n    \n    destinationYTexture.write(valueY, gid);\n    destinationUVTexture.write(float4(u, v, 0, 0), gid);\n    \n}\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.h",
    "content": "//\n//  SCExposureAdjustMetalRenderCommand.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/11/17.\n//\n//\n\n#import \"SCMetalModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n @class SCExposureAdjustMetalRenderCommand\n    Prepares the command buffer for the SCExposureAdjustMetalModule.metal shader.\n */\n@interface SCExposureAdjustMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>\n\n@property (nonatomic, readonly) NSString *functionName;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.m",
    "content": "//\n//  SCExposureAdjustMetalRenderCommand.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/11/17.\n//\n//\n\n#import \"SCExposureAdjustMetalRenderCommand.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n\n@import Metal;\n\n@implementation SCExposureAdjustMetalRenderCommand\n\n#pragma mark - SCMetalRenderCommand\n\n- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer\n                                     pipelineState:(id<MTLComputePipelineState>)pipelineState\n                                   textureResource:(SCMetalTextureResource *)textureResource\n{\n    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];\n    [commandEncoder setComputePipelineState:pipelineState];\n#if !TARGET_IPHONE_SIMULATOR\n    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];\n    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];\n    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];\n    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];\n#endif\n\n    return commandEncoder;\n}\n\n#pragma mark - SCMetalModuleFunctionProvider\n\n- (NSString *)functionName\n{\n    if (SCCameraExposureAdjustmentMode() == 1) {\n        return @\"kernel_exposure_adjust\";\n    } else if (SCCameraExposureAdjustmentMode() == 2) {\n        return @\"kernel_exposure_adjust_nightvision\";\n    } else if (SCCameraExposureAdjustmentMode() == 3) {\n        return @\"kernel_exposure_adjust_inverted_nightvision\";\n    } else {\n        SCAssertFail(@\"Incorrect value from SCCameraExposureAdjustmentMode() %ld\",\n                     (long)SCCameraExposureAdjustmentMode());\n        return nil;\n    }\n}\n\n- (BOOL)requiresDepthData\n{\n    return NO;\n}\n\n- (NSString *)description\n{\n    return\n        [NSString sc_stringWithFormat:@\"SCExposureAdjustMetalRenderCommand 
(shader function = %@)\", self.functionName];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.h",
    "content": "//\n//  SCExposureAdjustProcessingModule.h\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCProcessingModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/**\n NOTE: If we start chaining multiple CIImage modules we should\n not run them back to back but instead in one CIImage pass\n as CoreImage will merge the shaders for best performance\n*/\n\n/*\n @class SCExposureAdjustProcessingModule\n    This module uses the CIExposureAdjust CIFilter to process the frames. It uses the value provided by\n    the SCDigitalExposureHandler as evValue (default is 0).\n  */\n@interface SCExposureAdjustProcessingModule : NSObject <SCProcessingModule>\n\n- (void)setEVValue:(CGFloat)value;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.m",
    "content": "//\n//  SCExposureAdjustProcessingModule.m\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCExposureAdjustProcessingModule.h\"\n\n#import \"SCProcessingModuleUtils.h\"\n\n@import CoreImage;\n@import CoreMedia;\n\nstatic const CGFloat kSCExposureAdjustProcessingModuleMaxEVValue = 2.0;\n\n@implementation SCExposureAdjustProcessingModule {\n    CIContext *_context;\n    CIFilter *_filter;\n    CFMutableDictionaryRef _attributes;\n    CVPixelBufferPoolRef _bufferPool;\n}\n\n- (instancetype)init\n{\n    if (self = [super init]) {\n        _context = [CIContext context];\n        _filter = [CIFilter filterWithName:@\"CIExposureAdjust\"];\n        [_filter setValue:@0.0 forKey:@\"inputEV\"];\n    }\n    return self;\n}\n\n- (void)setEVValue:(CGFloat)value\n{\n    CGFloat newEVValue = value * kSCExposureAdjustProcessingModuleMaxEVValue;\n    [_filter setValue:@(newEVValue) forKey:@\"inputEV\"];\n}\n\n- (void)dealloc\n{\n    CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);\n    CVPixelBufferPoolRelease(_bufferPool);\n}\n\n- (BOOL)requiresDepthData\n{\n    return NO;\n}\n\n- (CMSampleBufferRef)render:(RenderData)renderData\n{\n    CMSampleBufferRef input = renderData.sampleBuffer;\n    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(input);\n    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];\n\n    [_filter setValue:image forKey:kCIInputImageKey];\n    CIImage *result = [_filter outputImage];\n\n    return [SCProcessingModuleUtils sampleBufferFromImage:result\n                                          oldSampleBuffer:input\n                                               bufferPool:_bufferPool\n                                                  context:_context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCMetalModule.h",
    "content": "//\n//  SCMetalModule.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/19/17.\n//\n//\n\n#import \"SCMetalTextureResource.h\"\n#import \"SCMetalUtils.h\"\n#import \"SCProcessingModule.h\"\n\n#import <Foundation/Foundation.h>\n\n@protocol SCMetalModuleFunctionProvider <NSObject>\n\n@property (nonatomic, readonly) NSString *functionName;\n\n@end\n\n@protocol SCMetalRenderCommand <SCMetalModuleFunctionProvider>\n\n/**\n Sets textures and parameters for the shader function. When implementing this function, the command encoder must be\n computed and the pipeline state set. That is, ensure that there are calls to: [commandBuffer computeCommandEncoder]\n and [commandEncoder setComputePipelineState:pipelineState].\n */\n#if !TARGET_IPHONE_SIMULATOR\n- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer\n                                     pipelineState:(id<MTLComputePipelineState>)pipelineState\n                                   textureResource:(SCMetalTextureResource *)textureResource;\n#endif\n\n- (BOOL)requiresDepthData;\n\n@end\n\n/**\n NOTE: If we start chaining multiple metal modules we should\n not run them back to back but instead chain different render\n passes.\n */\n@interface SCMetalModule : NSObject <SCProcessingModule>\n\n// Designated initializer: SCMetalModule should always have a SCMetalRenderCommand\n- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCMetalModule.m",
    "content": "//\n//  SCMetalModule.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/19/17.\n//\n//\n\n#import \"SCMetalModule.h\"\n\n#import \"SCCameraTweaks.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n\n@interface SCMetalModule ()\n#if !TARGET_IPHONE_SIMULATOR\n@property (nonatomic, readonly) id<MTLLibrary> library;\n@property (nonatomic, readonly) id<MTLDevice> device;\n@property (nonatomic, readonly) id<MTLFunction> function;\n@property (nonatomic, readonly) id<MTLComputePipelineState> computePipelineState;\n@property (nonatomic, readonly) id<MTLCommandQueue> commandQueue;\n@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;\n#endif\n@end\n\n@implementation SCMetalModule {\n    id<SCMetalRenderCommand> _metalRenderCommand;\n}\n\n#if !TARGET_IPHONE_SIMULATOR\n@synthesize library = _library;\n@synthesize function = _function;\n@synthesize computePipelineState = _computePipelineState;\n@synthesize commandQueue = _commandQueue;\n@synthesize textureCache = _textureCache;\n#endif\n\n- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand\n{\n    self = [super init];\n    if (self) {\n        _metalRenderCommand = metalRenderCommand;\n    }\n    return self;\n}\n\n#pragma mark - SCProcessingModule\n\n- (CMSampleBufferRef)render:(RenderData)renderData\n{\n    CMSampleBufferRef input = renderData.sampleBuffer;\n#if !TARGET_IPHONE_SIMULATOR\n    id<MTLComputePipelineState> pipelineState = self.computePipelineState;\n    SC_GUARD_ELSE_RETURN_VALUE(pipelineState, input);\n\n    CVMetalTextureCacheRef textureCache = self.textureCache;\n    SC_GUARD_ELSE_RETURN_VALUE(textureCache, input);\n\n    id<MTLCommandQueue> commandQueue = self.commandQueue;\n    SC_GUARD_ELSE_RETURN_VALUE(commandQueue, input);\n\n    SCMetalTextureResource *textureResource =\n        [[SCMetalTextureResource alloc] initWithRenderData:renderData textureCache:textureCache device:self.device];\n    
id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];\n    if (!_metalRenderCommand) {\n        SCAssertFail(@\"Metal module must be initialized with an SCMetalRenderCommand\");\n    }\n    id<MTLComputeCommandEncoder> commandEncoder = [_metalRenderCommand encodeMetalCommand:commandBuffer\n                                                                            pipelineState:pipelineState\n                                                                          textureResource:textureResource];\n\n    NSUInteger w = pipelineState.threadExecutionWidth;\n    NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w;\n\n    MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1);\n    MTLSize threadgroupsPerGrid = MTLSizeMake((textureResource.sourceYTexture.width + w - 1) / w,\n                                              (textureResource.sourceYTexture.height + h - 1) / h, 1);\n\n    [commandEncoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup];\n\n    [commandEncoder endEncoding];\n    [commandBuffer commit];\n    [commandBuffer waitUntilCompleted];\n\n    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);\n    SCMetalCopyTexture(textureResource.destinationYTexture, imageBuffer, 0);\n    SCMetalCopyTexture(textureResource.destinationUVTexture, imageBuffer, 1);\n#endif\n    return input;\n}\n\n- (BOOL)requiresDepthData\n{\n    return [_metalRenderCommand requiresDepthData];\n}\n\n#pragma mark - Lazy properties\n\n#if !TARGET_IPHONE_SIMULATOR\n\n- (id<MTLLibrary>)library\n{\n    if (!_library) {\n        NSString *libPath = [[NSBundle mainBundle] pathForResource:@\"sccamera-default\" ofType:@\"metallib\"];\n        NSError *error = nil;\n        _library = [self.device newLibraryWithFile:libPath error:&error];\n        if (error) {\n            SCLogGeneralError(@\"Create metallib error: %@\", error.description);\n        }\n    }\n    return _library;\n}\n\n- 
(id<MTLDevice>)device\n{\n    return SCGetManagedCaptureMetalDevice();\n}\n\n- (id<MTLFunction>)function\n{\n    return [self.library newFunctionWithName:[_metalRenderCommand functionName]];\n}\n\n- (id<MTLComputePipelineState>)computePipelineState\n{\n    if (!_computePipelineState) {\n        NSError *error = nil;\n        _computePipelineState = [self.device newComputePipelineStateWithFunction:self.function error:&error];\n        if (error) {\n            SCLogGeneralError(@\"Error while creating compute pipeline state %@\", error.description);\n        }\n    }\n    return _computePipelineState;\n}\n\n- (id<MTLCommandQueue>)commandQueue\n{\n    if (!_commandQueue) {\n        _commandQueue = [self.device newCommandQueue];\n    }\n    return _commandQueue;\n}\n\n- (CVMetalTextureCacheRef)textureCache\n{\n    if (!_textureCache) {\n        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, self.device, nil, &_textureCache);\n    }\n    return _textureCache;\n}\n\n#endif\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCMetalTextureResource.h",
    "content": "//\n//  SCMetalTextureResource.h\n//  Snapchat\n//\n//  Created by Brian Ng on 11/7/17.\n//\n\n#import \"SCProcessingModule.h\"\n#import \"SCCapturerDefines.h\"\n\n#import <Foundation/Foundation.h>\n#if !TARGET_IPHONE_SIMULATOR\n#import <Metal/Metal.h>\n#endif\n\n/*\n @class SCMetalTextureResource\n    The SCMetalTextureResource is created by SCMetalModule and is passed to a SCMetalRenderCommand.\n        This resource provides a collection of textures for rendering, where a SCMetalRenderCommand\n        selects which textures it needs. Textures are lazily initialized to optimize performance.\n        Additionally, information pertaining to depth is provided if normalizing depth is desired:\n        depthRange is the range of possible depth values [depthOffset, depthOffset + depthRange],\n        where depthOffset is the min depth value in the given depth map.\n    NOTE: This class is NOT thread safe -- ensure any calls are made by a performer by calling\n        SCAssertPerformer before actually accessing any textures\n */\n@interface SCMetalTextureResource : NSObject\n\n#if !TARGET_IPHONE_SIMULATOR\n@property (nonatomic, readonly) id<MTLTexture> sourceYTexture;\n@property (nonatomic, readonly) id<MTLTexture> sourceUVTexture;\n@property (nonatomic, readonly) id<MTLTexture> destinationYTexture;\n@property (nonatomic, readonly) id<MTLTexture> destinationUVTexture;\n\n// Textures for SCDepthBlurMetalCommand\n@property (nonatomic, readonly) id<MTLTexture> sourceBlurredYTexture;\n@property (nonatomic, readonly) id<MTLTexture> sourceDepthTexture;\n\n@property (nonatomic, readonly) id<MTLDevice> device;\n#endif\n\n// Available depth-related auxiliary resources (when depth data is provided)\n@property (nonatomic, readonly) float depthRange;\n@property (nonatomic, readonly) float depthOffset;\n@property (nonatomic, readonly) CGFloat depthBlurForegroundThreshold;\n@property (nonatomic, readonly) SampleBufferMetadata sampleBufferMetadata;\n\n#if 
!TARGET_IPHONE_SIMULATOR\n- (instancetype)initWithRenderData:(RenderData)renderData\n                      textureCache:(CVMetalTextureCacheRef)textureCache\n                            device:(id<MTLDevice>)device;\n#endif\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCMetalTextureResource.m",
    "content": "//\n//  SCMetalTextureResource.m\n//  Snapchat\n//\n//  Created by Brian Ng on 11/7/17.\n//\n\n#import \"SCMetalTextureResource.h\"\n\n#import \"SCCameraSettingUtils.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCMetalUtils.h\"\n\n@import CoreImage;\n\n#if !TARGET_IPHONE_SIMULATOR\nstatic NSInteger const kSCFocusRectSize = 4;\n#endif\n\n@interface SCMetalTextureResource ()\n#if !TARGET_IPHONE_SIMULATOR\n@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;\n#endif\n@end\n\n@implementation SCMetalTextureResource {\n    RenderData _renderData;\n    CVImageBufferRef _imageBuffer;\n    CIContext *_context;\n}\n\n#if !TARGET_IPHONE_SIMULATOR\n@synthesize sourceYTexture = _sourceYTexture;\n@synthesize sourceUVTexture = _sourceUVTexture;\n@synthesize destinationYTexture = _destinationYTexture;\n@synthesize destinationUVTexture = _destinationUVTexture;\n@synthesize sourceBlurredYTexture = _sourceBlurredYTexture;\n@synthesize sourceDepthTexture = _sourceDepthTexture;\n@synthesize depthRange = _depthRange;\n@synthesize depthOffset = _depthOffset;\n@synthesize depthBlurForegroundThreshold = _depthBlurForegroundThreshold;\n@synthesize device = _device;\n@synthesize sampleBufferMetadata = _sampleBufferMetadata;\n\n- (instancetype)initWithRenderData:(RenderData)renderData\n                      textureCache:(CVMetalTextureCacheRef)textureCache\n                            device:(id<MTLDevice>)device\n{\n    self = [super init];\n    if (self) {\n        _imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);\n        _renderData = renderData;\n        _textureCache = textureCache;\n        _device = device;\n        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];\n    }\n    return self;\n}\n#endif\n\n#if !TARGET_IPHONE_SIMULATOR\n\n- (id<MTLTexture>)sourceYTexture\n{\n    if (!_sourceYTexture) {\n        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n        
_sourceYTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);\n        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n    }\n    return _sourceYTexture;\n}\n\n- (id<MTLTexture>)sourceUVTexture\n{\n    if (!_sourceUVTexture) {\n        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n        _sourceUVTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);\n        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n    }\n    return _sourceUVTexture;\n}\n\n- (id<MTLTexture>)destinationYTexture\n{\n    if (!_destinationYTexture) {\n        MTLTextureDescriptor *textureDescriptor =\n            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm\n                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)\n                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)\n                                                           mipmapped:NO];\n        textureDescriptor.usage |= MTLTextureUsageShaderWrite;\n        _destinationYTexture = [_device newTextureWithDescriptor:textureDescriptor];\n    }\n    return _destinationYTexture;\n}\n\n- (id<MTLTexture>)destinationUVTexture\n{\n    if (!_destinationUVTexture) {\n        MTLTextureDescriptor *textureDescriptor =\n            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm\n                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 1)\n                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 1)\n                                                           mipmapped:NO];\n        textureDescriptor.usage |= MTLTextureUsageShaderWrite;\n        _destinationUVTexture = [_device 
newTextureWithDescriptor:textureDescriptor];\n    }\n    return _destinationUVTexture;\n}\n\n- (id<MTLTexture>)sourceBlurredYTexture\n{\n    if (!_sourceBlurredYTexture) {\n        MTLTextureDescriptor *textureDescriptor =\n            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm\n                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)\n                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)\n                                                           mipmapped:NO];\n        textureDescriptor.usage |= MTLTextureUsageShaderWrite;\n        _sourceBlurredYTexture = [_device newTextureWithDescriptor:textureDescriptor];\n    }\n    return _sourceBlurredYTexture;\n}\n\n- (id<MTLTexture>)sourceDepthTexture\n{\n    if (!_sourceDepthTexture) {\n        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n        _sourceDepthTexture =\n            SCMetalTextureFromPixelBuffer(_renderData.depthDataMap, 0, MTLPixelFormatR16Float, _textureCache);\n        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);\n    }\n    return _sourceDepthTexture;\n}\n\n- (float)depthRange\n{\n    if (_depthRange == 0) {\n        //  Get min/max values of depth image to normalize\n        size_t bufferWidth = CVPixelBufferGetWidth(_renderData.depthDataMap);\n        size_t bufferHeight = CVPixelBufferGetHeight(_renderData.depthDataMap);\n        size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(_renderData.depthDataMap);\n\n        CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);\n        unsigned char *pixelBufferPointer = CVPixelBufferGetBaseAddress(_renderData.depthDataMap);\n        __fp16 *bufferPtr = (__fp16 *)pixelBufferPointer;\n        uint32_t ptrInc = (int)bufferBytesPerRow / sizeof(__fp16);\n\n        float depthMin = MAXFLOAT;\n        float depthMax 
= -MAXFLOAT;\n        for (int j = 0; j < bufferHeight; j++) {\n            for (int i = 0; i < bufferWidth; i++) {\n                float value = bufferPtr[i];\n                if (!isnan(value)) {\n                    depthMax = MAX(depthMax, value);\n                    depthMin = MIN(depthMin, value);\n                }\n            }\n            bufferPtr += ptrInc;\n        }\n        CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);\n        _depthRange = depthMax - depthMin;\n        _depthOffset = depthMin;\n    }\n    return _depthRange;\n}\n\n- (float)depthOffset\n{\n    if (_depthRange == 0) {\n        [self depthRange];\n    }\n    return _depthOffset;\n}\n\n- (CGFloat)depthBlurForegroundThreshold\n{\n    if (_renderData.depthBlurPointOfInterest) {\n        CGPoint point = *_renderData.depthBlurPointOfInterest;\n        CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:_renderData.depthDataMap];\n        CIVector *vector =\n            [CIVector vectorWithX:point.x * CVPixelBufferGetWidth(_renderData.depthDataMap) - kSCFocusRectSize / 2\n                                Y:point.y * CVPixelBufferGetHeight(_renderData.depthDataMap) - kSCFocusRectSize / 2\n                                Z:kSCFocusRectSize\n                                W:kSCFocusRectSize];\n        CIImage *minMaxImage =\n            [[disparityImage imageByClampingToExtent] imageByApplyingFilter:@\"CIAreaMinMaxRed\"\n                                                        withInputParameters:@{kCIInputExtentKey : vector}];\n        UInt8 pixel[4] = {0, 0, 0, 0};\n        [_context render:minMaxImage\n                toBitmap:&pixel\n                rowBytes:4\n                  bounds:CGRectMake(0, 0, 1, 1)\n                  format:kCIFormatRGBA8\n              colorSpace:nil];\n        CGFloat disparity = pixel[1] / 255.0;\n        CGFloat normalizedDisparity = (disparity - self.depthOffset) / self.depthRange;\n        return 
normalizedDisparity;\n    } else {\n        return SCCameraTweaksDepthBlurForegroundThreshold();\n    }\n}\n\n- (SampleBufferMetadata)sampleBufferMetadata\n{\n    SampleBufferMetadata sampleMetadata = {\n        .isoSpeedRating = 0, .exposureTime = 0.033, .brightness = 0,\n    };\n    retrieveSampleBufferMetadata(_renderData.sampleBuffer, &sampleMetadata);\n    return sampleMetadata;\n}\n\n#endif\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalModule.metal",
    "content": "//\n//  SCNightModeEnhancementMetalModule.metal\n//  Snapchat\n//\n//  Created by Chao Pang on 12/21/17.\n//\n//\n\n#include <metal_stdlib>\nusing namespace metal;\n\ntypedef struct SampleBufferMetadata {\n    int isoSpeedRating;\n    float exposureTime;\n    float brightness;\n}SampleBufferMetadata;\n\nkernel void kernel_night_mode_enhancement(texture2d<float, access::read> sourceYTexture [[texture(0)]],\n                                   \t\t  texture2d<float, access::read> sourceUVTexture [[texture(1)]],\n                                          texture2d<float, access::write> destinationYTexture [[texture(2)]],\n                                          texture2d<float, access::write> destinationUVTexture [[texture(3)]],\n                                          constant SampleBufferMetadata &metaData [[buffer(0)]],\n                                          uint2 gid [[thread_position_in_grid]],\n                                          uint2 size [[threads_per_grid]]) {\n    float valueY = sourceYTexture.read(gid).r;\n    float2 valueUV = sourceUVTexture.read(gid).rg;\n\n    float factor = 1.0 - metaData.brightness * 0.1;\n    factor = max(min(factor, 1.3), 1.0);\n\n    valueY = min(valueY * factor, 1.0);\n    valueUV.rg = max(min((valueUV.rg - 0.5) * factor + 0.5, 1.0), 0.0);\n\n    destinationYTexture.write(valueY, gid);\n    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);\n\n}\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.h",
    "content": "//\n//  SCNightModeEnhancementMetalRenderCommand.h\n//  Snapchat\n//\n//  Created by Chao Pang on 12/21/17.\n//\n\n#import \"SCMetalModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n Prepares the command buffer for the SCNightModeEnhancementMetalModule.metal.\n */\n@interface SCNightModeEnhancementMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>\n\n@property (nonatomic, readonly) NSString *functionName;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.m",
    "content": "//\n//  SCNightModeEnhancementMetalRenderCommand.m\n//  Snapchat\n//\n//  Created by Chao Pang on 12/21/17.\n//\n\n#import \"SCNightModeEnhancementMetalRenderCommand.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n@import Metal;\n\n@implementation SCNightModeEnhancementMetalRenderCommand\n\n#pragma mark - SCMetalRenderCommand\n\n- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer\n                                     pipelineState:(id<MTLComputePipelineState>)pipelineState\n                                   textureResource:(SCMetalTextureResource *)textureResource\n{\n    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];\n    [commandEncoder setComputePipelineState:pipelineState];\n#if !TARGET_IPHONE_SIMULATOR\n    SampleBufferMetadata sampleBufferMetadata = {\n        .isoSpeedRating = textureResource.sampleBufferMetadata.isoSpeedRating,\n        .exposureTime = textureResource.sampleBufferMetadata.exposureTime,\n        .brightness = textureResource.sampleBufferMetadata.brightness,\n    };\n    id<MTLBuffer> metadataBuffer = [textureResource.device newBufferWithLength:sizeof(SampleBufferMetadata)\n                                                                       options:MTLResourceOptionCPUCacheModeDefault];\n    memcpy(metadataBuffer.contents, &sampleBufferMetadata, sizeof(SampleBufferMetadata));\n    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];\n    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];\n    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];\n    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];\n    [commandEncoder setBuffer:metadataBuffer offset:0 atIndex:0];\n#endif\n\n    return commandEncoder;\n}\n\n#pragma mark - SCMetalModuleFunctionProvider\n\n- (NSString *)functionName\n{\n    return 
@\"kernel_night_mode_enhancement\";\n}\n\n- (BOOL)requiresDepthData\n{\n    return NO;\n}\n\n- (NSString *)description\n{\n    return [NSString\n        sc_stringWithFormat:@\"SCNightModeEnhancementMetalRenderCommand (shader function = %@)\", self.functionName];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingModule.h",
    "content": "//\n//  SCProcessingModule.h\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\ntypedef struct RenderData {\n    CMSampleBufferRef sampleBuffer;\n    CVPixelBufferRef depthDataMap;     // Optional - for depth blur rendering\n    CGPoint *depthBlurPointOfInterest; // Optional - for depth blur rendering\n} RenderData;\n\n/*\n @protocol SCProcessingModule\n    A single module that is responsible for the actual image processing work. Multiple modules can be chained\n        together by the SCProcessingPipelineBuilder and the frame can be passed through the entire\n        SCProcessingPipeline.\n */\n@protocol SCProcessingModule <NSObject>\n\n- (CMSampleBufferRef)render:(RenderData)renderData;\n\n// Needed to protect against depth data potentially being nil during the render pass\n- (BOOL)requiresDepthData;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.h",
    "content": "//\n//  SCProcessingModuleUtils.h\n//  Snapchat\n//\n//  Created by Brian Ng on 11/10/17.\n//\n\n#import <CoreImage/CoreImage.h>\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\n@interface SCProcessingModuleUtils : NSObject\n\n+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image\n                              bufferPool:(CVPixelBufferPoolRef)bufferPool\n                                 context:(CIContext *)context;\n\n+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image\n                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer\n                                bufferPool:(CVPixelBufferPoolRef)bufferPool\n                                   context:(CIContext *)context;\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.m",
    "content": "//\n//  SCProcessingModuleUtils.m\n//  Snapchat\n//\n//  Created by Brian Ng on 11/10/17.\n//\n\n#import \"SCProcessingModuleUtils.h\"\n\n#import <SCFoundation/SCLog.h>\n\n@import CoreImage;\n\n@implementation SCProcessingModuleUtils\n\n+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image\n                              bufferPool:(CVPixelBufferPoolRef)bufferPool\n                                 context:(CIContext *)context\n{\n    CVReturn result;\n\n    if (bufferPool == NULL) {\n        NSDictionary *pixelAttributes = @{\n            (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *)\n            kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)\n            kCVPixelBufferWidthKey : @(image.extent.size.width), (NSString *)\n            kCVPixelBufferHeightKey : @(image.extent.size.height)\n        };\n        result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,\n                                         (__bridge CFDictionaryRef _Nullable)(pixelAttributes), &bufferPool);\n        if (result != kCVReturnSuccess) {\n            SCLogGeneralError(@\"[Processing Pipeline] Error creating pixel buffer pool %i\", result);\n            return NULL;\n        }\n    }\n\n    CVPixelBufferRef resultBuffer = NULL;\n    result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &resultBuffer);\n\n    if (result == kCVReturnSuccess) {\n        [context render:image toCVPixelBuffer:resultBuffer];\n    } else {\n        SCLogGeneralError(@\"[Processing Pipeline] Error creating pixel buffer from pool %i\", result);\n    }\n    return resultBuffer;\n}\n\n+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image\n                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer\n                                bufferPool:(CVPixelBufferPoolRef)bufferPool\n                                   context:(CIContext *)context\n{\n    CVPixelBufferRef pixelBuffer 
=\n        [SCProcessingModuleUtils pixelBufferFromImage:image bufferPool:bufferPool context:context];\n    if (!pixelBuffer) {\n        SCLogGeneralError(@\"[Processing Pipeline] Error creating new pixel buffer from image\");\n        return oldSampleBuffer;\n    }\n\n    CMSampleBufferRef newSampleBuffer = NULL;\n    CMSampleTimingInfo timimgInfo = kCMTimingInfoInvalid;\n    CMSampleBufferGetSampleTimingInfo(oldSampleBuffer, 0, &timimgInfo);\n\n    CMVideoFormatDescriptionRef videoInfo = NULL;\n    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);\n    if (status != noErr) {\n        SCLogGeneralError(@\"[Processing Pipeline] Error creating video format description %i\", (int)status);\n        CVPixelBufferRelease(pixelBuffer);\n        return oldSampleBuffer;\n    }\n\n    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo,\n                                                &timimgInfo, &newSampleBuffer);\n    if (status != noErr) {\n        SCLogGeneralError(@\"[Processing Pipeline] Error creating CMSampleBuffer %i\", (int)status);\n        CVPixelBufferRelease(pixelBuffer);\n        return oldSampleBuffer;\n    }\n\n    CVPixelBufferRelease(pixelBuffer);\n    return newSampleBuffer;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingPipeline.h",
    "content": "//\n//  SCProcessingPipeline.h\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCProcessingModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n @class SCProcessingPipeline\n    The SCProcessingPipeline chains together a series of SCProcessingModules and passes the frame through\n        each of them in a pre-determined order. This is done through a chain of command, where the resulting\n        frame from the first module is passed to the second, then to the third, etc.\n */\n@interface SCProcessingPipeline : NSObject <SCProcessingModule>\n\n@property (nonatomic, strong) NSMutableArray<id<SCProcessingModule>> *processingModules;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingPipeline.m",
    "content": "//\n//  SCProcessingPipeline.m\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCProcessingPipeline.h\"\n\n#import <SCFoundation/NSString+Helpers.h>\n\n@import CoreMedia;\n\n@implementation SCProcessingPipeline\n\n- (CMSampleBufferRef)render:(RenderData)renderData\n{\n    for (id<SCProcessingModule> module in self.processingModules) {\n        if (![module requiresDepthData] || ([module requiresDepthData] && renderData.depthDataMap)) {\n            renderData.sampleBuffer = [module render:renderData];\n        }\n    }\n\n    return renderData.sampleBuffer;\n}\n\n- (NSString *)description\n{\n    NSMutableString *desc = [NSMutableString new];\n    [desc appendString:@\"ProcessingPipeline, modules: \"];\n    for (id<SCProcessingModule> module in self.processingModules) {\n        [desc appendFormat:@\"%@, \", [module description]];\n    }\n    if (self.processingModules.count > 0) {\n        return [desc substringToIndex:desc.lengthOfCharacterSequences - 2];\n    }\n    return desc;\n}\n\n- (BOOL)requiresDepthData\n{\n    return NO;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.h",
    "content": "//\n//  SCProcessingPipelineBuilder.h\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@class SCDigitalExposureHandler;\n@class SCProcessingPipeline;\n\n/*\n @class SCProcessingPipelineBuilder\n    The builder object is responsible for creating the SCProcessingPipeline, the underneath\n        SCProcessingModules, and eventually chaining the SCProcessingModules together in a pre-determined\n        order. The builder is also responsible for providing consumers with handler objects.\n\n */\n@interface SCProcessingPipelineBuilder : NSObject\n\n@property (nonatomic) BOOL useExposureAdjust;\n@property (nonatomic) BOOL portraitModeEnabled;\n@property (nonatomic) BOOL enhancedNightMode;\n\n- (SCProcessingPipeline *)build;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.m",
    "content": "//\n//  SCProcessingPipelineBuilder.m\n//  Snapchat\n//\n//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCProcessingPipelineBuilder.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCDepthBlurMetalRenderCommand.h\"\n#import \"SCDepthToGrayscaleMetalRenderCommand.h\"\n#import \"SCDigitalExposureHandler.h\"\n#import \"SCExposureAdjustMetalRenderCommand.h\"\n#import \"SCMetalUtils.h\"\n#import \"SCNightModeEnhancementMetalRenderCommand.h\"\n#import \"SCProcessingPipeline.h\"\n\n@implementation SCProcessingPipelineBuilder\n\n- (SCProcessingPipeline *)build\n{\n    if (!_useExposureAdjust && !_portraitModeEnabled && !_enhancedNightMode) { // in the future: && !useA && !useB ...\n        return nil;\n    }\n\n    SCProcessingPipeline *processingPipeline = [[SCProcessingPipeline alloc] init];\n    NSMutableArray<id<SCProcessingModule>> *processingModules = [NSMutableArray array];\n\n    // order of adding module matters!\n    if (_useExposureAdjust && SCDeviceSupportsMetal()) {\n        // this check looks redundant right now, but when we have more modules it will be necessary\n        SCMetalModule *exposureAdjustMetalModule =\n            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCExposureAdjustMetalRenderCommand new]];\n        [processingModules addObject:exposureAdjustMetalModule];\n    }\n\n    if (_portraitModeEnabled) {\n        id<SCMetalRenderCommand> renderCommand = SCCameraTweaksDepthToGrayscaleOverride()\n                                                     ? 
[SCDepthToGrayscaleMetalRenderCommand new]\n                                                     : [SCDepthBlurMetalRenderCommand new];\n        SCMetalModule *depthBlurMetalModule = [[SCMetalModule alloc] initWithMetalRenderCommand:renderCommand];\n        [processingModules addObject:depthBlurMetalModule];\n    }\n\n    if (_enhancedNightMode && SCDeviceSupportsMetal()) {\n        SCMetalModule *nightModeEnhancementModule =\n            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCNightModeEnhancementMetalRenderCommand new]];\n        [processingModules addObject:nightModeEnhancementModule];\n    }\n\n    processingPipeline.processingModules = processingModules;\n    return processingPipeline;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.h",
    "content": "//\n//  SCStillImageDepthBlurFilter.h\n//  Snapchat\n//\n//  Created by Brian Ng on 10/11/17.\n//\n\n#import \"SCProcessingModule.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n @class SCStillImageDepthBlurFilter\n    This module uses the CIDepthBlurEffect CIFilter that uses rgb and depth information to produce an image with\n    the portrait mode effect (background blurred, foreground sharp).\n */\n@interface SCStillImageDepthBlurFilter : NSObject\n\n// Applies the CIDepthBlurEffect filter to a still image capture photo. If an error occurred, the original\n// photoData will be returned\n- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0);\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.m",
    "content": "//\n//  SCStillImageDepthBlurFilter.m\n//  Snapchat\n//\n//  Created by Brian Ng on 10/11/17.\n//\n\n#import \"SCStillImageDepthBlurFilter.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCProcessingModuleUtils.h\"\n\n@import CoreMedia;\n\n@implementation SCStillImageDepthBlurFilter {\n    CIContext *_context;\n    CIFilter *_filter;\n    CVPixelBufferPoolRef _bufferPool;\n}\n\n- (instancetype)init\n{\n    if (self = [super init]) {\n        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];\n        _filter = [CIFilter filterWithName:@\"CIDepthBlurEffect\"];\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);\n    CVPixelBufferPoolRelease(_bufferPool);\n}\n\n- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0)\n{\n    CIImage *mainImage = [CIImage imageWithData:photoData];\n    CVPixelBufferRef disparityImagePixelBuffer = renderData.depthDataMap;\n    CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:disparityImagePixelBuffer];\n    if (!disparityImage) {\n        return photoData;\n    }\n    [_filter setValue:mainImage forKey:kCIInputImageKey];\n    [_filter setValue:disparityImage forKey:kCIInputDisparityImageKey];\n    if (renderData.depthBlurPointOfInterest && SCCameraTweaksEnableFilterInputFocusRect()) {\n        CGPoint pointOfInterest = *renderData.depthBlurPointOfInterest;\n        [_filter setValue:[CIVector vectorWithX:pointOfInterest.x Y:pointOfInterest.y Z:1 W:1]\n                   forKey:@\"inputFocusRect\"];\n    }\n    CIImage *result = [_filter outputImage];\n    if (!result) {\n        return photoData;\n    }\n    CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB();\n    NSData *processedPhotoData = [_context JPEGRepresentationOfImage:result colorSpace:deviceRGBColorSpace options:@{}];\n    CGColorSpaceRelease(deviceRGBColorSpace);\n  
  if (!processedPhotoData) {\n        return photoData;\n    }\n    renderData.sampleBuffer = [SCProcessingModuleUtils sampleBufferFromImage:result\n                                                             oldSampleBuffer:renderData.sampleBuffer\n                                                                  bufferPool:_bufferPool\n                                                                     context:_context];\n    return processedPhotoData;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/NSURL+Asset.h",
    "content": "//\n//  NSURL+Asset.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/30/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface NSURL (Asset)\n\n/**\n In case the media server is reset while recording, AVFoundation\n gets in a weird state. Even though we reload our AVFoundation\n object, we still need to reload the asset keys on the\n output file. If we don't, the AVAssetWriter will fail when started.\n */\n- (void)reloadAssetKeys;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/NSURL+Asset.m",
    "content": "//\n//  NSURL+NSURL_Asset.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/30/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"NSURL+Asset.h\"\n\n#import <SCBase/SCMacros.h>\n\n@import AVFoundation;\n\n@implementation NSURL (Asset)\n\n- (void)reloadAssetKeys\n{\n    AVAsset *videoAsset = [AVAsset assetWithURL:self];\n    [videoAsset loadValuesAsynchronouslyForKeys:@[ @keypath(videoAsset.duration) ] completionHandler:nil];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/OWNERS",
    "content": "--- !OWNERS\n\nversion: 2\n\ndefault:\n  jira_project: CCAM\n  owners:\n    num_required_reviewers: 0\n    teams:\n    - Snapchat/core-camera-ios\n"
  },
  {
    "path": "ManagedCapturer/SCAudioCaptureSession.h",
    "content": "//\n//  SCAudioCaptureSession.h\n//  Snapchat\n//\n//  Created by Liu Liu on 3/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\nextern double const kSCAudioCaptureSessionDefaultSampleRate;\n\ntypedef void (^audio_capture_session_block)(NSError *error);\n\n@protocol SCAudioCaptureSession;\n\n@protocol SCAudioCaptureSessionDelegate <NSObject>\n\n- (void)audioCaptureSession:(id<SCAudioCaptureSession>)audioCaptureSession\n      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n@end\n\n@protocol SCAudioCaptureSession <NSObject>\n\n@property (nonatomic, weak) id<SCAudioCaptureSessionDelegate> delegate;\n\n// Return a detailed information dictionary if an error occurred, else return nil\n- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate\n                                      completionHandler:(audio_capture_session_block)completionHandler;\n\n- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler;\n\n@end\n\n@interface SCAudioCaptureSession : NSObject <SCAudioCaptureSession>\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCAudioCaptureSession.m",
    "content": "//\n//  SCAudioCaptureSession.m\n//  Snapchat\n//\n//  Created by Liu Liu on 3/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCAudioCaptureSession.h\"\n\n#import <SCAudio/SCAudioSession.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <mach/mach.h>\n#import <mach/mach_time.h>\n\n@import AVFoundation;\n\ndouble const kSCAudioCaptureSessionDefaultSampleRate = 44100;\nNSString *const SCAudioCaptureSessionErrorDomain = @\"SCAudioCaptureSessionErrorDomain\";\n\nstatic NSInteger const kNumberOfAudioBuffersInQueue = 15;\nstatic float const kAudioBufferDurationInSeconds = 0.2;\n\nstatic char *const kSCAudioCaptureSessionQueueLabel = \"com.snapchat.audio-capture-session\";\n\n@implementation SCAudioCaptureSession {\n    SCQueuePerformer *_performer;\n\n    AudioQueueRef _audioQueue;\n    AudioQueueBufferRef _audioQueueBuffers[kNumberOfAudioBuffersInQueue];\n    CMAudioFormatDescriptionRef _audioFormatDescription;\n}\n\n@synthesize delegate = _delegate;\n\n- (instancetype)init\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCAudioCaptureSessionQueueLabel\n                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCamera];\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    [self disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];\n}\n\nstatic AudioStreamBasicDescription setupAudioFormat(UInt32 inFormatID, Float64 sampleRate)\n{\n    SCTraceStart();\n    AudioStreamBasicDescription recordFormat = {0};\n\n    recordFormat.mSampleRate = sampleRate;\n    recordFormat.mChannelsPerFrame = (UInt32)[SCAudioSession sharedInstance].inputNumberOfChannels;\n\n    
recordFormat.mFormatID = inFormatID;\n    if (inFormatID == kAudioFormatLinearPCM) {\n        // if we want pcm, default to signed 16-bit little-endian\n        recordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;\n        recordFormat.mBitsPerChannel = 16;\n        recordFormat.mBytesPerPacket = recordFormat.mBytesPerFrame =\n            (recordFormat.mBitsPerChannel / 8) * recordFormat.mChannelsPerFrame;\n        recordFormat.mFramesPerPacket = 1;\n    }\n    return recordFormat;\n}\n\nstatic int computeRecordBufferSize(const AudioStreamBasicDescription *format, const AudioQueueRef audioQueue,\n                                   float seconds)\n{\n    SCTraceStart();\n    int packets, frames, bytes = 0;\n    frames = (int)ceil(seconds * format->mSampleRate);\n\n    if (format->mBytesPerFrame > 0) {\n        bytes = frames * format->mBytesPerFrame;\n    } else {\n        UInt32 maxPacketSize;\n        if (format->mBytesPerPacket > 0)\n            maxPacketSize = format->mBytesPerPacket; // constant packet size\n        else {\n            UInt32 propertySize = sizeof(maxPacketSize);\n            AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize,\n                                  &propertySize);\n        }\n        if (format->mFramesPerPacket > 0)\n            packets = frames / format->mFramesPerPacket;\n        else\n            packets = frames; // worst-case scenario: 1 frame in a packet\n        if (packets == 0)     // sanity check\n            packets = 1;\n        bytes = packets * maxPacketSize;\n    }\n    return bytes;\n}\n\nstatic NSTimeInterval machHostTimeToSeconds(UInt64 mHostTime)\n{\n    static dispatch_once_t onceToken;\n    static mach_timebase_info_data_t timebase_info;\n    dispatch_once(&onceToken, ^{\n        (void)mach_timebase_info(&timebase_info);\n    });\n    return (double)mHostTime * timebase_info.numer / timebase_info.denom / 
NSEC_PER_SEC;\n}\n\nstatic void audioQueueBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer,\n                                    const AudioTimeStamp *nStartTime, UInt32 inNumPackets,\n                                    const AudioStreamPacketDescription *inPacketDesc)\n{\n    SCTraceStart();\n    SCAudioCaptureSession *audioCaptureSession = (__bridge SCAudioCaptureSession *)inUserData;\n    if (inNumPackets > 0) {\n        CMTime PTS = CMTimeMakeWithSeconds(machHostTimeToSeconds(nStartTime->mHostTime), 600);\n        [audioCaptureSession appendAudioQueueBuffer:inBuffer\n                                         numPackets:inNumPackets\n                                                PTS:PTS\n                                 packetDescriptions:inPacketDesc];\n    }\n\n    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);\n}\n\n- (void)appendAudioQueueBuffer:(AudioQueueBufferRef)audioQueueBuffer\n                    numPackets:(UInt32)numPackets\n                           PTS:(CMTime)PTS\n            packetDescriptions:(const AudioStreamPacketDescription *)packetDescriptions\n{\n    SCTraceStart();\n    CMBlockBufferRef dataBuffer = NULL;\n    CMBlockBufferCreateWithMemoryBlock(NULL, NULL, audioQueueBuffer->mAudioDataByteSize, NULL, NULL, 0,\n                                       audioQueueBuffer->mAudioDataByteSize, 0, &dataBuffer);\n    if (dataBuffer) {\n        CMBlockBufferReplaceDataBytes(audioQueueBuffer->mAudioData, dataBuffer, 0,\n                                      audioQueueBuffer->mAudioDataByteSize);\n        CMSampleBufferRef sampleBuffer = NULL;\n        CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _audioFormatDescription,\n                                                        numPackets, PTS, packetDescriptions, &sampleBuffer);\n        if (sampleBuffer) {\n            [self processAudioSampleBuffer:sampleBuffer];\n            CFRelease(sampleBuffer);\n        }\n        
CFRelease(dataBuffer);\n    }\n}\n\n- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SCTraceStart();\n    [_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];\n}\n\n- (NSError *)_generateErrorForType:(NSString *)errorType\n                         errorCode:(int)errorCode\n                            format:(AudioStreamBasicDescription)format\n{\n    NSDictionary *errorInfo = @{\n        @\"error_type\" : errorType,\n        @\"error_code\" : @(errorCode),\n        @\"record_format\" : @{\n            @\"format_id\" : @(format.mFormatID),\n            @\"format_flags\" : @(format.mFormatFlags),\n            @\"sample_rate\" : @(format.mSampleRate),\n            @\"bytes_per_packet\" : @(format.mBytesPerPacket),\n            @\"frames_per_packet\" : @(format.mFramesPerPacket),\n            @\"bytes_per_frame\" : @(format.mBytesPerFrame),\n            @\"channels_per_frame\" : @(format.mChannelsPerFrame),\n            @\"bits_per_channel\" : @(format.mBitsPerChannel)\n        }\n    };\n    SCLogGeneralInfo(@\"Audio queue error occured. 
ErrorInfo: %@\", errorInfo);\n    return [NSError errorWithDomain:SCAudioCaptureSessionErrorDomain code:errorCode userInfo:errorInfo];\n}\n\n- (NSError *)beginAudioRecordingWithSampleRate:(Float64)sampleRate\n{\n    SCTraceStart();\n    if ([SCAudioSession sharedInstance].inputAvailable) {\n        // SCAudioSession should be activated already\n        SCTraceSignal(@\"Set audio session to be active\");\n        AudioStreamBasicDescription recordFormat = setupAudioFormat(kAudioFormatLinearPCM, sampleRate);\n        OSStatus audioQueueCreationStatus = AudioQueueNewInput(&recordFormat, audioQueueBufferHandler,\n                                                               (__bridge void *)self, NULL, NULL, 0, &_audioQueue);\n        if (audioQueueCreationStatus != 0) {\n            NSError *error = [self _generateErrorForType:@\"audio_queue_create_error\"\n                                               errorCode:audioQueueCreationStatus\n                                                  format:recordFormat];\n            return error;\n        }\n        SCTraceSignal(@\"Initialize audio queue with new input\");\n        UInt32 bufferByteSize = computeRecordBufferSize(\n            &recordFormat, _audioQueue, kAudioBufferDurationInSeconds); // Enough bytes for half a second\n        for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {\n            AudioQueueAllocateBuffer(_audioQueue, bufferByteSize, &_audioQueueBuffers[i]);\n            AudioQueueEnqueueBuffer(_audioQueue, _audioQueueBuffers[i], 0, NULL);\n        }\n        SCTraceSignal(@\"Allocate audio buffer\");\n        UInt32 size = sizeof(recordFormat);\n        audioQueueCreationStatus =\n            AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_StreamDescription, &recordFormat, &size);\n        if (0 != audioQueueCreationStatus) {\n            NSError *error = [self _generateErrorForType:@\"audio_queue_get_property_error\"\n                                               
errorCode:audioQueueCreationStatus\n                                                  format:recordFormat];\n            [self disposeAudioRecording];\n            return error;\n        }\n        SCTraceSignal(@\"Audio queue sample rate %lf\", recordFormat.mSampleRate);\n        AudioChannelLayout acl;\n        bzero(&acl, sizeof(acl));\n        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n        audioQueueCreationStatus = CMAudioFormatDescriptionCreate(NULL, &recordFormat, sizeof(acl), &acl, 0, NULL, NULL,\n                                                                  &_audioFormatDescription);\n        if (0 != audioQueueCreationStatus) {\n            NSError *error = [self _generateErrorForType:@\"audio_queue_audio_format_error\"\n                                               errorCode:audioQueueCreationStatus\n                                                  format:recordFormat];\n            [self disposeAudioRecording];\n            return error;\n        }\n        SCTraceSignal(@\"Start audio queue\");\n        audioQueueCreationStatus = AudioQueueStart(_audioQueue, NULL);\n        if (0 != audioQueueCreationStatus) {\n            NSError *error = [self _generateErrorForType:@\"audio_queue_start_error\"\n                                               errorCode:audioQueueCreationStatus\n                                                  format:recordFormat];\n            [self disposeAudioRecording];\n            return error;\n        }\n    }\n    return nil;\n}\n\n- (void)disposeAudioRecording\n{\n    SCTraceStart();\n    SCLogGeneralInfo(@\"dispose audio recording\");\n    if (_audioQueue) {\n        AudioQueueStop(_audioQueue, true);\n        AudioQueueDispose(_audioQueue, true);\n        for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {\n            _audioQueueBuffers[i] = NULL;\n        }\n        _audioQueue = NULL;\n    }\n    if (_audioFormatDescription) {\n        CFRelease(_audioFormatDescription);\n        
_audioFormatDescription = NULL;\n    }\n}\n\n#pragma mark - Public methods\n\n- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate\n                                      completionHandler:(audio_capture_session_block)completionHandler\n{\n    SCTraceStart();\n    // Request audio session change for recording mode.\n    [_performer perform:^{\n        SCTraceStart();\n        NSError *error = [self beginAudioRecordingWithSampleRate:sampleRate];\n        if (completionHandler) {\n            completionHandler(error);\n        }\n    }];\n}\n\n- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n{\n    SCTraceStart();\n    [_performer performAndWait:^{\n        SCTraceStart();\n        [self disposeAudioRecording];\n        if (completionHandler) {\n            completionHandler();\n        }\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCameraSettingUtils.h",
    "content": "//\n//  SCCameraSettingUtils.h\n//  Snapchat\n//\n//  Created by Pinlin Chen on 12/09/2017.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <SCCapturerDefines.h>\n\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\nSC_EXTERN_C_BEGIN\n\n// Return the value if metadata attribute is found; otherwise, return nil\nextern NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments);\nextern NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments);\nextern NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments);\nextern void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata);\n\nSC_EXTERN_C_END\n"
  },
  {
    "path": "ManagedCapturer/SCCameraSettingUtils.m",
    "content": "//\n//  SCCameraSettingUtils.m\n//  Snapchat\n//\n//  Created by Pinlin Chen on 12/09/2017.\n//\n\n#import \"SCCameraSettingUtils.h\"\n\n#import <SCFoundation/SCLog.h>\n\n#import <ImageIO/CGImageProperties.h>\n\nNSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments)\n{\n    if (!exifAttachments) {\n        return nil;\n    }\n    id value = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifExposureTime);\n    // Fetching exposure time from the sample buffer\n    if ([value isKindOfClass:[NSNumber class]]) {\n        return (NSNumber *)value;\n    }\n    return nil;\n}\n\nNSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments)\n{\n    if (!exifAttachments) {\n        return nil;\n    }\n    id value = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue);\n    if ([value isKindOfClass:[NSNumber class]]) {\n        return (NSNumber *)value;\n    }\n    return nil;\n}\n\nNSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments)\n{\n    if (!exifAttachments) {\n        return nil;\n    }\n    NSArray *ISOSpeedRatings = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifISOSpeedRatings);\n    if ([ISOSpeedRatings respondsToSelector:@selector(count)] &&\n        [ISOSpeedRatings respondsToSelector:@selector(firstObject)] && ISOSpeedRatings.count > 0) {\n        id value = [ISOSpeedRatings firstObject];\n        if ([value isKindOfClass:[NSNumber class]]) {\n            return (NSNumber *)value;\n        }\n    }\n    return nil;\n}\n\nvoid retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata)\n{\n    CFDictionaryRef exifAttachments = CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);\n    if (exifAttachments == nil) {\n        SCLogCoreCameraWarning(@\"SampleBuffer exifAttachment is nil\");\n    }\n    // Fetching exposure time from the sample buffer\n    NSNumber 
*currentExposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);\n    if (currentExposureTimeNum) {\n        metadata->exposureTime = [currentExposureTimeNum floatValue];\n    }\n    NSNumber *currentISOSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);\n    if (currentISOSpeedRatingNum) {\n        metadata->isoSpeedRating = (int)[currentISOSpeedRatingNum integerValue];\n    }\n    NSNumber *currentBrightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);\n    if (currentBrightnessNum) {\n        float currentBrightness = [currentBrightnessNum floatValue];\n        if (isfinite(currentBrightness)) {\n            metadata->brightness = currentBrightness;\n        } else {\n            metadata->brightness = 0;\n        }\n    }\n}\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureCommon.h",
    "content": "//\n//  SCCaptureCommon.h\n//  Snapchat\n//\n//  Created by Lin Jia on 9/29/17.\n//\n//\n\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@class SCManagedCapturerState;\n@class SCManagedLensesProcessor;\n@class SCManagedVideoDataSource;\n@class SCManagedVideoCapturerOutputSettings;\n@class SCLens;\n@class SCLensCategory;\n@class SCLookseryFilterFactory;\n@class SCSnapScannedData;\n@class SCCraftResourceManager;\n@class SCScanConfiguration;\n@class SCCapturerToken;\n@class SCProcessingPipeline;\n@class SCTimedTask;\n@protocol SCManagedSampleBufferDisplayController;\n\ntypedef void (^sc_managed_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,\n                                                                             NSDictionary *metadata, NSError *error,\n                                                                             SCManagedCapturerState *state);\n\ntypedef void (^sc_managed_capturer_capture_video_frame_completion_handler_t)(UIImage *image);\n\ntypedef void (^sc_managed_capturer_start_recording_completion_handler_t)(SCVideoCaptureSessionInfo session,\n                                                                         NSError *error);\n\ntypedef void (^sc_managed_capturer_convert_view_coordniates_completion_handler_t)(CGPoint pointOfInterest);\n\ntypedef void (^sc_managed_capturer_unsafe_changes_t)(AVCaptureSession *session, AVCaptureDevice *front,\n                                                     AVCaptureDeviceInput *frontInput, AVCaptureDevice *back,\n                                                     AVCaptureDeviceInput *backInput, SCManagedCapturerState *state);\n\ntypedef void (^sc_managed_capturer_stop_running_completion_handler_t)(BOOL succeed);\n\ntypedef 
void (^sc_managed_capturer_scan_results_handler_t)(NSObject *resultObject);\n\ntypedef void (^sc_managed_lenses_processor_category_point_completion_handler_t)(SCLensCategory *category,\n                                                                                NSInteger categoriesCount);\nextern CGFloat const kSCManagedCapturerAspectRatioUnspecified;\n\nextern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth;\n\nextern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight;\n\nextern CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p;\n\nextern CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p;\n\nextern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth;\n\nextern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight;\n\nextern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth;\n\nextern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight;\n\nextern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth;\n\nextern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureCommon.m",
    "content": "//\n//  SCCaptureCommon.m\n//  Snapchat\n//\n//  Created by Lin Jia on 9/29/17.\n//\n//\n\n#import \"SCCaptureCommon.h\"\n\nCGFloat const kSCManagedCapturerAspectRatioUnspecified = -1.0;\n\nCGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth = 1280;\n\nCGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight = 720;\n\nCGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p = 1920;\n\nCGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p = 1080;\n\nCGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth = 2592;\n\nCGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight = 1936;\n\nCGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth = 640;\n\nCGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight = 480;\n\nCGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth = 1280;\n\nCGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight = 720;\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureCoreImageFaceDetector.h",
    "content": "//\n//  SCCaptureCoreImageFaceDetector.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/27/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is intended to detect faces in Camera. It receives a CMSampleBuffer, processes the face detection using\n//  CIDetector, and announces the bounds and faceIDs.\n\n#import \"SCCaptureFaceDetector.h\"\n\n#import <SCBase/SCMacros.h>\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureCoreImageFaceDetector : NSObject <SCCaptureFaceDetector, SCManagedVideoDataSourceListener>\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureCoreImageFaceDetector.m",
    "content": "//\n//  SCCaptureCoreImageFaceDetector.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/27/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureCoreImageFaceDetector.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureFaceDetectionParser.h\"\n#import \"SCCaptureFaceDetectorTrigger.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCapturer.h\"\n\n#import <SCFoundation/NSArray+Helpers.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCFoundation/UIImage+CVPixelBufferRef.h>\n\n@import ImageIO;\n\nstatic const NSTimeInterval kSCCaptureCoreImageFaceDetectorMaxAllowedLatency =\n    1; // Drop the face detection result if it is 1 second late.\nstatic const NSInteger kDefaultNumberOfSequentialOutputSampleBuffer = -1; // -1 means no sequential sample buffers.\n\nstatic char *const kSCCaptureCoreImageFaceDetectorProcessQueue =\n    \"com.snapchat.capture-core-image-face-detector-process\";\n\n@implementation SCCaptureCoreImageFaceDetector {\n    CIDetector *_detector;\n    SCCaptureResource *_captureResource;\n\n    BOOL _isDetecting;\n    BOOL _hasDetectedFaces;\n    NSInteger _numberOfSequentialOutputSampleBuffer;\n    NSUInteger _detectionFrequency;\n    NSDictionary *_detectorOptions;\n    SCManagedCaptureDevicePosition _devicePosition;\n    CIContext *_context;\n\n    SCQueuePerformer *_callbackPerformer;\n    SCQueuePerformer *_processPerformer;\n\n    SCCaptureFaceDetectionParser *_parser;\n    SCCaptureFaceDetectorTrigger *_trigger;\n}\n\n@synthesize trigger = _trigger;\n@synthesize parser = _parser;\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    SCTraceODPCompatibleStart(2);\n    self = [super init];\n    if (self) {\n        
SCAssert(captureResource, @\"SCCaptureResource should not be nil\");\n        SCAssert(captureResource.queuePerformer, @\"SCQueuePerformer should not be nil\");\n        _callbackPerformer = captureResource.queuePerformer;\n        _captureResource = captureResource;\n        _parser = [[SCCaptureFaceDetectionParser alloc]\n            initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];\n        _processPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureCoreImageFaceDetectorProcessQueue\n                                                   qualityOfService:QOS_CLASS_USER_INITIATED\n                                                          queueType:DISPATCH_QUEUE_SERIAL\n                                                            context:SCQueuePerformerContextCamera];\n        _detectionFrequency = SCExperimentWithFaceDetectionFrequency();\n        _devicePosition = captureResource.device.position;\n        _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];\n    }\n    return self;\n}\n\n- (void)_setupDetectionIfNeeded\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!_detector);\n    if (!_context) {\n        _context = [CIContext context];\n    }\n    // For CIDetectorMinFeatureSize, the valid range is [0.0100, 0.5000], otherwise, it will cause a crash.\n    if (!_detectorOptions) {\n        _detectorOptions = @{\n            CIDetectorAccuracy : CIDetectorAccuracyLow,\n            CIDetectorTracking : @(YES),\n            CIDetectorMaxFeatureCount : @(2),\n            CIDetectorMinFeatureSize : @(SCCameraFaceFocusMinFaceSize()),\n            CIDetectorNumberOfAngles : @(3)\n        };\n    }\n    @try {\n        _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:_context options:_detectorOptions];\n    } @catch (NSException *exception) {\n        SCLogCoreCameraError(@\"Failed to create CIDetector with exception:%@\", exception);\n    }\n}\n\n- (void)_resetDetection\n{\n    
SCTraceODPCompatibleStart(2);\n    _detector = nil;\n    [self _setupDetectionIfNeeded];\n}\n\n- (SCQueuePerformer *)detectionPerformer\n{\n    return _processPerformer;\n}\n\n- (void)startDetection\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([[self detectionPerformer] isCurrentPerformer], @\"Calling -startDetection in an invalid queue.\");\n    [self _setupDetectionIfNeeded];\n    _isDetecting = YES;\n    _hasDetectedFaces = NO;\n    _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;\n}\n\n- (void)stopDetection\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([[self detectionPerformer] isCurrentPerformer], @\"Calling -stopDetection in an invalid queue.\");\n    _isDetecting = NO;\n}\n\n- (NSDictionary<NSNumber *, NSValue *> *)_detectFaceFeaturesInImage:(CIImage *)image\n                                                    withOrientation:(CGImagePropertyOrientation)orientation\n{\n    SCTraceODPCompatibleStart(2);\n    NSDictionary *opts =\n        @{ CIDetectorImageOrientation : @(orientation),\n           CIDetectorEyeBlink : @(NO),\n           CIDetectorSmile : @(NO) };\n    NSArray<CIFeature *> *features = [_detector featuresInImage:image options:opts];\n    return [_parser parseFaceBoundsByFaceIDFromCIFeatures:features\n                                            withImageSize:image.extent.size\n                                         imageOrientation:orientation];\n}\n\n#pragma mark - SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(_isDetecting);\n\n    // Reset detection if the device position changes. 
Resetting detection should execute in _processPerformer, so we\n    // just set a flag here, and then do it later in the perform block.\n    BOOL shouldForceResetDetection = NO;\n    if (devicePosition != _devicePosition) {\n        _devicePosition = devicePosition;\n        shouldForceResetDetection = YES;\n        _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;\n    }\n\n    _numberOfSequentialOutputSampleBuffer++;\n    SC_GUARD_ELSE_RETURN(_numberOfSequentialOutputSampleBuffer % _detectionFrequency == 0);\n    @weakify(self);\n    CFRetain(sampleBuffer);\n    [_processPerformer perform:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n\n        if (shouldForceResetDetection) {\n            // Resetting detection usually costs no more than 1ms.\n            [self _resetDetection];\n        }\n\n        CGImagePropertyOrientation orientation =\n            (devicePosition == SCManagedCaptureDevicePositionBack ? 
kCGImagePropertyOrientationRight\n                                                                  : kCGImagePropertyOrientationLeftMirrored);\n        CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];\n        NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =\n            [self _detectFaceFeaturesInImage:image withOrientation:orientation];\n\n        // Calculate the latency for face detection, if it is too long, discard the face detection results.\n        NSTimeInterval latency =\n            CACurrentMediaTime() - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));\n        CFRelease(sampleBuffer);\n        if (latency >= kSCCaptureCoreImageFaceDetectorMaxAllowedLatency) {\n            faceBoundsByFaceID = nil;\n        }\n\n        // Only announce face detection result if faceBoundsByFaceID is not empty, or faceBoundsByFaceID was not empty\n        // last time.\n        if (faceBoundsByFaceID.count > 0 || self->_hasDetectedFaces) {\n            self->_hasDetectedFaces = faceBoundsByFaceID.count > 0;\n            [self->_callbackPerformer perform:^{\n                [self->_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                              didDetectFaceBounds:faceBoundsByFaceID];\n            }];\n        }\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceAuthorization.h",
    "content": "//\n//  SCCaptureDeviceAuthorization.h\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 8/19/14.\n//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureDeviceAuthorization : NSObject\n\n// Methods for checking / requesting authorization to use media capture devices of a given type.\n+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType;\n+ (BOOL)deniedForMediaType:(NSString *)mediaType;\n+ (BOOL)restrictedForMediaType:(NSString *)mediaType;\n+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler;\n\n// Convenience methods for media type == AVMediaTypeVideo\n+ (BOOL)notDeterminedForVideoCapture;\n+ (BOOL)deniedForVideoCapture;\n+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceAuthorization.m",
    "content": "//\n//  SCCaptureDeviceAuthorization.m\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 8/19/14.\n//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureDeviceAuthorization.h\"\n\n#import <BlizzardSchema/SCAEvents.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCLogger/SCLogger.h>\n\n@import AVFoundation;\n\n@implementation SCCaptureDeviceAuthorization\n\n#pragma mark - Public\n\n+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType\n{\n    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusNotDetermined;\n}\n\n+ (BOOL)deniedForMediaType:(NSString *)mediaType\n{\n    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusDenied;\n}\n\n+ (BOOL)restrictedForMediaType:(NSString *)mediaType\n{\n    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusRestricted;\n}\n\n+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler\n{\n    [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler];\n}\n\n#pragma mark - Convenience methods for AVMediaTypeVideo\n\n+ (BOOL)notDeterminedForVideoCapture\n{\n    return [self notDeterminedForMediaType:AVMediaTypeVideo];\n}\n\n+ (BOOL)deniedForVideoCapture\n{\n    return [self deniedForMediaType:AVMediaTypeVideo];\n}\n\n+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler\n{\n    BOOL firstTimeAsking =\n        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined;\n    [self requestAccessForMediaType:AVMediaTypeVideo\n                  completionHandler:^(BOOL granted) {\n                      if (firstTimeAsking) {\n                          SCAPermissionPromptResponse *responseEvent = [[SCAPermissionPromptResponse alloc] init];\n                          [responseEvent 
setPermissionPromptType:SCAPermissionPromptType_OS_CAMERA];\n                          [responseEvent setAccepted:granted];\n                          [[SCLogger sharedInstance] logUserTrackedEvent:responseEvent];\n                      }\n                      if (handler) {\n                          handler(granted);\n                      }\n                  }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h",
    "content": "//\n//  SCCaptureDeviceAuthorizationChecker.h\n//  Snapchat\n//\n//  Created by Sun Lei on 15/03/2018.\n//\n\n@class SCQueuePerformer;\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n In general, the function of SCCaptureDeviceAuthorizationChecker is to speed up the checking of AVMediaTypeVideo\n authorization. It would cache the authorization value. 'preloadVideoCaptureAuthorization' would be called very early\n after the app is launched to populate the cached value. 'authorizedForVideoCapture' could be called to get the value\n synchronously.\n\n */\n\n@interface SCCaptureDeviceAuthorizationChecker : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer NS_DESIGNATED_INITIALIZER;\n\n- (BOOL)authorizedForVideoCapture;\n\n- (void)preloadVideoCaptureAuthorization;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m",
    "content": "//\n//  SCCaptureDeviceAuthorizationChecker.m\n//  Snapchat\n//\n//  Created by Sun Lei on 15/03/2018.\n//\n\n#import \"SCCaptureDeviceAuthorizationChecker.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@import AVFoundation;\n\n@interface SCCaptureDeviceAuthorizationChecker () {\n    SCQueuePerformer *_performer;\n    BOOL _videoCaptureAuthorizationCachedValue;\n}\n@end\n\n@implementation SCCaptureDeviceAuthorizationChecker\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n{\n    SCTraceODPCompatibleStart(2);\n    self = [super init];\n    if (self) {\n        _performer = performer;\n        _videoCaptureAuthorizationCachedValue = NO;\n    }\n    return self;\n}\n\n- (void)preloadVideoCaptureAuthorization\n{\n    SCTraceODPCompatibleStart(2);\n    [_performer perform:^{\n        SCTraceODPCompatibleStart(2);\n        _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];\n    }];\n}\n\n- (BOOL)authorizedForVideoCapture\n{\n    SCTraceODPCompatibleStart(2);\n    // Cache authorizedForVideoCapture for low devices if it's YES\n    // [AVCaptureDevice authorizationStatusForMediaType:] is expensive on low devices like iPhone4\n    if (_videoCaptureAuthorizationCachedValue) {\n        // If the user authorizes and then unauthorizes, iOS would SIGKILL the app.\n        // When the user opens the app, a pop-up tells the user to allow camera access in settings.\n        // So 'return YES' makes sense here.\n        return YES;\n    } else {\n        @weakify(self);\n        [_performer performAndWait:^{\n            @strongify(self);\n            SC_GUARD_ELSE_RETURN(self);\n            if (!_videoCaptureAuthorizationCachedValue) {\n                _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];\n            }\n        }];\n        return _videoCaptureAuthorizationCachedValue;\n    }\n}\n\n- 
(BOOL)authorizedForMediaType:(NSString *)mediaType\n{\n    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusAuthorized;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceResolver.h",
    "content": "//\n//  SCCaptureDeviceResolver.h\n//  Snapchat\n//\n//  Created by Lin Jia on 11/8/17.\n//\n//\n\n#import <AVFoundation/AVFoundation.h>\n\n/*\n See https://jira.sc-corp.net/browse/CCAM-5843\n\n Retrieving AVCaptureDevice is a flaky operation. Thus create capture device resolver to make our code more robust.\n\n Resolver is used to retrieve AVCaptureDevice. We are going to do our best to find the camera for you.\n\n Resolver is only going to be used by SCManagedCaptureDevice.\n\n All APIs are thread safe.\n */\n\n@interface SCCaptureDeviceResolver : NSObject\n\n+ (instancetype)sharedInstance;\n\n- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position;\n\n- (AVCaptureDevice *)findDualCamera;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureDeviceResolver.m",
    "content": "//\n//  SCCaptureDeviceResolver.m\n//  Snapchat\n//\n//  Created by Lin Jia on 11/8/17.\n//\n//\n\n#import \"SCCaptureDeviceResolver.h\"\n\n#import \"SCCameraTweaks.h\"\n\n#import <SCBase/SCAvailability.h>\n#import <SCFoundation/SCAssertWrapper.h>\n\n@interface SCCaptureDeviceResolver () {\n    AVCaptureDeviceDiscoverySession *_discoverySession;\n}\n\n@end\n\n@implementation SCCaptureDeviceResolver\n\n+ (instancetype)sharedInstance\n{\n    static SCCaptureDeviceResolver *resolver;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        resolver = [[SCCaptureDeviceResolver alloc] init];\n    });\n    return resolver;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        NSMutableArray *deviceTypes = [[NSMutableArray alloc] init];\n        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];\n        if (SC_AT_LEAST_IOS_10_2) {\n            [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];\n        }\n        // TODO: we should KVO _discoverySession.devices.\n        _discoverySession =\n            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes\n                                                                   mediaType:AVMediaTypeVideo\n                                                                    position:AVCaptureDevicePositionUnspecified];\n    }\n    return self;\n}\n\n- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position\n{\n    SCAssert(position == AVCaptureDevicePositionFront || position == AVCaptureDevicePositionBack, @\"\");\n    AVCaptureDevice *captureDevice;\n    if (position == AVCaptureDevicePositionFront) {\n        captureDevice = [self _pickBestFrontCamera:[_discoverySession.devices copy]];\n    } else if (position == AVCaptureDevicePositionBack) {\n        captureDevice = [self _pickBestBackCamera:[_discoverySession.devices copy]];\n    }\n    if (captureDevice) {\n        return captureDevice;\n 
   }\n\n    if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {\n        captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera\n                                                           mediaType:AVMediaTypeVideo\n                                                            position:position];\n        if (captureDevice) {\n            return captureDevice;\n        }\n    }\n\n    // if code still execute, discoverSession failed, then we keep searching.\n    captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera\n                                                       mediaType:AVMediaTypeVideo\n                                                        position:position];\n    if (captureDevice) {\n        return captureDevice;\n    }\n\n#if !TARGET_IPHONE_SIMULATOR\n    // We do not return nil at the beginning of the function for simulator, because simulators of different IOS\n    // versions can check whether or not our camera device API access is correct.\n    SCAssertFail(@\"No camera is found.\");\n#endif\n    return nil;\n}\n\n- (AVCaptureDevice *)_pickBestFrontCamera:(NSArray<AVCaptureDevice *> *)devices\n{\n    for (AVCaptureDevice *device in devices) {\n        if (device.position == AVCaptureDevicePositionFront) {\n            return device;\n        }\n    }\n    return nil;\n}\n\n- (AVCaptureDevice *)_pickBestBackCamera:(NSArray<AVCaptureDevice *> *)devices\n{\n    // Look for dual camera first if needed. 
If dual camera not found, continue to look for wide angle camera.\n    if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) {\n        for (AVCaptureDevice *device in devices) {\n            if (device.position == AVCaptureDevicePositionBack &&\n                device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {\n                return device;\n            }\n        }\n    }\n\n    for (AVCaptureDevice *device in devices) {\n        if (device.position == AVCaptureDevicePositionBack &&\n            device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) {\n            return device;\n        }\n    }\n    return nil;\n}\n\n- (AVCaptureDevice *)findDualCamera\n{\n    if (SC_AT_LEAST_IOS_10_2) {\n        for (AVCaptureDevice *device in [_discoverySession.devices copy]) {\n            if (device.position == AVCaptureDevicePositionBack &&\n                device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) {\n                return device;\n            }\n        }\n    }\n\n    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera\n                                                                        mediaType:AVMediaTypeVideo\n                                                                         position:AVCaptureDevicePositionBack];\n    if (captureDevice) {\n        return captureDevice;\n    }\n\n#if !TARGET_IPHONE_SIMULATOR\n    // We do not return nil at the beginning of the function for simulator, because simulators of different IOS\n    // versions can check whether or not our camera device API access is correct.\n    SCAssertFail(@\"No camera is found.\");\n#endif\n    return nil;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureFaceDetectionParser.h",
    "content": "//\n//  SCCaptureFaceDetectionParser.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/13/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class offers methods to parse face bounds from raw data, e.g., AVMetadataObject, CIFeature.\n\n#import <SCBase/SCMacros.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <CoreImage/CoreImage.h>\n\n@interface SCCaptureFaceDetectionParser : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea;\n\n/**\n Parse face bounds from AVMetadataObject.\n\n @param metadataObjects An array of AVMetadataObject.\n @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString.\n */\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:\n    (NSArray<__kindof AVMetadataObject *> *)metadataObjects;\n\n/**\n Parse face bounds from CIFeature.\n\n @param features An array of CIFeature.\n @param imageSize Size of the image, where the feature are detected from.\n @param imageOrientation Orientation of the image.\n @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString.\n */\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features\n                                                                 withImageSize:(CGSize)imageSize\n                                                              imageOrientation:\n                                                                  (CGImagePropertyOrientation)imageOrientation;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureFaceDetectionParser.m",
    "content": "//\n//  SCCaptureFaceDetectionParser.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/13/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureFaceDetectionParser.h\"\n\n#import <SCFoundation/NSArray+Helpers.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@implementation SCCaptureFaceDetectionParser {\n    CGFloat _minimumArea;\n}\n\n- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea\n{\n    self = [super init];\n    if (self) {\n        _minimumArea = minimumArea;\n    }\n    return self;\n}\n\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:\n    (NSArray<__kindof AVMetadataObject *> *)metadataObjects\n{\n    SCTraceODPCompatibleStart(2);\n    NSMutableArray *faceObjects = [NSMutableArray array];\n    [metadataObjects\n        enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {\n            if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {\n                [faceObjects addObject:obj];\n            }\n        }];\n\n    SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);\n\n    NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =\n        [NSMutableDictionary dictionaryWithCapacity:faceObjects.count];\n    for (AVMetadataFaceObject *faceObject in faceObjects) {\n        CGRect bounds = faceObject.bounds;\n        if (CGRectGetWidth(bounds) * CGRectGetHeight(bounds) >= _minimumArea) {\n            [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:bounds] forKey:@(faceObject.faceID)];\n        }\n    }\n    return faceBoundsByFaceID;\n}\n\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features\n                                                                 withImageSize:(CGSize)imageSize\n                                                              imageOrientation:\n        
                                                          (CGImagePropertyOrientation)imageOrientation\n{\n    SCTraceODPCompatibleStart(2);\n    NSArray<CIFaceFeature *> *faceFeatures = [features filteredArrayUsingBlock:^BOOL(id _Nonnull evaluatedObject) {\n        return [evaluatedObject isKindOfClass:[CIFaceFeature class]];\n    }];\n\n    SC_GUARD_ELSE_RETURN_VALUE(faceFeatures.count > 0, nil);\n\n    NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =\n        [NSMutableDictionary dictionaryWithCapacity:faceFeatures.count];\n    CGFloat width = imageSize.width;\n    CGFloat height = imageSize.height;\n    SCLogGeneralInfo(@\"Face feature count:%d\", faceFeatures.count);\n    for (CIFaceFeature *faceFeature in faceFeatures) {\n        SCLogGeneralInfo(@\"Face feature: hasTrackingID:%d, bounds:%@\", faceFeature.hasTrackingID,\n                         NSStringFromCGRect(faceFeature.bounds));\n        if (faceFeature.hasTrackingID) {\n            CGRect transferredBounds;\n            // Somehow the detected bounds for back camera is mirrored.\n            if (imageOrientation == kCGImagePropertyOrientationRight) {\n                transferredBounds = CGRectMake(\n                    CGRectGetMinX(faceFeature.bounds) / width, 1 - CGRectGetMaxY(faceFeature.bounds) / height,\n                    CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);\n            } else {\n                transferredBounds = CGRectMake(\n                    CGRectGetMinX(faceFeature.bounds) / width, CGRectGetMinY(faceFeature.bounds) / height,\n                    CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height);\n            }\n            if (CGRectGetWidth(transferredBounds) * CGRectGetHeight(transferredBounds) >= _minimumArea) {\n                [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:transferredBounds]\n                                       
forKey:@(faceFeature.trackingID)];\n            }\n        }\n    }\n    return faceBoundsByFaceID;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureFaceDetector.h",
    "content": "//\n//  SCCaptureFaceDetector.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/27/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This protocol declares properties and methods that are used for face detectors.\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n@class SCQueuePerformer;\n@class SCCaptureFaceDetectorTrigger;\n@class SCCaptureFaceDetectionParser;\n\n@protocol SCCaptureFaceDetector <NSObject>\n\n@property (nonatomic, strong, readonly) SCCaptureFaceDetectorTrigger *trigger;\n\n@property (nonatomic, strong, readonly) SCCaptureFaceDetectionParser *parser;\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n- (SCQueuePerformer *)detectionPerformer;\n\n- (void)startDetection;\n\n- (void)stopDetection;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureFaceDetectorTrigger.h",
    "content": "//\n//  SCCaptureFaceDetectorTrigger.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/22/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is used to control when should SCCaptureFaceDetector starts and stops.\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@protocol SCCaptureFaceDetector;\n\n@interface SCCaptureFaceDetectorTrigger : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureFaceDetectorTrigger.m",
    "content": "//\n//  SCCaptureFaceDetectorTrigger.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/22/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureFaceDetectorTrigger.h\"\n\n#import \"SCCaptureFaceDetector.h\"\n\n#import <SCFoundation/SCAppLifecycle.h>\n#import <SCFoundation/SCIdleMonitor.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTaskManager.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCCaptureFaceDetectorTrigger () {\n    id<SCCaptureFaceDetector> __weak _detector;\n}\n@end\n\n@implementation SCCaptureFaceDetectorTrigger\n\n- (instancetype)initWithDetector:(id<SCCaptureFaceDetector>)detector\n{\n    self = [super init];\n    if (self) {\n        _detector = detector;\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_applicationDidBecomeActive)\n                                                     name:kSCPostponedUIApplicationDidBecomeActiveNotification\n                                                   object:nil];\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_applicationWillResignActive)\n                                                     name:UIApplicationWillResignActiveNotification\n                                                   object:nil];\n    }\n    return self;\n}\n\n#pragma mark - Internal Methods\n- (void)_applicationWillResignActive\n{\n    SCTraceODPCompatibleStart(2);\n    [self _stopDetection];\n}\n\n- (void)_applicationDidBecomeActive\n{\n    SCTraceODPCompatibleStart(2);\n    [self _waitUntilAppStartCompleteToStartDetection];\n}\n\n- (void)_waitUntilAppStartCompleteToStartDetection\n{\n    SCTraceODPCompatibleStart(2);\n    @weakify(self);\n\n    if (SCExperimentWithWaitUntilIdleReplacement()) {\n        [[SCTaskManager sharedManager] 
addTaskToRunWhenAppIdle:\"SCCaptureFaceDetectorTrigger.startDetection\"\n                                                     performer:[_detector detectionPerformer]\n                                                         block:^{\n                                                             @strongify(self);\n                                                             SC_GUARD_ELSE_RETURN(self);\n\n                                                             [self _startDetection];\n                                                         }];\n    } else {\n        [[SCIdleMonitor sharedInstance] waitUntilIdleForTag:\"SCCaptureFaceDetectorTrigger.startDetection\"\n                                              callbackQueue:[_detector detectionPerformer].queue\n                                                      block:^{\n                                                          @strongify(self);\n                                                          SC_GUARD_ELSE_RETURN(self);\n                                                          [self _startDetection];\n                                                      }];\n    }\n}\n\n- (void)_startDetection\n{\n    SCTraceODPCompatibleStart(2);\n    [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{\n        [_detector startDetection];\n    }];\n}\n\n- (void)_stopDetection\n{\n    SCTraceODPCompatibleStart(2);\n    [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{\n        [_detector stopDetection];\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureMetadataObjectParser.h",
    "content": "//\n//  SCCaptureMetadataObjectParser.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/13/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class offers class methods to parse AVMetadataObject.\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCCaptureMetadataObjectParser : NSObject\n\n/**\n Parse face bounds from AVMetadataObject.\n\n @param metadataObjects An array of AVMetadataObject.\n @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString.\n */\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:\n    (NSArray<__kindof AVMetadataObject *> *)metadataObjects;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureMetadataObjectParser.m",
    "content": "//\n//  SCCaptureMetadataObjectParser.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/13/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureMetadataObjectParser.h\"\n\n#import <SCBase/SCMacros.h>\n\n@import UIKit;\n\n@implementation SCCaptureMetadataObjectParser\n\n- (NSDictionary<NSNumber *, NSValue *> *)parseFaceBoundsByFaceIDFromMetadataObjects:\n    (NSArray<__kindof AVMetadataObject *> *)metadataObjects\n{\n    NSMutableArray *faceObjects = [NSMutableArray array];\n    [metadataObjects\n        enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {\n            if ([obj isKindOfClass:[AVMetadataFaceObject class]]) {\n                [faceObjects addObject:obj];\n            }\n        }];\n\n    SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil);\n\n    NSMutableDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =\n        [NSMutableDictionary dictionaryWithCapacity:faceObjects.count];\n    for (AVMetadataFaceObject *faceObject in faceObjects) {\n        [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:faceObject.bounds] forKey:@(faceObject.faceID)];\n    }\n    return faceBoundsByFaceID;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureMetadataOutputDetector.h",
    "content": "//\n//  SCCaptureMetadataOutputDetector.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 12/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n//  This class is intended to detect faces in Camera. It receives AVMetadataFaceObjects, and announce the bounds and\n//  faceIDs.\n\n#import \"SCCaptureFaceDetector.h\"\n\n#import <SCBase/SCMacros.h>\n\n@interface SCCaptureMetadataOutputDetector : NSObject <SCCaptureFaceDetector>\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCaptureMetadataOutputDetector.m",
    "content": "//\n//  SCCaptureMetadataOutputDetector.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 12/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureMetadataOutputDetector.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureFaceDetectionParser.h\"\n#import \"SCCaptureFaceDetectorTrigger.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCaptureSession.h\"\n#import \"SCManagedCapturer.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCFoundation/UIImage+CVPixelBufferRef.h>\n\n#define SCLogCaptureMetaDetectorInfo(fmt, ...)                                                                         \\\n    SCLogCoreCameraInfo(@\"[SCCaptureMetadataOutputDetector] \" fmt, ##__VA_ARGS__)\n#define SCLogCaptureMetaDetectorWarning(fmt, ...)                                                                      \\\n    SCLogCoreCameraWarning(@\"[SCCaptureMetadataOutputDetector] \" fmt, ##__VA_ARGS__)\n#define SCLogCaptureMetaDetectorError(fmt, ...)                                                                        
\\\n    SCLogCoreCameraError(@\"[SCCaptureMetadataOutputDetector] \" fmt, ##__VA_ARGS__)\n\nstatic char *const kSCCaptureMetadataOutputDetectorProcessQueue =\n    \"com.snapchat.capture-metadata-output-detector-process\";\n\nstatic const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces.\n\n@interface SCCaptureMetadataOutputDetector () <AVCaptureMetadataOutputObjectsDelegate>\n\n@end\n\n@implementation SCCaptureMetadataOutputDetector {\n    BOOL _isDetecting;\n\n    AVCaptureMetadataOutput *_metadataOutput;\n    SCCaptureResource *_captureResource;\n\n    SCCaptureFaceDetectionParser *_parser;\n    NSInteger _numberOfSequentialFramesWithFaces;\n    NSUInteger _detectionFrequency;\n\n    SCQueuePerformer *_callbackPerformer;\n    SCQueuePerformer *_metadataProcessPerformer;\n\n    SCCaptureFaceDetectorTrigger *_trigger;\n}\n\n@synthesize trigger = _trigger;\n@synthesize parser = _parser;\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    SCTraceODPCompatibleStart(2);\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"SCCaptureResource should not be nil\");\n        SCAssert(captureResource.managedSession.avSession, @\"AVCaptureSession should not be nil\");\n        SCAssert(captureResource.queuePerformer, @\"SCQueuePerformer should not be nil\");\n        _metadataOutput = [AVCaptureMetadataOutput new];\n        _callbackPerformer = captureResource.queuePerformer;\n        _captureResource = captureResource;\n        _detectionFrequency = SCExperimentWithFaceDetectionFrequency();\n\n        _parser = [[SCCaptureFaceDetectionParser alloc]\n            initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];\n        _metadataProcessPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue\n                                                           qualityOfService:QOS_CLASS_DEFAULT\n              
                                                    queueType:DISPATCH_QUEUE_SERIAL\n                                                                    context:SCQueuePerformerContextCamera];\n        if ([self _initDetection]) {\n            _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];\n        }\n    }\n    return self;\n}\n\n- (AVCaptureSession *)_captureSession\n{\n    // _captureResource.avSession may change, so we don't retain any specific AVCaptureSession.\n    return _captureResource.managedSession.avSession;\n}\n\n- (BOOL)_initDetection\n{\n    BOOL success = NO;\n    if ([[self _captureSession] canAddOutput:_metadataOutput]) {\n        [[self _captureSession] addOutput:_metadataOutput];\n        if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {\n            _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;\n            _metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];\n            success = YES;\n            SCLogCaptureMetaDetectorInfo(@\"AVMetadataObjectTypeFace detection successfully enabled.\");\n        } else {\n            [[self _captureSession] removeOutput:_metadataOutput];\n            success = NO;\n            SCLogCaptureMetaDetectorError(@\"AVMetadataObjectTypeFace is not available for \"\n                                          @\"AVMetadataOutput[%@]\",\n                                          _metadataOutput);\n        }\n    } else {\n        success = NO;\n        SCLogCaptureMetaDetectorError(@\"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output\",\n                                      [self _captureSession], _metadataOutput);\n    }\n    return success;\n}\n\n- (void)startDetection\n{\n    SCAssert([[self detectionPerformer] isCurrentPerformer], @\"Calling -startDetection in an invalid queue.\");\n    SC_GUARD_ELSE_RETURN(!_isDetecting);\n    [_captureResource.queuePerformer 
performImmediatelyIfCurrentPerformer:^{\n        [_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue];\n        _isDetecting = YES;\n        SCLogCaptureMetaDetectorInfo(@\"AVMetadataObjectTypeFace detection successfully enabled.\");\n    }];\n}\n\n- (void)stopDetection\n{\n    SCAssert([[self detectionPerformer] isCurrentPerformer], @\"Calling -stopDetection in an invalid queue.\");\n    SC_GUARD_ELSE_RETURN(_isDetecting);\n    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{\n        [_metadataOutput setMetadataObjectsDelegate:nil queue:NULL];\n        _isDetecting = NO;\n        SCLogCaptureMetaDetectorInfo(@\"AVMetadataObjectTypeFace detection successfully disabled.\");\n    }];\n}\n\n- (SCQueuePerformer *)detectionPerformer\n{\n    return _captureResource.queuePerformer;\n}\n\n#pragma mark - AVCaptureMetadataOutputObjectsDelegate\n- (void)captureOutput:(AVCaptureOutput *)output\n    didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects\n              fromConnection:(AVCaptureConnection *)connection\n{\n    SCTraceODPCompatibleStart(2);\n\n    BOOL shouldNotify = NO;\n    if (metadataObjects.count == 0 &&\n        _numberOfSequentialFramesWithFaces !=\n            kDefaultNumberOfSequentialFramesWithFaces) { // There were faces detected before, but there is no face right\n                                                         // now, so send out the notification.\n        _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces;\n        shouldNotify = YES;\n    } else if (metadataObjects.count > 0) {\n        _numberOfSequentialFramesWithFaces++;\n        shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0);\n    }\n\n    SC_GUARD_ELSE_RETURN(shouldNotify);\n\n    NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =\n        [_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];\n\n    [_callbackPerformer 
perform:^{\n        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                didDetectFaceBounds:faceBoundsByFaceID];\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturer.h",
    "content": "//\n//  SCManagedCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/20/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCSnapCreationTriggers.h\"\n\n#import <SCAudio/SCAudioConfiguration.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n#define SCCapturerContext [NSString sc_stringWithFormat:@\"%s/%d\", __FUNCTION__, __LINE__]\n\n@class SCBlackCameraDetector;\n@protocol SCManagedCapturerListener\n, SCManagedCapturerLensAPI, SCDeviceMotionProvider, SCFileInputDecider, SCManagedCapturerARImageCaptureProvider,\n    SCManagedCapturerGLViewManagerAPI, SCManagedCapturerLensAPIProvider, SCManagedCapturerLSAComponentTrackerAPI,\n    SCManagedCapturePreviewLayerControllerDelegate;\n\n@protocol SCCapturer <NSObject>\n\n@property (nonatomic, readonly) SCBlackCameraDetector *blackCameraDetector;\n\n/**\n * Returns id<SCLensProcessingCore> for the current capturer.\n */\n- (id<SCManagedCapturerLensAPI>)lensProcessingCore;\n\n- (CMTime)firstWrittenAudioBufferDelay;\n- (BOOL)audioQueueStarted;\n- (BOOL)isLensApplied;\n- (BOOL)isVideoMirrored;\n\n- (SCVideoCaptureSessionInfo)activeSession;\n\n#pragma mark - Outside resources\n\n- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector\n                             deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider\n                                 fileInputDecider:(id<SCFileInputDecider>)fileInputDecider\n                           arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider\n                                    glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager\n                                  lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider\n                              lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker\n    
managedCapturerPreviewLayerControllerDelegate:\n        (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate;\n\n#pragma mark - Setup, Start & Stop\n\n// setupWithDevicePositionAsynchronously will be called on the main thread, executed off the main thread, exactly once\n- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                            completionHandler:(dispatch_block_t)completionHandler\n                                      context:(NSString *)context;\n\n/**\n *  Important: Remember to call stopRunningAsynchronously to stop the capture session. Dismissing the view is not enough\n *  @param identifier is for knowing the callsite. Pass in the classname of the callsite is generally suggested.\n *  Currently it is used for debugging purposes. In other words the capture session will work without it.\n */\n- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                             context:(NSString *)context;\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                          context:(NSString *)context;\n\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                            after:(NSTimeInterval)delay\n                          context:(NSString *)context;\n\n- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                  context:(NSString *)context;\n\n- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController\n                                 context:(NSString *)context;\n\n#pragma mark - Recording / 
Capture\n\n- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                               context:(NSString *)context;\n/**\n * Unlike captureStillImageAsynchronouslyWithAspectRatio, this captures a single frame from the ongoing video\n * stream. This should be faster but lower quality (and smaller size), and does not play the shutter sound.\n */\n- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:\n            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler\n                                                           context:(NSString *)context;\n\n- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context\n                                  audioConfiguration:(SCAudioConfiguration *)configuration;\n- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                                    audioConfiguration:(SCAudioConfiguration *)configuration\n                                           maxDuration:(NSTimeInterval)maxDuration\n                                               fileURL:(NSURL *)fileURL\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                                               context:(NSString *)context;\n- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context;\n- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context;\n\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration 
*)configuration context:(NSString *)context;\n- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;\n- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler\n                                 context:(NSString *)context;\n\n// AddTimedTask will schedule a task to run, it is thread safe API. Your task will run on main thread, so it is not\n// recommended to add large amount of tasks which all have the same task target time.\n- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context;\n\n// clearTimedTasks will cancel the tasks, it is thread safe API.\n- (void)clearTimedTasksWithContext:(NSString *)context;\n\n#pragma mark - Utilities\n\n- (void)convertViewCoordinates:(CGPoint)viewCoordinates\n             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler\n                       context:(NSString *)context;\n\n- (void)detectLensCategoryOnNextFrame:(CGPoint)point\n                               lenses:(NSArray<SCLens *> *)lenses\n                           completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion\n                              context:(NSString *)context;\n\n#pragma mark - Configurations\n\n- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                      completionHandler:(dispatch_block_t)completionHandler\n                                context:(NSString *)context;\n\n- (void)setFlashActive:(BOOL)flashActive\n     completionHandler:(dispatch_block_t)completionHandler\n               context:(NSString *)context;\n\n- (void)setLensesActive:(BOOL)lensesActive\n      completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context;\n\n- (void)setLensesActive:(BOOL)lensesActive\n          filterFactory:(SCLookseryFilterFactory *)filterFactory\n      
completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context;\n\n- (void)setLensesInTalkActive:(BOOL)lensesActive\n            completionHandler:(dispatch_block_t)completionHandler\n                      context:(NSString *)context;\n\n- (void)setTorchActiveAsynchronously:(BOOL)torchActive\n                   completionHandler:(dispatch_block_t)completionHandler\n                             context:(NSString *)context;\n\n- (void)setNightModeActiveAsynchronously:(BOOL)active\n                       completionHandler:(dispatch_block_t)completionHandler\n                                 context:(NSString *)context;\n\n- (void)lockZoomWithContext:(NSString *)context;\n\n- (void)unlockZoomWithContext:(NSString *)context;\n\n- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context;\n- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor\n                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                              context:(NSString *)context;\n\n- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                        fromUser:(BOOL)fromUser\n                               completionHandler:(dispatch_block_t)completionHandler\n                                         context:(NSString *)context;\n\n- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                completionHandler:(dispatch_block_t)completionHandler\n                                          context:(NSString *)context;\n\n- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                   completionHandler:(dispatch_block_t)completionHandler\n                                             context:(NSString *)context;\n\n- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                             
                     context:(NSString *)context;\n\n// I need to call these three methods from SCAppDelegate explicitly so that I get the latest information.\n- (void)applicationDidEnterBackground;\n- (void)applicationWillEnterForeground;\n- (void)applicationDidBecomeActive;\n- (void)applicationWillResignActive;\n- (void)mediaServicesWereReset;\n- (void)mediaServicesWereLost;\n\n#pragma mark - Add / Remove Listener\n\n- (void)addListener:(id<SCManagedCapturerListener>)listener;\n- (void)removeListener:(id<SCManagedCapturerListener>)listener;\n- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;\n- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener;\n- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n\n- (NSString *)debugInfo;\n\n- (id<SCManagedVideoDataSource>)currentVideoDataSource;\n\n- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback;\n\n// Need to be visible so that classes like SCCaptureSessionFixer can manage capture session\n- (void)recreateAVCaptureSession;\n\n#pragma mark - Snap Creation triggers\n\n- (SCSnapCreationTriggers *)snapCreationTriggers;\n\n@optional\n\n- (BOOL)authorizedForVideoCapture;\n\n- (void)preloadVideoCaptureAuthorization;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerBufferedVideoWriter.h",
    "content": "//\n//  SCCapturerBufferedVideoWriter.h\n//  Snapchat\n//\n//  Created by Chao Pang on 12/5/17.\n//\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n#import <SCManagedVideoCapturerOutputSettings.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCCapturerBufferedVideoWriterDelegate <NSObject>\n\n- (void)videoWriterDidFailWritingWithError:(NSError *)error;\n\n@end\n\n@interface SCCapturerBufferedVideoWriter : NSObject\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(id<SCPerforming>)performer\n                        outputURL:(NSURL *)outputURL\n                         delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate\n                            error:(NSError **)error;\n\n- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings;\n\n- (void)startWritingAtSourceTime:(CMTime)sourceTime;\n\n- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock;\n\n- (void)cancelWriting;\n\n- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n- (void)cleanUp;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerBufferedVideoWriter.m",
    "content": "//\n//  SCCapturerBufferedVideoWriter.m\n//  Snapchat\n//\n//  Created by Chao Pang on 12/5/17.\n//\n\n#import \"SCCapturerBufferedVideoWriter.h\"\n\n#import \"SCAudioCaptureSession.h\"\n#import \"SCCaptureCommon.h\"\n#import \"SCManagedCapturerUtils.h\"\n\n#import <SCBase/SCMacros.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n@implementation SCCapturerBufferedVideoWriter {\n    SCQueuePerformer *_performer;\n    __weak id<SCCapturerBufferedVideoWriterDelegate> _delegate;\n    FBKVOController *_observeController;\n\n    AVAssetWriter *_assetWriter;\n    AVAssetWriterInput *_audioWriterInput;\n    AVAssetWriterInput *_videoWriterInput;\n    AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor;\n    CVPixelBufferPoolRef _defaultPixelBufferPool;\n    CVPixelBufferPoolRef _nightPixelBufferPool;\n    CVPixelBufferPoolRef _lensesPixelBufferPool;\n    CMBufferQueueRef _videoBufferQueue;\n    CMBufferQueueRef _audioBufferQueue;\n}\n\n- (instancetype)initWithPerformer:(id<SCPerforming>)performer\n                        outputURL:(NSURL *)outputURL\n                         delegate:(id<SCCapturerBufferedVideoWriterDelegate>)delegate\n                            error:(NSError **)error\n{\n    self = [super init];\n    if (self) {\n        _performer = performer;\n        _delegate = delegate;\n        _observeController = [[FBKVOController alloc] initWithObserver:self];\n        CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),\n                            &_videoBufferQueue);\n        CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(),\n                            &_audioBufferQueue);\n        _assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];\n        if 
(_assetWriter == nil) {\n            self = nil;\n            return self;\n        }\n    }\n    return self;\n}\n\n- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    SCAssert(outputSettings, @\"empty output setting\");\n    // Audio\n    SCTraceSignal(@\"Derive audio output setting\");\n    NSDictionary *audioOutputSettings = @{\n        AVFormatIDKey : @(kAudioFormatMPEG4AAC),\n        AVNumberOfChannelsKey : @(1),\n        AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate),\n        AVEncoderBitRateKey : @(outputSettings.audioBitRate)\n    };\n    _audioWriterInput =\n        [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];\n    _audioWriterInput.expectsMediaDataInRealTime = YES;\n\n    // Video\n    SCTraceSignal(@\"Derive video output setting\");\n    size_t outputWidth = outputSettings.width;\n    size_t outputHeight = outputSettings.height;\n    SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0),\n             @\"invalid output size\");\n    NSDictionary *videoCompressionSettings = @{\n        AVVideoAverageBitRateKey : @(outputSettings.videoBitRate),\n        AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval)\n    };\n    NSDictionary *videoOutputSettings = @{\n        AVVideoCodecKey : AVVideoCodecH264,\n        AVVideoWidthKey : @(outputWidth),\n        AVVideoHeightKey : @(outputHeight),\n        AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,\n        AVVideoCompressionPropertiesKey : videoCompressionSettings\n    };\n    _videoWriterInput =\n        [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings];\n    _videoWriterInput.expectsMediaDataInRealTime = YES;\n    CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0);\n    
_videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2);\n    _pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]\n           initWithAssetWriterInput:_videoWriterInput\n        sourcePixelBufferAttributes:@{\n            (NSString *)\n            kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)\n            kCVPixelBufferWidthKey : @(outputWidth), (NSString *)\n            kCVPixelBufferHeightKey : @(outputHeight)\n        }];\n\n    SCTraceSignal(@\"Setup video writer input\");\n    if ([_assetWriter canAddInput:_videoWriterInput]) {\n        [_assetWriter addInput:_videoWriterInput];\n    } else {\n        return NO;\n    }\n\n    SCTraceSignal(@\"Setup audio writer input\");\n    if ([_assetWriter canAddInput:_audioWriterInput]) {\n        [_assetWriter addInput:_audioWriterInput];\n    } else {\n        return NO;\n    }\n\n    return YES;\n}\n\n- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    SC_GUARD_ELSE_RETURN(sampleBuffer);\n    if (!CMBufferQueueIsEmpty(_videoBufferQueue)) {\n        // We need to drain the buffer queue in this case\n        while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors\n            CMSampleBufferRef dequeuedSampleBuffer =\n                (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);\n            if (dequeuedSampleBuffer == NULL) {\n                break;\n            }\n            [self _appendVideoSampleBuffer:dequeuedSampleBuffer];\n            CFRelease(dequeuedSampleBuffer);\n        }\n    }\n    // Fast path, just append this sample buffer if ready\n    if (_videoWriterInput.readyForMoreMediaData) {\n        [self _appendVideoSampleBuffer:sampleBuffer];\n    } else {\n        // It is not ready, queuing the sample buffer\n        CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer);\n    
}\n}\n\n- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    SC_GUARD_ELSE_RETURN(sampleBuffer);\n    if (!CMBufferQueueIsEmpty(_audioBufferQueue)) {\n        // We need to drain the buffer queue in this case\n        while (_audioWriterInput.readyForMoreMediaData) {\n            CMSampleBufferRef dequeuedSampleBuffer =\n                (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);\n            if (dequeuedSampleBuffer == NULL) {\n                break;\n            }\n            [_audioWriterInput appendSampleBuffer:dequeuedSampleBuffer];\n            CFRelease(dequeuedSampleBuffer);\n        }\n    }\n    // fast path, just append this sample buffer if ready\n    if ((_audioWriterInput.readyForMoreMediaData)) {\n        [_audioWriterInput appendSampleBuffer:sampleBuffer];\n    } else {\n        // it is not ready, queuing the sample buffer\n        CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer);\n    }\n}\n\n- (void)startWritingAtSourceTime:(CMTime)sourceTime\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    // To observe the status change on assetWriter because when assetWriter errors out, it only changes the\n    // status, no further delegate callbacks etc.\n    [_observeController observe:_assetWriter\n                        keyPath:@keypath(_assetWriter, status)\n                        options:NSKeyValueObservingOptionNew\n                         action:@selector(assetWriterStatusChanged:)];\n    [_assetWriter startWriting];\n    [_assetWriter startSessionAtSourceTime:sourceTime];\n}\n\n- (void)cancelWriting\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    CMBufferQueueReset(_videoBufferQueue);\n    CMBufferQueueReset(_audioBufferQueue);\n    [_assetWriter cancelWriting];\n}\n\n- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock\n{\n    
SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n\n    while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) {\n        CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);\n        if (audioSampleBuffer == NULL) {\n            break;\n        }\n        [_audioWriterInput appendSampleBuffer:audioSampleBuffer];\n        CFRelease(audioSampleBuffer);\n    }\n    while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) {\n        CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);\n        if (videoSampleBuffer == NULL) {\n            break;\n        }\n        [_videoWriterInput appendSampleBuffer:videoSampleBuffer];\n        CFRelease(videoSampleBuffer);\n    }\n\n    dispatch_block_t finishWritingBlock = ^() {\n        [_assetWriter endSessionAtSourceTime:sourceTime];\n        [_audioWriterInput markAsFinished];\n        [_videoWriterInput markAsFinished];\n        [_assetWriter finishWritingWithCompletionHandler:^{\n            if (completionBlock) {\n                completionBlock();\n            }\n        }];\n    };\n\n    if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) {\n        finishWritingBlock();\n    } else {\n        // We need to drain the samples from the queues before finish writing\n        __block BOOL isAudioDone = NO;\n        __block BOOL isVideoDone = NO;\n        // Audio\n        [_audioWriterInput\n            requestMediaDataWhenReadyOnQueue:_performer.queue\n                                  usingBlock:^{\n                                      if (!CMBufferQueueIsEmpty(_audioBufferQueue) &&\n                                          _assetWriter.status == AVAssetWriterStatusWriting) {\n                                          CMSampleBufferRef audioSampleBuffer =\n                                
              (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue);\n                                          if (audioSampleBuffer) {\n                                              [_audioWriterInput appendSampleBuffer:audioSampleBuffer];\n                                              CFRelease(audioSampleBuffer);\n                                          }\n                                      } else if (!isAudioDone) {\n                                          isAudioDone = YES;\n                                      }\n                                      if (isAudioDone && isVideoDone) {\n                                          finishWritingBlock();\n                                      }\n                                  }];\n\n        // Video\n        [_videoWriterInput\n            requestMediaDataWhenReadyOnQueue:_performer.queue\n                                  usingBlock:^{\n                                      if (!CMBufferQueueIsEmpty(_videoBufferQueue) &&\n                                          _assetWriter.status == AVAssetWriterStatusWriting) {\n                                          CMSampleBufferRef videoSampleBuffer =\n                                              (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue);\n                                          if (videoSampleBuffer) {\n                                              [_videoWriterInput appendSampleBuffer:videoSampleBuffer];\n                                              CFRelease(videoSampleBuffer);\n                                          }\n                                      } else if (!isVideoDone) {\n                                          isVideoDone = YES;\n                                      }\n                                      if (isAudioDone && isVideoDone) {\n                                          finishWritingBlock();\n                                      }\n                                  }];\n    
}\n}\n\n- (void)cleanUp\n{\n    _assetWriter = nil;\n    _videoWriterInput = nil;\n    _audioWriterInput = nil;\n    _pixelBufferAdaptor = nil;\n}\n\n- (void)dealloc\n{\n    CFRelease(_videoBufferQueue);\n    CFRelease(_audioBufferQueue);\n    CVPixelBufferPoolRelease(_defaultPixelBufferPool);\n    CVPixelBufferPoolRelease(_nightPixelBufferPool);\n    CVPixelBufferPoolRelease(_lensesPixelBufferPool);\n    [_observeController unobserveAll];\n}\n\n- (void)assetWriterStatusChanged:(NSDictionary *)change\n{\n    SCTraceStart();\n    if (_assetWriter.status == AVAssetWriterStatusFailed) {\n        SCTraceSignal(@\"Asset writer status failed %@, error %@\", change, _assetWriter.error);\n        [_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]];\n    }\n}\n\n#pragma - Private methods\n\n- (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer\n{\n    SCAssertTrue([SCDeviceName isIphoneX]);\n    const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer);\n    const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer);\n    const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2;\n    const size_t croppedBufferHeight =\n        (size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2;\n    const size_t offsetPointX = inputBufferWidth - croppedBufferWidth;\n    const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2;\n\n    SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) &&\n                                           (inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) &&\n                                           (offsetPointY % 2 == 0) &&\n                                           (inputBufferWidth >= croppedBufferWidth + offsetPointX) &&\n                                           (inputBufferHeight >= croppedBufferHeight + 
offsetPointY),\n                                       SCLogGeneralError(@\"Invalid cropping configuration\"), NULL);\n\n    CVPixelBufferRef croppedPixelBuffer = NULL;\n    CVPixelBufferPoolRef pixelBufferPool =\n        [self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight)\n                                croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)];\n\n    if (pixelBufferPool) {\n        CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer);\n        if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) {\n            SCLogGeneralError(@\"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer\");\n            return NULL;\n        }\n    } else {\n        SCAssertFail(@\"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, \"\n                     @\"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@\",\n                     @(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight));\n        return NULL;\n    }\n    CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);\n    CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0);\n\n    const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer);\n    for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) {\n        size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex);\n        size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex);\n        uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex);\n\n        size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex);\n        size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex);\n        uint8_t *croppedPlaneAdress = (uint8_t 
*)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex);\n\n        // Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X).\n        // However, since UV are packed together in memory, we can use offsetPointX for all planes\n        size_t offsetPlaneBytesX = offsetPointX;\n        size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight;\n\n        inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX;\n        size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow);\n        for (int i = 0; i < croppedPlaneHeight; i++) {\n            memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow);\n            inPlaneAdress += inPlaneBytesPerRow;\n            croppedPlaneAdress += croppedPlaneBytesPerRow;\n        }\n    }\n    CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly);\n    CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0);\n    return croppedPixelBuffer;\n}\n\n- (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize\n{\n    if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) {\n        if (_defaultPixelBufferPool == NULL) {\n            _defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];\n        }\n        return _defaultPixelBufferPool;\n    } else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) {\n        if (_nightPixelBufferPool == NULL) {\n            _nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];\n        }\n        return _nightPixelBufferPool;\n    } else {\n        if (_lensesPixelBufferPool == NULL) {\n            _lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height];\n       
 }\n        return _lensesPixelBufferPool;\n    }\n}\n\n- (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height\n{\n    NSDictionary *attributes = @{\n        (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *)\n        kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *)\n        kCVPixelBufferWidthKey : @(width), (NSString *)\n        kCVPixelBufferHeightKey : @(height)\n    };\n    CVPixelBufferPoolRef pixelBufferPool = NULL;\n    CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,\n                                              (__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool);\n    if (result != kCVReturnSuccess) {\n        SCLogGeneralError(@\"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i\", result);\n        return NULL;\n    }\n\n    return pixelBufferPool;\n}\n\n- (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n    CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n    if ([SCDeviceName isIphoneX]) {\n        CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer];\n        if (croppedPixelBuffer) {\n            [_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime];\n            CVPixelBufferRelease(croppedPixelBuffer);\n        }\n    } else {\n        [_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerDefines.h",
    "content": "//\n//  SCCapturerDefines.h\n//  Snapchat\n//\n//  Created by Chao Pang on 12/20/17.\n//\n\n#import <Foundation/Foundation.h>\n\ntypedef NS_ENUM(NSInteger, SCCapturerLightingConditionType) {\n    SCCapturerLightingConditionTypeNormal = 0,\n    SCCapturerLightingConditionTypeDark,\n    SCCapturerLightingConditionTypeExtremeDark,\n};\n\ntypedef struct SampleBufferMetadata {\n    int isoSpeedRating;\n    float exposureTime;\n    float brightness;\n} SampleBufferMetadata;\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerToken.h",
    "content": "//\n//  SCCapturerToken.h\n//  Snapchat\n//\n//  Created by Xishuo Liu on 3/24/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface SCCapturerToken : NSObject\n\n- (instancetype)initWithIdentifier:(NSString *)identifier NS_DESIGNATED_INITIALIZER;\n\n- (instancetype)init __attribute__((unavailable(\"Use initWithIdentifier: instead.\")));\n- (instancetype) new __attribute__((unavailable(\"Use initWithIdentifier: instead.\")));\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerToken.m",
    "content": "//\n//  SCCapturerToken.m\n//  Snapchat\n//\n//  Created by Xishuo Liu on 3/24/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCapturerToken.h\"\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n@implementation SCCapturerToken {\n    NSString *_identifier;\n}\n\n- (instancetype)initWithIdentifier:(NSString *)identifier\n{\n    if (self = [super init]) {\n        _identifier = identifier.copy;\n    }\n    return self;\n}\n\n- (NSString *)debugDescription\n{\n    return [NSString sc_stringWithFormat:@\"%@_%@\", _identifier, self];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerTokenProvider.h",
    "content": "//\n// Created by Aaron Levine on 10/16/17.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCapturerToken;\n\nNS_ASSUME_NONNULL_BEGIN\n@interface SCCapturerTokenProvider : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n+ (instancetype)providerWithToken:(SCCapturerToken *)token;\n\n- (nullable SCCapturerToken *)getTokenAndInvalidate;\n\n@end\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "ManagedCapturer/SCCapturerTokenProvider.m",
    "content": "//\n// Created by Aaron Levine on 10/16/17.\n//\n\n#import \"SCCapturerTokenProvider.h\"\n\n#import \"SCCapturerToken.h\"\n\n#import <SCBase/SCAssignment.h>\n#import <SCFoundation/SCAssertWrapper.h>\n\n@implementation SCCapturerTokenProvider {\n    SCCapturerToken *_Nullable _token;\n}\n\n+ (instancetype)providerWithToken:(SCCapturerToken *)token\n{\n    return [[self alloc] initWithToken:token];\n}\n\n- (instancetype)initWithToken:(SCCapturerToken *)token\n{\n    self = [super init];\n    if (self) {\n        _token = token;\n    }\n\n    return self;\n}\n\n- (nullable SCCapturerToken *)getTokenAndInvalidate\n{\n    // ensure serial access by requiring calls be on the main thread\n    SCAssertMainThread();\n\n    let token = _token;\n    _token = nil;\n\n    return token;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCExposureState.h",
    "content": "//\n//  SCExposureState.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/10/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@interface SCExposureState : NSObject\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device;\n\n- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCExposureState.m",
    "content": "//\n//  SCExposureState.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/10/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCExposureState.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n\n#import <SCBase/SCMacros.h>\n\n@import AVFoundation;\n\n@implementation SCExposureState {\n    float _ISO;\n    CMTime _exposureDuration;\n}\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n{\n    if (self = [super init]) {\n        _ISO = device.ISO;\n        _exposureDuration = device.exposureDuration;\n    }\n    return self;\n}\n\n- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device\n{\n    if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) {\n        [device runTask:@\"set prior exposure\"\n            withLockedConfiguration:^() {\n                CMTime exposureDuration =\n                    CMTimeClampToRange(_exposureDuration, CMTimeRangeMake(device.activeFormat.minExposureDuration,\n                                                                          device.activeFormat.maxExposureDuration));\n                [device setExposureModeCustomWithDuration:exposureDuration\n                                                      ISO:SC_CLAMP(_ISO, device.activeFormat.minISO,\n                                                                   device.activeFormat.maxISO)\n                                        completionHandler:nil];\n            }];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCFileAudioCaptureSession.h",
    "content": "//\n//  SCFileAudioCaptureSession.h\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 2/2/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCAudioCaptureSession.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCFileAudioCaptureSession : NSObject <SCAudioCaptureSession>\n\n// Linear PCM is required.\n// To best mimic `SCAudioCaptureSession`, use an audio file recorded from it.\n- (void)setFileURL:(NSURL *)fileURL;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCFileAudioCaptureSession.m",
    "content": "//\n//  SCFileAudioCaptureSession.m\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 2/2/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCFileAudioCaptureSession.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCSentinel.h>\n\n@import AudioToolbox;\n\nstatic float const kAudioBufferDurationInSeconds = 0.2; // same as SCAudioCaptureSession\n\nstatic char *const kSCFileAudioCaptureSessionQueueLabel = \"com.snapchat.file-audio-capture-session\";\n\n@implementation SCFileAudioCaptureSession {\n    SCQueuePerformer *_performer;\n    SCSentinel *_sentinel;\n\n    NSURL *_fileURL;\n\n    AudioFileID _audioFile;                         // audio file\n    AudioStreamBasicDescription _asbd;              // audio format (core audio)\n    CMAudioFormatDescriptionRef _formatDescription; // audio format (core media)\n    SInt64 _readCurPacket;                          // current packet index to read\n    UInt32 _readNumPackets;                         // number of packets to read every time\n    UInt32 _readNumBytes;                           // number of bytes to read every time\n    void *_readBuffer;                              // data buffer to hold read packets\n}\n\n@synthesize delegate = _delegate;\n\n#pragma mark - Public\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCFileAudioCaptureSessionQueueLabel\n                                            qualityOfService:QOS_CLASS_UNSPECIFIED\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCamera];\n        _sentinel = [[SCSentinel alloc] init];\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    if (_audioFile) {\n        AudioFileClose(_audioFile);\n    }\n    if 
(_formatDescription) {\n        CFRelease(_formatDescription);\n    }\n    if (_readBuffer) {\n        free(_readBuffer);\n    }\n}\n\n- (void)setFileURL:(NSURL *)fileURL\n{\n    [_performer perform:^{\n        _fileURL = fileURL;\n    }];\n}\n\n#pragma mark - SCAudioCaptureSession\n\n- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate // `sampleRate` ignored\n                                      completionHandler:(audio_capture_session_block)completionHandler\n{\n    [_performer perform:^{\n        BOOL succeeded = [self _setup];\n        int32_t sentinelValue = [_sentinel value];\n        if (completionHandler) {\n            completionHandler(nil);\n        }\n        if (succeeded) {\n            [_performer perform:^{\n                SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);\n                [self _read];\n            }\n                          after:kAudioBufferDurationInSeconds];\n        }\n    }];\n}\n\n- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n{\n    [_performer performAndWait:^{\n        [self _teardown];\n        if (completionHandler) {\n            completionHandler();\n        }\n    }];\n}\n\n#pragma mark - Private\n\n- (BOOL)_setup\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n\n    [_sentinel increment];\n\n    OSStatus status = noErr;\n\n    status = AudioFileOpenURL((__bridge CFURLRef)_fileURL, kAudioFileReadPermission, 0, &_audioFile);\n    if (noErr != status) {\n        SCLogGeneralError(@\"Cannot open file at URL %@, error code %d\", _fileURL, (int)status);\n        return NO;\n    }\n\n    _asbd = (AudioStreamBasicDescription){0};\n    UInt32 asbdSize = sizeof(_asbd);\n    status = AudioFileGetProperty(_audioFile, kAudioFilePropertyDataFormat, &asbdSize, &_asbd);\n    if (noErr != status) {\n        SCLogGeneralError(@\"Cannot get audio data format, error code %d\", (int)status);\n        AudioFileClose(_audioFile);\n        
_audioFile = NULL;\n        return NO;\n    }\n\n    if (kAudioFormatLinearPCM != _asbd.mFormatID) {\n        SCLogGeneralError(@\"Linear PCM is required\");\n        AudioFileClose(_audioFile);\n        _audioFile = NULL;\n        _asbd = (AudioStreamBasicDescription){0};\n        return NO;\n    }\n\n    UInt32 aclSize = 0;\n    AudioChannelLayout *acl = NULL;\n    status = AudioFileGetPropertyInfo(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, NULL);\n    if (noErr == status) {\n        acl = malloc(aclSize);\n        status = AudioFileGetProperty(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, acl);\n        if (noErr != status) {\n            aclSize = 0;\n            free(acl);\n            acl = NULL;\n        }\n    }\n\n    status = CMAudioFormatDescriptionCreate(NULL, &_asbd, aclSize, acl, 0, NULL, NULL, &_formatDescription);\n    if (acl) {\n        free(acl);\n        acl = NULL;\n    }\n    if (noErr != status) {\n        SCLogGeneralError(@\"Cannot create format description, error code %d\", (int)status);\n        AudioFileClose(_audioFile);\n        _audioFile = NULL;\n        _asbd = (AudioStreamBasicDescription){0};\n        return NO;\n    }\n\n    _readCurPacket = 0;\n    _readNumPackets = ceil(_asbd.mSampleRate * kAudioBufferDurationInSeconds);\n    _readNumBytes = _asbd.mBytesPerPacket * _readNumPackets;\n    _readBuffer = malloc(_readNumBytes);\n\n    return YES;\n}\n\n- (void)_read\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n\n    OSStatus status = noErr;\n\n    UInt32 numBytes = _readNumBytes;\n    UInt32 numPackets = _readNumPackets;\n    status = AudioFileReadPacketData(_audioFile, NO, &numBytes, NULL, _readCurPacket, &numPackets, _readBuffer);\n    if (noErr != status) {\n        SCLogGeneralError(@\"Cannot read audio data, error code %d\", (int)status);\n        return;\n    }\n    if (0 == numPackets) {\n        return;\n    }\n    CMTime PTS = CMTimeMakeWithSeconds(_readCurPacket / _asbd.mSampleRate, 
600);\n\n    _readCurPacket += numPackets;\n\n    CMBlockBufferRef dataBuffer = NULL;\n    status = CMBlockBufferCreateWithMemoryBlock(NULL, NULL, numBytes, NULL, NULL, 0, numBytes, 0, &dataBuffer);\n    if (kCMBlockBufferNoErr == status) {\n        if (dataBuffer) {\n            CMBlockBufferReplaceDataBytes(_readBuffer, dataBuffer, 0, numBytes);\n            CMSampleBufferRef sampleBuffer = NULL;\n            CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _formatDescription,\n                                                            numPackets, PTS, NULL, &sampleBuffer);\n            if (sampleBuffer) {\n                [_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];\n                CFRelease(sampleBuffer);\n            }\n            CFRelease(dataBuffer);\n        }\n    } else {\n        SCLogGeneralError(@\"Cannot create data buffer, error code %d\", (int)status);\n    }\n\n    int32_t sentinelValue = [_sentinel value];\n    [_performer perform:^{\n        SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue);\n        [self _read];\n    }\n                  after:kAudioBufferDurationInSeconds];\n}\n\n- (void)_teardown\n{\n    SCAssert([_performer isCurrentPerformer], @\"\");\n\n    [_sentinel increment];\n\n    if (_audioFile) {\n        AudioFileClose(_audioFile);\n        _audioFile = NULL;\n    }\n    _asbd = (AudioStreamBasicDescription){0};\n    if (_formatDescription) {\n        CFRelease(_formatDescription);\n        _formatDescription = NULL;\n    }\n    _readCurPacket = 0;\n    _readNumPackets = 0;\n    _readNumBytes = 0;\n    if (_readBuffer) {\n        free(_readBuffer);\n        _readBuffer = NULL;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedAudioStreamer.h",
    "content": "//\n//  SCManagedAudioStreamer.h\n//  Snapchat\n//\n//  Created by Ricardo Sánchez-Sáez on 7/28/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCCameraFoundation/SCManagedAudioDataSource.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCManagedAudioStreamer : NSObject <SCManagedAudioDataSource>\n\n+ (instancetype)sharedInstance;\n\n+ (instancetype) new NS_UNAVAILABLE;\n- (instancetype)init NS_UNAVAILABLE;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedAudioStreamer.m",
    "content": "//\n//  SCManagedAudioStreamer.m\n//  Snapchat\n//\n//  Created by Ricardo Sánchez-Sáez on 7/28/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedAudioStreamer.h\"\n\n#import \"SCAudioCaptureSession.h\"\n\n#import <SCAudio/SCAudioSession.h>\n#import <SCCameraFoundation/SCManagedAudioDataSourceListenerAnnouncer.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <SCAudioScope/SCAudioScope.h>\n#import <SCAudioScope/SCAudioSessionExperimentAdapter.h>\n\nstatic char *const kSCManagedAudioStreamerQueueLabel = \"com.snapchat.audioStreamerQueue\";\n\n@interface SCManagedAudioStreamer () <SCAudioCaptureSessionDelegate>\n\n@end\n\n@implementation SCManagedAudioStreamer {\n    SCAudioCaptureSession *_captureSession;\n    SCAudioConfigurationToken *_audioConfiguration;\n    SCManagedAudioDataSourceListenerAnnouncer *_announcer;\n    SCScopedAccess<SCMutableAudioSession *> *_scopedMutableAudioSession;\n}\n\n@synthesize performer = _performer;\n\n+ (instancetype)sharedInstance\n{\n    static dispatch_once_t onceToken;\n    static SCManagedAudioStreamer *managedAudioStreamer;\n    dispatch_once(&onceToken, ^{\n        managedAudioStreamer = [[SCManagedAudioStreamer alloc] initSharedInstance];\n    });\n    return managedAudioStreamer;\n}\n\n- (instancetype)initSharedInstance\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedAudioStreamerQueueLabel\n                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCamera];\n        _announcer = [[SCManagedAudioDataSourceListenerAnnouncer alloc] init];\n        _captureSession = [[SCAudioCaptureSession alloc] init];\n        _captureSession.delegate = self;\n    
}\n    return self;\n}\n\n- (BOOL)isStreaming\n{\n    return _audioConfiguration != nil;\n}\n\n- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration\n{\n    SCTraceStart();\n    [_performer perform:^{\n        if (!self.isStreaming) {\n            // Begin audio recording asynchronously. First we need to have the proper audio session category.\n            _audioConfiguration = [SCAudioSessionExperimentAdapter\n                configureWith:configuration\n                    performer:_performer\n                   completion:^(NSError *error) {\n                       [_captureSession\n                           beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate\n                                                         completionHandler:NULL];\n\n                   }];\n        }\n    }];\n}\n\n- (void)stopStreaming\n{\n    [_performer perform:^{\n        if (self.isStreaming) {\n            [_captureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];\n            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];\n            _audioConfiguration = nil;\n        }\n    }];\n}\n\n- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener\n{\n    SCTraceStart();\n    [_announcer addListener:listener];\n}\n\n- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener\n{\n    SCTraceStart();\n    [_announcer removeListener:listener];\n}\n\n- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession\n      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h",
    "content": "//\n//  SCManagedCaptureDevice+SCManagedCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/9/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCManagedCaptureDevice (SCManagedCapturer)\n\n@property (nonatomic, strong, readonly) AVCaptureDevice *device;\n\n@property (nonatomic, strong, readonly) AVCaptureDeviceInput *deviceInput;\n\n@property (nonatomic, copy, readonly) NSError *error;\n\n@property (nonatomic, assign, readonly) BOOL isConnected;\n\n@property (nonatomic, strong, readonly) AVCaptureDeviceFormat *activeFormat;\n\n// Setup and hook up with device\n\n- (BOOL)setDeviceAsInput:(AVCaptureSession *)session;\n\n- (void)removeDeviceAsInput:(AVCaptureSession *)session;\n\n- (void)resetDeviceAsInput;\n\n// Configurations\n\n@property (nonatomic, assign) BOOL flashActive;\n\n@property (nonatomic, assign) BOOL torchActive;\n\n@property (nonatomic, assign) float zoomFactor;\n\n@property (nonatomic, assign, readonly) BOOL liveVideoStreamingActive;\n\n@property (nonatomic, assign, readonly) BOOL isNightModeActive;\n\n@property (nonatomic, assign, readonly) BOOL isFlashSupported;\n\n@property (nonatomic, assign, readonly) BOOL isTorchSupported;\n\n- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session;\n\n- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session;\n\n- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session;\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;\n\n- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;\n\n- (void)continuousAutofocus;\n\n- (void)setRecording:(BOOL)recording;\n\n- (void)updateActiveFormatWithSession:(AVCaptureSession *)session;\n\n// Utilities\n\n- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates\n                         viewSize:(CGSize)viewSize\n 
                    videoGravity:(NSString *)videoGravity;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h",
    "content": "//\n//  SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h\n//  Snapchat\n//\n//  Created by Kam Sheffield on 10/29/15.\n//  Copyright © 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCManagedCaptureDevice (SCManagedDeviceCapacityAnalyzer)\n\n@property (nonatomic, strong, readonly) AVCaptureDevice *device;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDevice.h",
    "content": "//\n//  SCManagedCaptureDevice.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/22/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>\n#import <SCCameraFoundation/SCManagedCaptureDeviceProtocol.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\nextern CGFloat const kSCMaxVideoZoomFactor;\nextern CGFloat const kSCMinVideoZoomFactor;\n\n@class SCManagedCaptureDevice;\n\n@protocol SCManagedCaptureDeviceDelegate <NSObject>\n\n@optional\n- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure;\n- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint;\n- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint;\n\n@end\n\n@interface SCManagedCaptureDevice : NSObject <SCManagedCaptureDeviceProtocol>\n\n@property (nonatomic, weak) id<SCManagedCaptureDeviceDelegate> delegate;\n\n// These two class methods are thread safe\n+ (instancetype)front;\n\n+ (instancetype)back;\n\n+ (instancetype)dualCamera;\n\n+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position;\n\n+ (BOOL)is1080pSupported;\n\n+ (BOOL)isMixCaptureSupported;\n\n+ (BOOL)isNightModeSupported;\n\n+ (BOOL)isEnhancedNightModeSupported;\n\n+ (CGSize)defaultActiveFormatResolution;\n\n+ (CGSize)nightModeActiveFormatResolution;\n\n- (BOOL)softwareZoom;\n\n- (SCManagedCaptureDevicePosition)position;\n\n- (BOOL)isAvailable;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDevice.m",
    "content": "//\n//  SCManagedCaptureDevice.m\n//  Snapchat\n//\n//  Created by Liu Liu on 4/22/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureCommon.h\"\n#import \"SCCaptureDeviceResolver.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCaptureDeviceAutoExposureHandler.h\"\n#import \"SCManagedCaptureDeviceAutoFocusHandler.h\"\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n#import \"SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h\"\n#import \"SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h\"\n#import \"SCManagedCaptureDeviceFocusHandler.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedDeviceCapacityAnalyzer.h\"\n\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <FBKVOController/FBKVOController.h>\n\nstatic int32_t const kSCManagedCaptureDeviceMaximumHighFrameRate = 30;\nstatic int32_t const kSCManagedCaptureDeviceMaximumLowFrameRate = 24;\n\nstatic float const kSCManagedCaptureDevicecSoftwareMaxZoomFactor = 8;\n\nCGFloat const kSCMaxVideoZoomFactor = 100; // the max videoZoomFactor acceptable\nCGFloat const kSCMinVideoZoomFactor = 1;\n\nstatic NSDictionary *SCBestHRSIFormatsForHeights(NSArray *desiredHeights, NSArray *formats, BOOL shouldSupportDepth)\n{\n    NSMutableDictionary *bestHRSIHeights = [NSMutableDictionary dictionary];\n    for (NSNumber *height in desiredHeights) {\n        bestHRSIHeights[height] = @0;\n    }\n    NSMutableDictionary *bestHRSIFormats = [NSMutableDictionary dictionary];\n    for (AVCaptureDeviceFormat *format in formats) {\n        if (@available(ios 11.0, *)) {\n            if (shouldSupportDepth && format.supportedDepthDataFormats.count == 0) {\n                continue;\n            }\n        }\n        if 
(CMFormatDescriptionGetMediaSubType(format.formatDescription) !=\n            kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {\n            continue;\n        }\n        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);\n        NSNumber *height = @(dimensions.height);\n        NSNumber *bestHRSI = bestHRSIHeights[height];\n        if (bestHRSI) {\n            CMVideoDimensions hrsi = format.highResolutionStillImageDimensions;\n            // If we enabled HSRI, we only intersted in the ones that is good.\n            if (hrsi.height > [bestHRSI intValue]) {\n                bestHRSIHeights[height] = @(hrsi.height);\n                bestHRSIFormats[height] = format;\n            }\n        }\n    }\n    return [bestHRSIFormats copy];\n}\n\nstatic inline float SCDegreesToRadians(float theta)\n{\n    return theta * (float)M_PI / 180.f;\n}\n\nstatic inline float SCRadiansToDegrees(float theta)\n{\n    return theta * 180.f / (float)M_PI;\n}\n\n@implementation SCManagedCaptureDevice {\n    AVCaptureDevice *_device;\n    AVCaptureDeviceInput *_deviceInput;\n    AVCaptureDeviceFormat *_defaultFormat;\n    AVCaptureDeviceFormat *_nightFormat;\n    AVCaptureDeviceFormat *_liveVideoStreamingFormat;\n    SCManagedCaptureDevicePosition _devicePosition;\n\n    // Configurations on the device, shortcut to avoid re-configurations\n    id<SCManagedCaptureDeviceExposureHandler> _exposureHandler;\n    id<SCManagedCaptureDeviceFocusHandler> _focusHandler;\n\n    FBKVOController *_observeController;\n\n    // For the private category methods\n    NSError *_error;\n    BOOL _softwareZoom;\n    BOOL _isConnected;\n    BOOL _flashActive;\n    BOOL _torchActive;\n    BOOL _liveVideoStreamingActive;\n    float _zoomFactor;\n    BOOL _isNightModeActive;\n    BOOL _captureDepthData;\n}\n@synthesize fieldOfView = _fieldOfView;\n\n+ (instancetype)front\n{\n    SCTraceStart();\n    static dispatch_once_t onceToken;\n    static 
SCManagedCaptureDevice *front;\n    static dispatch_semaphore_t semaphore;\n    dispatch_once(&onceToken, ^{\n        semaphore = dispatch_semaphore_create(1);\n    });\n    /* You can use the tweak below to intentionally kill camera in debug.\n    if (SCIsDebugBuild() && SCCameraTweaksKillFrontCamera()) {\n        return nil;\n    }\n     */\n    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);\n    if (!front) {\n        AVCaptureDevice *device =\n            [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];\n        if (device) {\n            front = [[SCManagedCaptureDevice alloc] initWithDevice:device\n                                                    devicePosition:SCManagedCaptureDevicePositionFront];\n        }\n    }\n    dispatch_semaphore_signal(semaphore);\n    return front;\n}\n\n+ (instancetype)back\n{\n    SCTraceStart();\n    static dispatch_once_t onceToken;\n    static SCManagedCaptureDevice *back;\n    static dispatch_semaphore_t semaphore;\n    dispatch_once(&onceToken, ^{\n        semaphore = dispatch_semaphore_create(1);\n    });\n    /* You can use the tweak below to intentionally kill camera in debug.\n     if (SCIsDebugBuild() &&  SCCameraTweaksKillBackCamera()) {\n       return nil;\n     }\n     */\n    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);\n    if (!back) {\n        AVCaptureDevice *device =\n            [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];\n        if (device) {\n            back = [[SCManagedCaptureDevice alloc] initWithDevice:device\n                                                   devicePosition:SCManagedCaptureDevicePositionBack];\n        }\n    }\n    dispatch_semaphore_signal(semaphore);\n    return back;\n}\n\n+ (SCManagedCaptureDevice *)dualCamera\n{\n    SCTraceStart();\n    static dispatch_once_t onceToken;\n    static SCManagedCaptureDevice *dualCamera;\n    static dispatch_semaphore_t 
semaphore;\n    dispatch_once(&onceToken, ^{\n        semaphore = dispatch_semaphore_create(1);\n    });\n    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);\n    if (!dualCamera) {\n        AVCaptureDevice *device = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];\n        if (device) {\n            dualCamera = [[SCManagedCaptureDevice alloc] initWithDevice:device\n                                                         devicePosition:SCManagedCaptureDevicePositionBackDualCamera];\n        }\n    }\n    dispatch_semaphore_signal(semaphore);\n    return dualCamera;\n}\n\n+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position\n{\n    switch (position) {\n    case SCManagedCaptureDevicePositionFront:\n        return [self front];\n    case SCManagedCaptureDevicePositionBack:\n        return [self back];\n    case SCManagedCaptureDevicePositionBackDualCamera:\n        return [self dualCamera];\n    }\n}\n\n+ (BOOL)is1080pSupported\n{\n    return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];\n}\n\n+ (BOOL)isMixCaptureSupported\n{\n    return !![self front] && !![self back];\n}\n\n+ (BOOL)isNightModeSupported\n{\n    return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6orNewer];\n}\n\n+ (BOOL)isEnhancedNightModeSupported\n{\n    if (SC_AT_LEAST_IOS_11) {\n        return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];\n    }\n    return NO;\n}\n\n+ (CGSize)defaultActiveFormatResolution\n{\n    if ([SCDeviceName isIphoneX]) {\n        return CGSizeMake(kSCManagedCapturerVideoActiveFormatWidth1080p,\n                          kSCManagedCapturerVideoActiveFormatHeight1080p);\n    }\n    return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,\n                      kSCManagedCapturerDefaultVideoActiveFormatHeight);\n}\n\n+ (CGSize)nightModeActiveFormatResolution\n{\n    if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {\n        return 
CGSizeMake(kSCManagedCapturerNightVideoHighResActiveFormatWidth,\n                          kSCManagedCapturerNightVideoHighResActiveFormatHeight);\n    }\n    return CGSizeMake(kSCManagedCapturerNightVideoDefaultResActiveFormatWidth,\n                      kSCManagedCapturerNightVideoDefaultResActiveFormatHeight);\n}\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _device = device;\n        _devicePosition = devicePosition;\n\n        if (SCCameraTweaksEnableFaceDetectionFocus(devicePosition)) {\n            _exposureHandler = [[SCManagedCaptureDeviceFaceDetectionAutoExposureHandler alloc]\n                 initWithDevice:device\n                pointOfInterest:CGPointMake(0.5, 0.5)\n                managedCapturer:[SCManagedCapturer sharedInstance]];\n            _focusHandler = [[SCManagedCaptureDeviceFaceDetectionAutoFocusHandler alloc]\n                 initWithDevice:device\n                pointOfInterest:CGPointMake(0.5, 0.5)\n                managedCapturer:[SCManagedCapturer sharedInstance]];\n        } else {\n            _exposureHandler = [[SCManagedCaptureDeviceAutoExposureHandler alloc] initWithDevice:device\n                                                                                 pointOfInterest:CGPointMake(0.5, 0.5)];\n            _focusHandler = [[SCManagedCaptureDeviceAutoFocusHandler alloc] initWithDevice:device\n                                                                           pointOfInterest:CGPointMake(0.5, 0.5)];\n        }\n        _observeController = [[FBKVOController alloc] initWithObserver:self];\n        [self _setAsExposureListenerForDevice:device];\n        if (SCCameraTweaksEnableExposurePointObservation()) {\n            [self _observeExposurePointForDevice:device];\n        }\n        if (SCCameraTweaksEnableFocusPointObservation()) {\n            [self 
_observeFocusPointForDevice:device];\n        }\n\n        _zoomFactor = 1.0;\n        [self _findSupportedFormats];\n    }\n    return self;\n}\n\n- (SCManagedCaptureDevicePosition)position\n{\n    return _devicePosition;\n}\n\n#pragma mark - Setup and hook up with device\n\n- (BOOL)setDeviceAsInput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    AVCaptureDeviceInput *deviceInput = [self deviceInput];\n    if ([session canAddInput:deviceInput]) {\n        [session addInput:deviceInput];\n    } else {\n        NSString *previousSessionPreset = session.sessionPreset;\n        session.sessionPreset = AVCaptureSessionPresetInputPriority;\n        // Now we surely can add input\n        if ([session canAddInput:deviceInput]) {\n            [session addInput:deviceInput];\n        } else {\n            session.sessionPreset = previousSessionPreset;\n            return NO;\n        }\n    }\n\n    [self _enableSubjectAreaChangeMonitoring];\n\n    [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];\n    if (_device.activeFormat.videoMaxZoomFactor < 1 + 1e-5) {\n        _softwareZoom = YES;\n    } else {\n        _softwareZoom = NO;\n        if (_device.videoZoomFactor != _zoomFactor) {\n            // Reset the zoom factor\n            [self setZoomFactor:_zoomFactor];\n        }\n    }\n\n    [_exposureHandler setVisible:YES];\n    [_focusHandler setVisible:YES];\n\n    _isConnected = YES;\n\n    return YES;\n}\n\n- (void)removeDeviceAsInput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    if (_isConnected) {\n        [session removeInput:_deviceInput];\n        [_exposureHandler setVisible:NO];\n        [_focusHandler setVisible:NO];\n        _isConnected = NO;\n    }\n}\n\n- (void)resetDeviceAsInput\n{\n    _deviceInput = nil;\n    AVCaptureDevice *deviceFound;\n    switch (_devicePosition) {\n    case SCManagedCaptureDevicePositionFront:\n        deviceFound = [[SCCaptureDeviceResolver sharedInstance] 
findAVCaptureDevice:AVCaptureDevicePositionFront];\n        break;\n    case SCManagedCaptureDevicePositionBack:\n        deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];\n        break;\n    case SCManagedCaptureDevicePositionBackDualCamera:\n        deviceFound = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];\n        break;\n    }\n    if (deviceFound) {\n        _device = deviceFound;\n    }\n}\n\n#pragma mark - Configurations\n\n- (void)_findSupportedFormats\n{\n    NSInteger defaultHeight = [SCManagedCaptureDevice defaultActiveFormatResolution].height;\n    NSInteger nightHeight = [SCManagedCaptureDevice nightModeActiveFormatResolution].height;\n    NSInteger liveVideoStreamingHeight = kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;\n    NSArray *heights = @[ @(nightHeight), @(defaultHeight), @(liveVideoStreamingHeight) ];\n    BOOL formatsShouldSupportDepth = _devicePosition == SCManagedCaptureDevicePositionBackDualCamera;\n    NSDictionary *formats = SCBestHRSIFormatsForHeights(heights, _device.formats, formatsShouldSupportDepth);\n    _nightFormat = formats[@(nightHeight)];\n    _defaultFormat = formats[@(defaultHeight)];\n    _liveVideoStreamingFormat = formats[@(liveVideoStreamingHeight)];\n}\n\n- (AVCaptureDeviceFormat *)_bestSupportedFormat\n{\n    if (_isNightModeActive) {\n        return _nightFormat;\n    }\n    if (_liveVideoStreamingActive) {\n        return _liveVideoStreamingFormat;\n    }\n    return _defaultFormat;\n}\n\n- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    if (![SCManagedCaptureDevice isNightModeSupported]) {\n        return;\n    }\n    if (_isNightModeActive == nightModeActive) {\n        return;\n    }\n    _isNightModeActive = nightModeActive;\n    [self updateActiveFormatWithSession:session];\n}\n\n- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession 
*)session\n{\n    SCTraceStart();\n    if (_liveVideoStreamingActive == liveVideoStreaming) {\n        return;\n    }\n    _liveVideoStreamingActive = liveVideoStreaming;\n    [self updateActiveFormatWithSession:session];\n}\n\n- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    _captureDepthData = captureDepthData;\n    [self _findSupportedFormats];\n    [self updateActiveFormatWithSession:session];\n}\n\n- (void)updateActiveFormatWithSession:(AVCaptureSession *)session\n{\n    [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];\n    if (_device.videoZoomFactor != _zoomFactor) {\n        [self setZoomFactor:_zoomFactor];\n    }\n}\n\n- (void)_updateActiveFormatWithSession:(AVCaptureSession *)session fallbackPreset:(NSString *)fallbackPreset\n{\n    AVCaptureDeviceFormat *nextFormat = [self _bestSupportedFormat];\n    if (nextFormat && [session canSetSessionPreset:AVCaptureSessionPresetInputPriority]) {\n        session.sessionPreset = AVCaptureSessionPresetInputPriority;\n        if (nextFormat == _device.activeFormat) {\n            // Need to reconfigure frame rate though active format unchanged\n            [_device runTask:@\"update frame rate\"\n                withLockedConfiguration:^() {\n                    [self _updateDeviceFrameRate];\n                }];\n        } else {\n            [_device runTask:@\"update active format\"\n                withLockedConfiguration:^() {\n                    _device.activeFormat = nextFormat;\n                    [self _updateDeviceFrameRate];\n                }];\n        }\n    } else {\n        session.sessionPreset = fallbackPreset;\n    }\n    [self _updateFieldOfView];\n}\n\n- (void)_updateDeviceFrameRate\n{\n    int32_t deviceFrameRate;\n    if (_liveVideoStreamingActive) {\n        deviceFrameRate = kSCManagedCaptureDeviceMaximumLowFrameRate;\n    } else {\n        deviceFrameRate = 
kSCManagedCaptureDeviceMaximumHighFrameRate;\n    }\n    CMTime frameDuration = CMTimeMake(1, deviceFrameRate);\n    if (@available(ios 11.0, *)) {\n        if (_captureDepthData) {\n            // Sync the video frame rate to the max depth frame rate (24 fps)\n            if (_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject) {\n                frameDuration =\n                    _device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject.minFrameDuration;\n            }\n        }\n    }\n    _device.activeVideoMaxFrameDuration = frameDuration;\n    _device.activeVideoMinFrameDuration = frameDuration;\n    if (_device.lowLightBoostSupported) {\n        _device.automaticallyEnablesLowLightBoostWhenAvailable = YES;\n    }\n}\n\n- (void)setZoomFactor:(float)zoomFactor\n{\n    SCTraceStart();\n    if (_softwareZoom) {\n        // Just remember the software zoom scale\n        if (zoomFactor <= kSCManagedCaptureDevicecSoftwareMaxZoomFactor && zoomFactor >= 1) {\n            _zoomFactor = zoomFactor;\n        }\n    } else {\n        [_device runTask:@\"set zoom factor\"\n            withLockedConfiguration:^() {\n                if (zoomFactor <= _device.activeFormat.videoMaxZoomFactor && zoomFactor >= 1) {\n                    _zoomFactor = zoomFactor;\n                    if (_device.videoZoomFactor != _zoomFactor) {\n                        _device.videoZoomFactor = _zoomFactor;\n                    }\n                }\n            }];\n    }\n    [self _updateFieldOfView];\n}\n\n- (void)_updateFieldOfView\n{\n    float fieldOfView = _device.activeFormat.videoFieldOfView;\n    if (_zoomFactor > 1.f) {\n        // Adjust the field of view to take the zoom factor into account.\n        // Note: this assumes the zoom factor linearly affects the focal length.\n        fieldOfView = 2.f * SCRadiansToDegrees(atanf(tanf(SCDegreesToRadians(0.5f * fieldOfView)) / _zoomFactor));\n    }\n    self.fieldOfView = fieldOfView;\n}\n\n- 
(void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser\n{\n    [_exposureHandler setExposurePointOfInterest:pointOfInterest fromUser:fromUser];\n}\n\n// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.\n// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),\n// therefore we don't have to check _focusLock in this method.\n- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest\n{\n    SCTraceStart();\n    [_focusHandler setAutofocusPointOfInterest:pointOfInterest];\n}\n\n- (void)continuousAutofocus\n{\n    SCTraceStart();\n    [_focusHandler continuousAutofocus];\n}\n\n- (void)setRecording:(BOOL)recording\n{\n    if (SCCameraTweaksSmoothAutoFocusWhileRecording() && [_device isSmoothAutoFocusSupported]) {\n        [self _setSmoothFocus:recording];\n    } else {\n        [self _setFocusLock:recording];\n    }\n    [_exposureHandler setStableExposure:recording];\n}\n\n- (void)_setFocusLock:(BOOL)focusLock\n{\n    SCTraceStart();\n    [_focusHandler setFocusLock:focusLock];\n}\n\n- (void)_setSmoothFocus:(BOOL)smoothFocus\n{\n    SCTraceStart();\n    [_focusHandler setSmoothFocus:smoothFocus];\n}\n\n- (void)setFlashActive:(BOOL)flashActive\n{\n    SCTraceStart();\n    if (_flashActive != flashActive) {\n        if ([_device hasFlash]) {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n            if (flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOn]) {\n                [_device runTask:@\"set flash active\"\n                    withLockedConfiguration:^() {\n                        _device.flashMode = AVCaptureFlashModeOn;\n                    }];\n            } else if (!flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOff]) {\n                [_device runTask:@\"set flash off\"\n                    withLockedConfiguration:^() {\n                        
_device.flashMode = AVCaptureFlashModeOff;\n                    }];\n            }\n#pragma clang diagnostic pop\n            _flashActive = flashActive;\n        } else {\n            _flashActive = NO;\n        }\n    }\n}\n\n- (void)setTorchActive:(BOOL)torchActive\n{\n    SCTraceStart();\n    if (_torchActive != torchActive) {\n        if ([_device hasTorch]) {\n            if (torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOn]) {\n                [_device runTask:@\"set torch active\"\n                    withLockedConfiguration:^() {\n                        [_device setTorchMode:AVCaptureTorchModeOn];\n                    }];\n            } else if (!torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOff]) {\n                [_device runTask:@\"set torch off\"\n                    withLockedConfiguration:^() {\n                        _device.torchMode = AVCaptureTorchModeOff;\n                    }];\n            }\n            _torchActive = torchActive;\n        } else {\n            _torchActive = NO;\n        }\n    }\n}\n\n#pragma mark - Utilities\n\n- (BOOL)isFlashSupported\n{\n    return _device.hasFlash;\n}\n\n- (BOOL)isTorchSupported\n{\n    return _device.hasTorch;\n}\n\n- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates\n                         viewSize:(CGSize)viewSize\n                     videoGravity:(NSString *)videoGravity\n{\n    SCTraceStart();\n    CGPoint pointOfInterest = CGPointMake(.5f, .5f);\n    CGRect cleanAperture;\n    AVCaptureDeviceInput *deviceInput = [self deviceInput];\n    NSArray *ports = [deviceInput.ports copy];\n    if ([videoGravity isEqualToString:AVLayerVideoGravityResize]) {\n        // Scale, switch x and y, and reverse x\n        return CGPointMake(viewCoordinates.y / viewSize.height, 1.f - (viewCoordinates.x / viewSize.width));\n    }\n    for (AVCaptureInputPort *port in ports) {\n        if ([port mediaType] == AVMediaTypeVideo && port.formatDescription) {\n            
cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);\n            CGSize apertureSize = cleanAperture.size;\n            CGPoint point = viewCoordinates;\n            CGFloat apertureRatio = apertureSize.height / apertureSize.width;\n            CGFloat viewRatio = viewSize.width / viewSize.height;\n            CGFloat xc = .5f;\n            CGFloat yc = .5f;\n            if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) {\n                if (viewRatio > apertureRatio) {\n                    CGFloat y2 = viewSize.height;\n                    CGFloat x2 = viewSize.height * apertureRatio;\n                    CGFloat x1 = viewSize.width;\n                    CGFloat blackBar = (x1 - x2) / 2;\n                    // If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the\n                    // default value returned (.5,.5)\n                    if (point.x >= blackBar && point.x <= blackBar + x2) {\n                        // Scale (accounting for the letterboxing on the left and right of the video preview),\n                        // switch x and y, and reverse x\n                        xc = point.y / y2;\n                        yc = 1.f - ((point.x - blackBar) / x2);\n                    }\n                } else {\n                    CGFloat y2 = viewSize.width / apertureRatio;\n                    CGFloat y1 = viewSize.height;\n                    CGFloat x2 = viewSize.width;\n                    CGFloat blackBar = (y1 - y2) / 2;\n                    // If point is inside letterboxed area, do coordinate conversion. 
Otherwise, don't change the\n                    // default value returned (.5,.5)\n                    if (point.y >= blackBar && point.y <= blackBar + y2) {\n                        // Scale (accounting for the letterboxing on the top and bottom of the video preview),\n                        // switch x and y, and reverse x\n                        xc = ((point.y - blackBar) / y2);\n                        yc = 1.f - (point.x / x2);\n                    }\n                }\n            } else if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {\n                // Scale, switch x and y, and reverse x\n                if (viewRatio > apertureRatio) {\n                    CGFloat y2 = apertureSize.width * (viewSize.width / apertureSize.height);\n                    xc = (point.y + ((y2 - viewSize.height) / 2.f)) / y2; // Account for cropped height\n                    yc = (viewSize.width - point.x) / viewSize.width;\n                } else {\n                    CGFloat x2 = apertureSize.height * (viewSize.height / apertureSize.width);\n                    yc = 1.f - ((point.x + ((x2 - viewSize.width) / 2)) / x2); // Account for cropped width\n                    xc = point.y / viewSize.height;\n                }\n            }\n            pointOfInterest = CGPointMake(xc, yc);\n            break;\n        }\n    }\n    return pointOfInterest;\n}\n\n#pragma mark - SCManagedCapturer friendly methods\n\n- (AVCaptureDevice *)device\n{\n    return _device;\n}\n\n- (AVCaptureDeviceInput *)deviceInput\n{\n    SCTraceStart();\n    if (!_deviceInput) {\n        NSError *error = nil;\n        _deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_device error:&error];\n        if (!_deviceInput) {\n            _error = [error copy];\n        }\n    }\n    return _deviceInput;\n}\n\n- (NSError *)error\n{\n    return _error;\n}\n\n- (BOOL)softwareZoom\n{\n    return _softwareZoom;\n}\n\n- (BOOL)isConnected\n{\n    return _isConnected;\n}\n\n- 
(BOOL)flashActive\n{\n    return _flashActive;\n}\n\n- (BOOL)torchActive\n{\n    return _torchActive;\n}\n\n- (float)zoomFactor\n{\n    return _zoomFactor;\n}\n\n- (BOOL)isNightModeActive\n{\n    return _isNightModeActive;\n}\n\n- (BOOL)liveVideoStreamingActive\n{\n    return _liveVideoStreamingActive;\n}\n\n- (BOOL)isAvailable\n{\n    return [_device isConnected];\n}\n\n#pragma mark - Private methods\n\n- (void)_enableSubjectAreaChangeMonitoring\n{\n    SCTraceStart();\n    [_device runTask:@\"enable SubjectAreaChangeMonitoring\"\n        withLockedConfiguration:^() {\n            _device.subjectAreaChangeMonitoringEnabled = YES;\n        }];\n}\n\n- (AVCaptureDeviceFormat *)activeFormat\n{\n    return _device.activeFormat;\n}\n\n#pragma mark - Observe -adjustingExposure\n- (void)_setAsExposureListenerForDevice:(AVCaptureDevice *)device\n{\n    SCTraceStart();\n    SCLogCoreCameraInfo(@\"Set exposure adjustment KVO for device: %ld\", (long)device.position);\n    [_observeController observe:device\n                        keyPath:@keypath(device, adjustingExposure)\n                        options:NSKeyValueObservingOptionNew\n                         action:@selector(_adjustingExposureChanged:)];\n}\n\n- (void)_adjustingExposureChanged:(NSDictionary *)change\n{\n    SCTraceStart();\n    BOOL adjustingExposure = [change[NSKeyValueChangeNewKey] boolValue];\n    SCLogCoreCameraInfo(@\"KVO exposure changed to %d\", adjustingExposure);\n    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeAdjustingExposure:)]) {\n        [self.delegate managedCaptureDevice:self didChangeAdjustingExposure:adjustingExposure];\n    }\n}\n\n#pragma mark - Observe -exposurePointOfInterest\n- (void)_observeExposurePointForDevice:(AVCaptureDevice *)device\n{\n    SCTraceStart();\n    SCLogCoreCameraInfo(@\"Set exposure point KVO for device: %ld\", (long)device.position);\n    [_observeController observe:device\n                        keyPath:@keypath(device, 
exposurePointOfInterest)\n                        options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew\n                         action:@selector(_exposurePointOfInterestChanged:)];\n}\n\n- (void)_exposurePointOfInterestChanged:(NSDictionary *)change\n{\n    SCTraceStart();\n    CGPoint exposurePoint = [change[NSKeyValueChangeNewKey] CGPointValue];\n    SCLogCoreCameraInfo(@\"KVO exposure point changed to %@\", NSStringFromCGPoint(exposurePoint));\n    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeExposurePoint:)]) {\n        [self.delegate managedCaptureDevice:self didChangeExposurePoint:exposurePoint];\n    }\n}\n\n#pragma mark - Observe -focusPointOfInterest\n- (void)_observeFocusPointForDevice:(AVCaptureDevice *)device\n{\n    SCTraceStart();\n    SCLogCoreCameraInfo(@\"Set focus point KVO for device: %ld\", (long)device.position);\n    [_observeController observe:device\n                        keyPath:@keypath(device, focusPointOfInterest)\n                        options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew\n                         action:@selector(_focusPointOfInterestChanged:)];\n}\n\n- (void)_focusPointOfInterestChanged:(NSDictionary *)change\n{\n    SCTraceStart();\n    CGPoint focusPoint = [change[NSKeyValueChangeNewKey] CGPointValue];\n    SCLogCoreCameraInfo(@\"KVO focus point changed to %@\", NSStringFromCGPoint(focusPoint));\n    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeFocusPoint:)]) {\n        [self.delegate managedCaptureDevice:self didChangeFocusPoint:focusPoint];\n    }\n}\n\n- (void)dealloc\n{\n    [_observeController unobserveAll];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceAutoExposureHandler.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 3/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCManagedCaptureDeviceAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceAutoExposureHandler.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 3/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceAutoExposureHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <SCFoundation/SCTrace.h>\n\n@import AVFoundation;\n\n@implementation SCManagedCaptureDeviceAutoExposureHandler {\n    CGPoint _exposurePointOfInterest;\n    AVCaptureDevice *_device;\n}\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest\n{\n    if (self = [super init]) {\n        _device = device;\n        _exposurePointOfInterest = pointOfInterest;\n    }\n    return self;\n}\n\n- (CGPoint)getExposurePointOfInterest\n{\n    return _exposurePointOfInterest;\n}\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser\n{\n    SCTraceStart();\n    if (!CGPointEqualToPoint(pointOfInterest, _exposurePointOfInterest)) {\n        if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&\n            [_device isExposurePointOfInterestSupported]) {\n            [_device runTask:@\"set exposure\"\n                withLockedConfiguration:^() {\n                    // Set exposure point before changing focus mode\n                    // Be noticed that order does matter\n                    _device.exposurePointOfInterest = pointOfInterest;\n                    _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;\n                }];\n        }\n        _exposurePointOfInterest = pointOfInterest;\n    }\n}\n\n- (void)setStableExposure:(BOOL)stableExposure\n{\n}\n\n- (void)setVisible:(BOOL)visible\n{\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceAutoFocusHandler.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is used to adjust focus related parameters of camera, including focus mode and focus point.\n\n#import \"SCManagedCaptureDeviceFocusHandler.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCManagedCaptureDeviceAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceAutoFocusHandler.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceAutoFocusHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@import CoreGraphics;\n\n@interface SCManagedCaptureDeviceAutoFocusHandler ()\n\n@property (nonatomic, assign) CGPoint focusPointOfInterest;\n@property (nonatomic, strong) AVCaptureDevice *device;\n\n@property (nonatomic, assign) BOOL isContinuousAutofocus;\n@property (nonatomic, assign) BOOL isFocusLock;\n\n@end\n\n@implementation SCManagedCaptureDeviceAutoFocusHandler\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest\n{\n    if (self = [super init]) {\n        _device = device;\n        _focusPointOfInterest = pointOfInterest;\n        _isContinuousAutofocus = YES;\n        _isFocusLock = NO;\n    }\n    return self;\n}\n\n- (CGPoint)getFocusPointOfInterest\n{\n    return self.focusPointOfInterest;\n}\n\n// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.\n// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),\n// therefore we don't have to check self.isFocusLock in this method.\n- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) || self.isContinuousAutofocus)\n    // Do the setup immediately if the focus lock is off.\n    if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] &&\n        [self.device isFocusPointOfInterestSupported]) {\n        [self.device runTask:@\"set autofocus\"\n            withLockedConfiguration:^() {\n                // Set focus point before changing focus mode\n               
 // Be noticed that order does matter\n                self.device.focusPointOfInterest = pointOfInterest;\n                self.device.focusMode = AVCaptureFocusModeAutoFocus;\n            }];\n    }\n    self.focusPointOfInterest = pointOfInterest;\n    self.isContinuousAutofocus = NO;\n}\n\n- (void)continuousAutofocus\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);\n    if (!self.isFocusLock) {\n        // Do the setup immediately if the focus lock is off.\n        if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&\n            [self.device isFocusPointOfInterestSupported]) {\n            [self.device runTask:@\"set continuous autofocus\"\n                withLockedConfiguration:^() {\n                    // Set focus point before changing focus mode\n                    // Be noticed that order does matter\n                    self.device.focusPointOfInterest = CGPointMake(0.5, 0.5);\n                    self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus;\n                }];\n        }\n    }\n    self.focusPointOfInterest = CGPointMake(0.5, 0.5);\n    self.isContinuousAutofocus = YES;\n}\n\n- (void)setFocusLock:(BOOL)focusLock\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(self.isFocusLock != focusLock);\n    // This is the old lock, we only do focus lock on back camera\n    if (focusLock) {\n        if ([self.device isFocusModeSupported:AVCaptureFocusModeLocked]) {\n            [self.device runTask:@\"set focus lock on\"\n                withLockedConfiguration:^() {\n                    self.device.focusMode = AVCaptureFocusModeLocked;\n                }];\n        }\n    } else {\n        // Restore to previous autofocus configurations\n        if ([self.device isFocusModeSupported:(self.isContinuousAutofocus ? 
AVCaptureFocusModeContinuousAutoFocus\n                                                                          : AVCaptureFocusModeAutoFocus)] &&\n            [self.device isFocusPointOfInterestSupported]) {\n            [self.device runTask:@\"set focus lock on\"\n                withLockedConfiguration:^() {\n                    self.device.focusPointOfInterest = self.focusPointOfInterest;\n                    self.device.focusMode = self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus\n                                                                       : AVCaptureFocusModeAutoFocus;\n                }];\n        }\n    }\n    self.isFocusLock = focusLock;\n}\n\n- (void)setSmoothFocus:(BOOL)smoothFocus\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);\n    [self.device runTask:@\"set smooth autofocus\"\n        withLockedConfiguration:^() {\n            [self.device setSmoothAutoFocusEnabled:smoothFocus];\n        }];\n}\n\n- (void)setVisible:(BOOL)visible\n{\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceDefaultZoomHandler.h\n//  Snapchat\n//\n//  Created by Yu-Kuan Lai on 4/12/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@class SCManagedCaptureDevice;\n@class SCCaptureResource;\n\n@interface SCManagedCaptureDeviceDefaultZoomHandler : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately;\n- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceDefaultZoomHandler.m\n//  Snapchat\n//\n//  Created by Yu-Kuan Lai on 4/12/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceDefaultZoomHandler_Private.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerStateBuilder.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@implementation SCManagedCaptureDeviceDefaultZoomHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        _captureResource = captureResource;\n    }\n\n    return self;\n}\n\n- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately\n{\n    [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];\n}\n\n- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer] ||\n                 [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],\n             @\"\");\n    SCAssert(device.softwareZoom, @\"Only do software zoom for software zoom device\");\n\n    SC_GUARD_ELSE_RETURN(!SCDeviceSupportsMetal());\n    float zoomFactor = device.zoomFactor;\n    SCLogCapturerInfo(@\"Adjusting software zoom factor to: %f\", zoomFactor);\n    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;\n    [[SCQueuePerformer mainQueuePerformer] perform:^{\n        [CATransaction begin];\n        [CATransaction setDisableActions:YES];\n        // I end up need to change its superlayer transform to get the zoom effect\n        
videoPreviewLayer.superlayer.affineTransform = CGAffineTransformMakeScale(zoomFactor, zoomFactor);\n        [CATransaction commit];\n    }];\n}\n\n- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (device) {\n            SCLogCapturerInfo(@\"Set zoom factor: %f -> %f\", _captureResource.state.zoomFactor, zoomFactor);\n            [device setZoomFactor:zoomFactor];\n            BOOL zoomFactorChanged = NO;\n            // If the device is our current device, send the notification, update the\n            // state.\n            if (device.isConnected && device == _captureResource.device) {\n                if (device.softwareZoom) {\n                    [self softwareZoomWithDevice:device];\n                }\n                _captureResource.state = [[[SCManagedCapturerStateBuilder\n                    withManagedCapturerState:_captureResource.state] setZoomFactor:zoomFactor] build];\n                zoomFactorChanged = YES;\n            }\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            runOnMainThreadAsynchronously(^{\n                if (zoomFactorChanged) {\n                    [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                                 didChangeState:state];\n                    [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                            didChangeZoomFactor:state];\n                }\n            });\n        }\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h",
    "content": "//\n//  SCManagedCaptureDeviceDefaultZoomHandler_Private.h\n//  Snapchat\n//\n//  Created by Joe Qiao on 04/01/2018.\n//\n\n#import \"SCManagedCaptureDeviceDefaultZoomHandler.h\"\n\n@interface SCManagedCaptureDeviceDefaultZoomHandler ()\n\n@property (nonatomic, weak) SCCaptureResource *captureResource;\n@property (nonatomic, weak) SCManagedCaptureDevice *currentDevice;\n\n- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceExposureHandler.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 3/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedCaptureDeviceExposureHandler <NSObject>\n\n- (CGPoint)getExposurePointOfInterest;\n\n- (void)setStableExposure:(BOOL)stableExposure;\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;\n\n- (void)setVisible:(BOOL)visible;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/6/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is used to\n//  1. adjust exposure related parameters of camera, including exposure mode and exposure point.\n//  2. receive detected face bounds, and set exposure point to a preferred face if needed.\n\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <SCBase/SCMacros.h>\n\n#import <AVFoundation/AVFoundation.h>\n\n@protocol SCCapturer;\n\n@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n               managedCapturer:(id<SCCapturer>)managedCapturer;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/6/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n#import \"SCManagedCaptureFaceDetectionAdjustingPOIResource.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerListener.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@import AVFoundation;\n\n@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler () <SCManagedCapturerListener>\n\n@property (nonatomic, strong) AVCaptureDevice *device;\n@property (nonatomic, weak) id<SCCapturer> managedCapturer;\n@property (nonatomic, assign) CGPoint exposurePointOfInterest;\n@property (nonatomic, assign) BOOL isVisible;\n\n@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;\n@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;\n\n@end\n\n@implementation SCManagedCaptureDeviceFaceDetectionAutoExposureHandler\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n               managedCapturer:(id<SCCapturer>)managedCapturer\n{\n    if (self = [super init]) {\n        SCAssert(device, @\"AVCaptureDevice should not be nil.\");\n        SCAssert(managedCapturer, @\"id<SCCapturer> should not be nil.\");\n        _device = device;\n        _exposurePointOfInterest = pointOfInterest;\n        SCManagedCaptureDevicePosition position =\n            (device.position == AVCaptureDevicePositionFront ? 
SCManagedCaptureDevicePositionFront\n                                                             : SCManagedCaptureDevicePositionBack);\n        _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]\n             initWithDefaultPointOfInterest:pointOfInterest\n            shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];\n        _managedCapturer = managedCapturer;\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    [_managedCapturer removeListener:self];\n}\n\n- (CGPoint)getExposurePointOfInterest\n{\n    return self.exposurePointOfInterest;\n}\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser\n{\n    SCTraceODPCompatibleStart(2);\n\n    pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:fromUser];\n\n    [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];\n}\n\n- (void)_actuallySetExposurePointOfInterestIfNeeded:(CGPoint)pointOfInterest\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.exposurePointOfInterest));\n    if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&\n        [self.device isExposurePointOfInterestSupported]) {\n        [self.device runTask:@\"set exposure\"\n            withLockedConfiguration:^() {\n                // Set exposure point before changing exposure mode\n                // Be noticed that order does matter\n                self.device.exposurePointOfInterest = pointOfInterest;\n                self.device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;\n            }];\n    }\n    self.exposurePointOfInterest = pointOfInterest;\n}\n\n- (void)setStableExposure:(BOOL)stableExposure\n{\n}\n\n- (void)setVisible:(BOOL)visible\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(_isVisible != visible);\n    _isVisible = visible;\n    if (visible) {\n        
[self.managedCapturer addListener:self];\n    } else {\n        [self.managedCapturer removeListener:self];\n        [self.resource reset];\n    }\n}\n\n#pragma mark - SCManagedCapturerListener\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(self.isVisible);\n    CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];\n    [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is used to\n//  1. adjust focus related parameters of camera, including focus mode and focus point.\n//  2. receive detected face bounds, and focus to a preferred face if needed.\n\n#import \"SCManagedCaptureDeviceFocusHandler.h\"\n\n#import <SCBase/SCMacros.h>\n\n#import <AVFoundation/AVFoundation.h>\n\n@protocol SCCapturer;\n\n@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n               managedCapturer:(id<SCCapturer>)managedCapturer;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCaptureFaceDetectionAdjustingPOIResource.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerListener.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler () <SCManagedCapturerListener>\n\n@property (nonatomic, strong) AVCaptureDevice *device;\n@property (nonatomic, weak) id<SCCapturer> managedCapturer;\n@property (nonatomic, assign) CGPoint focusPointOfInterest;\n\n@property (nonatomic, assign) BOOL isVisible;\n@property (nonatomic, assign) BOOL isContinuousAutofocus;\n@property (nonatomic, assign) BOOL focusLock;\n\n@property (nonatomic, copy) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;\n@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;\n\n@end\n\n@implementation SCManagedCaptureDeviceFaceDetectionAutoFocusHandler\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n               managedCapturer:(id<SCCapturer>)managedCapturer\n{\n    if (self = [super init]) {\n        SCAssert(device, @\"AVCaptureDevice should not be nil.\");\n        SCAssert(managedCapturer, @\"id<SCCapturer> should not be nil.\");\n        _device = device;\n        _focusPointOfInterest = pointOfInterest;\n        SCManagedCaptureDevicePosition position =\n            (device.position == AVCaptureDevicePositionFront ? 
SCManagedCaptureDevicePositionFront\n                                                             : SCManagedCaptureDevicePositionBack);\n        _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]\n             initWithDefaultPointOfInterest:pointOfInterest\n            shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];\n        _managedCapturer = managedCapturer;\n    }\n    return self;\n}\n\n- (CGPoint)getFocusPointOfInterest\n{\n    return self.focusPointOfInterest;\n}\n\n// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.\n// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),\n// therefore we don't have to check self.focusLock in this method.\n- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest\n{\n    SCTraceODPCompatibleStart(2);\n    pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:YES];\n    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) ||\n                         self.isContinuousAutofocus);\n    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest\n                                     withFocusMode:AVCaptureFocusModeAutoFocus\n                                          taskName:@\"set autofocus\"];\n}\n\n- (void)continuousAutofocus\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);\n    CGPoint pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];\n    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest\n                                     withFocusMode:AVCaptureFocusModeContinuousAutoFocus\n                                          taskName:@\"set continuous autofocus\"];\n}\n\n- (void)setFocusLock:(BOOL)focusLock\n{\n    // Disabled focus lock for face detection and focus handler.\n}\n\n- 
(void)setSmoothFocus:(BOOL)smoothFocus\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);\n    [self.device runTask:@\"set smooth autofocus\"\n        withLockedConfiguration:^() {\n            [self.device setSmoothAutoFocusEnabled:smoothFocus];\n        }];\n}\n\n- (void)setVisible:(BOOL)visible\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(_isVisible != visible);\n    self.isVisible = visible;\n    if (visible) {\n        [[SCManagedCapturer sharedInstance] addListener:self];\n    } else {\n        [[SCManagedCapturer sharedInstance] removeListener:self];\n        [self.resource reset];\n    }\n}\n\n- (void)_actuallySetFocusPointOfInterestIfNeeded:(CGPoint)pointOfInterest\n                                   withFocusMode:(AVCaptureFocusMode)focusMode\n                                        taskName:(NSString *)taskName\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) &&\n                         [self.device isFocusModeSupported:focusMode] && [self.device isFocusPointOfInterestSupported]);\n    [self.device runTask:taskName\n        withLockedConfiguration:^() {\n            // Set focus point before changing focus mode\n            // Be noticed that order does matter\n            self.device.focusPointOfInterest = pointOfInterest;\n            self.device.focusMode = focusMode;\n        }];\n\n    self.focusPointOfInterest = pointOfInterest;\n    self.isContinuousAutofocus = (focusMode == AVCaptureFocusModeContinuousAutoFocus);\n}\n\n#pragma mark - SCManagedCapturerListener\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(self.isVisible);\n    CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];\n    // If 
pointOfInterest is equal to CGPointMake(0.5, 0.5), it means no valid face is found, so that we should reset to\n    // AVCaptureFocusModeContinuousAutoFocus. Otherwise, focus on the point and set the mode as\n    // AVCaptureFocusModeAutoFocus.\n    // TODO(Jiyang): Refactor SCManagedCaptureFaceDetectionAdjustingPOIResource to include focusMode and exposureMode.\n    AVCaptureFocusMode focusMode = CGPointEqualToPoint(pointOfInterest, CGPointMake(0.5, 0.5))\n                                       ? AVCaptureFocusModeContinuousAutoFocus\n                                       : AVCaptureFocusModeAutoFocus;\n    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest\n                                     withFocusMode:focusMode\n                                          taskName:@\"set autofocus from face detection\"];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceFocusHandler.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedCaptureDeviceFocusHandler <NSObject>\n\n- (CGPoint)getFocusPointOfInterest;\n\n/// Called when subject area changes.\n- (void)continuousAutofocus;\n\n/// Called when user taps.\n- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;\n\n- (void)setSmoothFocus:(BOOL)smoothFocus;\n\n- (void)setFocusLock:(BOOL)focusLock;\n\n- (void)setVisible:(BOOL)visible;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceHandler.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/8/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n@interface SCManagedCaptureDeviceHandler : NSObject <SCManagedCaptureDeviceDelegate>\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceHandler.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/8/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceHandler.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedCapturerStateBuilder.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedCaptureDeviceHandler ()\n\n@property (nonatomic, weak) SCCaptureResource *captureResource;\n\n@end\n\n@implementation SCManagedCaptureDeviceHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"SCCaptureResource should not be nil.\");\n        _captureResource = captureResource;\n    }\n    return self;\n}\n\n- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    SC_GUARD_ELSE_RETURN(device == _captureResource.device);\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"KVO Changes adjustingExposure %d\", adjustingExposure);\n    [_captureResource.queuePerformer perform:^{\n        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]\n            setAdjustingExposure:adjustingExposure] build];\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                             didChangeAdjustingExposure:state];\n        });\n    }];\n}\n\n- 
(void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint\n{\n    SC_GUARD_ELSE_RETURN(device == self.captureResource.device);\n    SCTraceODPCompatibleStart(2);\n    runOnMainThreadAsynchronously(^{\n        [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                 didChangeExposurePoint:exposurePoint];\n    });\n}\n\n- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint\n{\n    SC_GUARD_ELSE_RETURN(device == self.captureResource.device);\n    SCTraceODPCompatibleStart(2);\n    runOnMainThreadAsynchronously(^{\n        [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                    didChangeFocusPoint:focusPoint];\n    });\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceLinearInterpolationZoomHandler.h\n//  Snapchat\n//\n//  Created by Joe Qiao on 03/01/2018.\n//\n\n#import \"SCManagedCaptureDeviceDefaultZoomHandler.h\"\n\n@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceLinearInterpolationZoomHandler.m\n//  Snapchat\n//\n//  Created by Joe Qiao on 03/01/2018.\n//\n\n#import \"SCManagedCaptureDeviceLinearInterpolationZoomHandler.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCaptureDeviceDefaultZoomHandler_Private.h\"\n#import \"SCManagedCapturerLogging.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCMathUtils.h>\n\n@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler ()\n\n@property (nonatomic, strong) CADisplayLink *displayLink;\n@property (nonatomic, assign) double timestamp;\n@property (nonatomic, assign) float targetFactor;\n@property (nonatomic, assign) float intermediateFactor;\n@property (nonatomic, assign) int trend;\n@property (nonatomic, assign) float stepLength;\n\n@end\n\n@implementation SCManagedCaptureDeviceLinearInterpolationZoomHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super initWithCaptureResource:captureResource];\n    if (self) {\n        _timestamp = -1.0;\n        _targetFactor = 1.0;\n        _intermediateFactor = _targetFactor;\n        _trend = 1;\n        _stepLength = 0.0;\n    }\n\n    return self;\n}\n\n- (void)dealloc\n{\n    [self _invalidate];\n}\n\n- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately\n{\n    if (self.currentDevice != device) {\n        if (_displayLink) {\n            // if device changed, interupt smoothing process\n            // and reset to target zoom factor immediately\n            [self _resetToZoomFactor:_targetFactor];\n        }\n        self.currentDevice = device;\n        immediately = YES;\n    }\n\n    if (immediately) {\n        [self _resetToZoomFactor:zoomFactor];\n    } else {\n        [self _addTargetZoomFactor:zoomFactor];\n    }\n}\n\n#pragma mark - Configurable\n// smoothen if the update time interval is greater than the threshold\n- 
(double)_thresholdTimeIntervalToSmoothen\n{\n    return SCCameraTweaksSmoothZoomThresholdTime();\n}\n\n- (double)_thresholdFactorDiffToSmoothen\n{\n    return SCCameraTweaksSmoothZoomThresholdFactor();\n}\n\n- (int)_intermediateFactorFramesPerSecond\n{\n    return SCCameraTweaksSmoothZoomIntermediateFramesPerSecond();\n}\n\n- (double)_delayTolerantTime\n{\n    return SCCameraTweaksSmoothZoomDelayTolerantTime();\n}\n\n// minimum step length between two intermediate factors,\n// the greater the better as long as could provide a 'smooth experience' during smoothing process\n- (float)_minimumStepLength\n{\n    return SCCameraTweaksSmoothZoomMinStepLength();\n}\n\n#pragma mark - Private methods\n- (void)_addTargetZoomFactor:(float)factor\n{\n    SCAssertMainThread();\n\n    SCLogCapturerInfo(@\"Smooth Zoom - [1] t=%f zf=%f\", CACurrentMediaTime(), factor);\n    if (SCFloatEqual(factor, _targetFactor)) {\n        return;\n    }\n    _targetFactor = factor;\n\n    float diff = _targetFactor - _intermediateFactor;\n    if ([self _isDuringSmoothingProcess]) {\n        // during smoothing, only update data\n        [self _updateDataWithDiff:diff];\n    } else {\n        double curTimestamp = CACurrentMediaTime();\n        if (!SCFloatEqual(_timestamp, -1.0) && (curTimestamp - _timestamp) > [self _thresholdTimeIntervalToSmoothen] &&\n            ABS(diff) > [self _thresholdFactorDiffToSmoothen]) {\n            // need smoothing\n            [self _updateDataWithDiff:diff];\n            if ([self _nextStep]) {\n                // use timer to interpolate intermediate factors to avoid sharp jump\n                _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(_nextStep)];\n                _displayLink.preferredFramesPerSecond = [self _intermediateFactorFramesPerSecond];\n                [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];\n            }\n        } else {\n            _timestamp = curTimestamp;\n        
    _intermediateFactor = factor;\n\n            SCLogCapturerInfo(@\"Smooth Zoom - [2] t=%f zf=%f\", CACurrentMediaTime(), _intermediateFactor);\n            [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];\n        }\n    }\n}\n\n- (void)_resetToZoomFactor:(float)factor\n{\n    [self _invalidate];\n\n    _timestamp = -1.0;\n    _targetFactor = factor;\n    _intermediateFactor = _targetFactor;\n\n    [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];\n}\n\n- (BOOL)_nextStep\n{\n    _timestamp = CACurrentMediaTime();\n    _intermediateFactor += (_trend * _stepLength);\n\n    BOOL hasNext = YES;\n    if (_trend < 0.0) {\n        _intermediateFactor = MAX(_intermediateFactor, _targetFactor);\n    } else {\n        _intermediateFactor = MIN(_intermediateFactor, _targetFactor);\n    }\n\n    SCLogCapturerInfo(@\"Smooth Zoom - [3] t=%f zf=%f\", CACurrentMediaTime(), _intermediateFactor);\n    [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice];\n\n    if (SCFloatEqual(_intermediateFactor, _targetFactor)) {\n        // finish smoothening\n        [self _invalidate];\n        hasNext = NO;\n    }\n\n    return hasNext;\n}\n\n- (void)_invalidate\n{\n    [_displayLink invalidate];\n    _displayLink = nil;\n    _trend = 1;\n    _stepLength = 0.0;\n}\n\n- (void)_updateDataWithDiff:(CGFloat)diff\n{\n    _trend = diff < 0.0 ? -1 : 1;\n    _stepLength =\n        MAX(_stepLength, MAX([self _minimumStepLength],\n                             ABS(diff) / ([self _delayTolerantTime] * [self _intermediateFactorFramesPerSecond])));\n}\n\n- (BOOL)_isDuringSmoothingProcess\n{\n    return (_displayLink ? YES : NO);\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceLockOnRecordExposureHandler.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 3/24/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n// An exposure handler that prevents any changes in exposure as soon as recording begins\n@interface SCManagedCaptureDeviceLockOnRecordExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n                      allowTap:(BOOL)allowTap;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceLockOnRecordExposureHandler.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 3/24/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceLockOnRecordExposureHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCExposureState.h\"\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <SCFoundation/SCTrace.h>\n\n@import AVFoundation;\n\n@implementation SCManagedCaptureDeviceLockOnRecordExposureHandler {\n    CGPoint _exposurePointOfInterest;\n    AVCaptureDevice *_device;\n    // allows the exposure to change when the user taps to refocus\n    BOOL _allowTap;\n    SCExposureState *_exposureState;\n}\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n                      allowTap:(BOOL)allowTap\n{\n    if (self = [super init]) {\n        _device = device;\n        _exposurePointOfInterest = pointOfInterest;\n        _allowTap = allowTap;\n    }\n    return self;\n}\n\n- (CGPoint)getExposurePointOfInterest\n{\n    return _exposurePointOfInterest;\n}\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser\n{\n    SCTraceStart();\n    BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||\n                  _device.exposureMode == AVCaptureExposureModeCustom ||\n                  _device.exposureMode == AVCaptureExposureModeAutoExpose;\n    if (!locked || (fromUser && _allowTap)) {\n        AVCaptureExposureMode exposureMode =\n            (locked ? 
AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);\n        if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {\n            [_device runTask:@\"set exposure point\"\n                withLockedConfiguration:^() {\n                    // Set exposure point before changing focus mode\n                    // Be noticed that order does matter\n                    _device.exposurePointOfInterest = pointOfInterest;\n                    _device.exposureMode = exposureMode;\n                }];\n        }\n        _exposurePointOfInterest = pointOfInterest;\n    }\n}\n\n- (void)setStableExposure:(BOOL)stableExposure\n{\n    AVCaptureExposureMode exposureMode =\n        stableExposure ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure;\n    if ([_device isExposureModeSupported:exposureMode]) {\n        [_device runTask:@\"set stable exposure\"\n            withLockedConfiguration:^() {\n                _device.exposureMode = exposureMode;\n            }];\n    }\n}\n\n- (void)setVisible:(BOOL)visible\n{\n    if (visible) {\n        if (_device.exposureMode == AVCaptureExposureModeLocked ||\n            _device.exposureMode == AVCaptureExposureModeCustom) {\n            [_exposureState applyISOAndExposureDurationToDevice:_device];\n        }\n    } else {\n        _exposureState = [[SCExposureState alloc] initWithDevice:_device];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h\n//  Snapchat\n//\n//  Created by Yu-Kuan Lai on 4/12/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceDefaultZoomHandler.h\"\n\n@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m\n//  Snapchat\n//\n//  Created by Yu-Kuan Lai on 4/12/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//  https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter\n//\n\n#import \"SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h\"\n\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedCaptureDeviceDefaultZoomHandler_Private.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\nstatic NSUInteger const kSCSavitzkyGolayWindowSize = 9;\nstatic CGFloat const kSCUpperSharpZoomThreshold = 1.15;\n\n@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler ()\n\n@property (nonatomic, strong) NSMutableArray *zoomFactorHistoryArray;\n\n@end\n\n@implementation SCManagedCaptureDeviceSavitzkyGolayZoomHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super initWithCaptureResource:captureResource];\n    if (self) {\n        _zoomFactorHistoryArray = [[NSMutableArray alloc] init];\n    }\n\n    return self;\n}\n\n- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately\n{\n    if (self.currentDevice != device) {\n        // reset if device changed\n        self.currentDevice = device;\n        [self _resetZoomFactor:zoomFactor forDevice:self.currentDevice];\n        return;\n    }\n\n    if (immediately || zoomFactor == 1 || _zoomFactorHistoryArray.count == 0) {\n        // reset if zoomFactor is 1 or this is the first data point\n        [self _resetZoomFactor:zoomFactor forDevice:device];\n        return;\n    }\n\n    CGFloat lastVal = [[_zoomFactorHistoryArray lastObject] floatValue];\n    CGFloat upperThreshold = lastVal * kSCUpperSharpZoomThreshold;\n    if (zoomFactor > upperThreshold) {\n        // sharp change in zoomFactor, reset\n        [self _resetZoomFactor:zoomFactor forDevice:device];\n        return;\n    }\n\n    
[_zoomFactorHistoryArray addObject:@(zoomFactor)];\n    if ([_zoomFactorHistoryArray count] > kSCSavitzkyGolayWindowSize) {\n        [_zoomFactorHistoryArray removeObjectAtIndex:0];\n    }\n\n    float filteredZoomFactor =\n        SC_CLAMP([self _savitzkyGolayFilteredZoomFactor], kSCMinVideoZoomFactor, kSCMaxVideoZoomFactor);\n    [self _setZoomFactor:filteredZoomFactor forManagedCaptureDevice:device];\n}\n\n- (CGFloat)_savitzkyGolayFilteredZoomFactor\n{\n    if ([_zoomFactorHistoryArray count] == kSCSavitzkyGolayWindowSize) {\n        CGFloat filteredZoomFactor =\n            59 * [_zoomFactorHistoryArray[4] floatValue] +\n            54 * ([_zoomFactorHistoryArray[3] floatValue] + [_zoomFactorHistoryArray[5] floatValue]) +\n            39 * ([_zoomFactorHistoryArray[2] floatValue] + [_zoomFactorHistoryArray[6] floatValue]) +\n            14 * ([_zoomFactorHistoryArray[1] floatValue] + [_zoomFactorHistoryArray[7] floatValue]) -\n            21 * ([_zoomFactorHistoryArray[0] floatValue] + [_zoomFactorHistoryArray[8] floatValue]);\n        filteredZoomFactor /= 231;\n        return filteredZoomFactor;\n    } else {\n        return [[_zoomFactorHistoryArray lastObject] floatValue]; // use zoomFactor directly if we have less than 9\n    }\n}\n\n- (void)_resetZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device\n{\n    [_zoomFactorHistoryArray removeAllObjects];\n    [_zoomFactorHistoryArray addObject:@(zoomFactor)];\n    [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceSubjectAreaHandler.h\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 19/03/2018.\n//\n// This class is used to handle the AVCaptureDeviceSubjectAreaDidChangeNotification notification for SCManagedCapturer.\n// To reset device's settings when the subject area changed\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n@protocol SCCapturer;\n\n@interface SCManagedCaptureDeviceSubjectAreaHandler : NSObject\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;\n\n- (void)stopObserving;\n- (void)startObserving;\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceSubjectAreaHandler.m\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 19/03/2018.\n//\n\n#import \"SCManagedCaptureDeviceSubjectAreaHandler.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@interface SCManagedCaptureDeviceSubjectAreaHandler () {\n    __weak SCCaptureResource *_captureResource;\n}\n@end\n\n@implementation SCManagedCaptureDeviceSubjectAreaHandler\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"\");\n        _captureResource = captureResource;\n    }\n    return self;\n}\n\n- (void)stopObserving\n{\n    [[NSNotificationCenter defaultCenter] removeObserver:self\n                                                    name:AVCaptureDeviceSubjectAreaDidChangeNotification\n                                                  object:nil];\n}\n\n- (void)startObserving\n{\n    [[NSNotificationCenter defaultCenter] addObserver:self\n                                             selector:@selector(_subjectAreaDidChange:)\n                                                 name:AVCaptureDeviceSubjectAreaDidChangeNotification\n                                               object:nil];\n}\n\n#pragma mark - Private methods\n- (void)_subjectAreaDidChange:(NSDictionary *)notification\n{\n    [_captureResource.queuePerformer perform:^{\n        if (_captureResource.device.isConnected && !_captureResource.state.arSessionActive) {\n            // Reset to continuous autofocus when the subject area changed\n            [_captureResource.device continuousAutofocus];\n            [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) 
fromUser:NO];\n            if (SCCameraTweaksEnablePortraitModeAutofocus()) {\n                [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5)\n                                                            completionHandler:nil\n                                                                     resource:_captureResource];\n            }\n        }\n    }];\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h",
    "content": "//\n//  SCManagedCaptureDeviceThresholdExposureHandler.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/11/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCManagedCaptureDeviceThresholdExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n                     threshold:(CGFloat)threshold;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m",
    "content": "//\n//  SCManagedCaptureDeviceThresholdExposureHandler.m\n//  Snapchat\n//\n//  Created by Derek Peirce on 4/11/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureDeviceThresholdExposureHandler.h\"\n\n#import \"AVCaptureDevice+ConfigurationLock.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCExposureState.h\"\n#import \"SCManagedCaptureDeviceExposureHandler.h\"\n\n#import <SCFoundation/SCTrace.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n@import AVFoundation;\n\n@implementation SCManagedCaptureDeviceThresholdExposureHandler {\n    AVCaptureDevice *_device;\n    CGPoint _exposurePointOfInterest;\n    CGFloat _threshold;\n    // allows the exposure to change when the user taps to refocus\n    SCExposureState *_exposureState;\n    FBKVOController *_kvoController;\n}\n\n- (instancetype)initWithDevice:(AVCaptureDevice *)device\n               pointOfInterest:(CGPoint)pointOfInterest\n                     threshold:(CGFloat)threshold\n{\n    if (self = [super init]) {\n        _device = device;\n        _exposurePointOfInterest = pointOfInterest;\n        _threshold = threshold;\n        _kvoController = [FBKVOController controllerWithObserver:self];\n        @weakify(self);\n        [_kvoController observe:device\n                        keyPath:NSStringFromSelector(@selector(exposureMode))\n                        options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew\n                          block:^(id observer, id object, NSDictionary *change) {\n                              @strongify(self);\n                              AVCaptureExposureMode old =\n                                  (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeOldKey] intValue];\n                              AVCaptureExposureMode new =\n                                  (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeNewKey] intValue];\n                              if (old == 
AVCaptureExposureModeAutoExpose && new == AVCaptureExposureModeLocked) {\n                                  // auto expose is done, go back to custom\n                                  self->_exposureState = [[SCExposureState alloc] initWithDevice:self->_device];\n                                  [self->_exposureState applyISOAndExposureDurationToDevice:self->_device];\n                              }\n                          }];\n        [_kvoController observe:device\n                        keyPath:NSStringFromSelector(@selector(exposureTargetOffset))\n                        options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew\n                          block:^(id observer, id object, NSDictionary *change) {\n                              @strongify(self);\n                              if (self->_device.exposureMode == AVCaptureExposureModeCustom) {\n                                  CGFloat offset = [(NSNumber *)change[NSKeyValueChangeOldKey] floatValue];\n                                  if (fabs(offset) > self->_threshold) {\n                                      [self->_device runTask:@\"set exposure point\"\n                                          withLockedConfiguration:^() {\n                                              // Set exposure point before changing focus mode\n                                              // Be noticed that order does matter\n                                              self->_device.exposurePointOfInterest = CGPointMake(0.5, 0.5);\n                                              self->_device.exposureMode = AVCaptureExposureModeAutoExpose;\n                                          }];\n                                  }\n                              }\n                          }];\n    }\n    return self;\n}\n\n- (CGPoint)getExposurePointOfInterest\n{\n    return _exposurePointOfInterest;\n}\n\n- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser\n{\n    SCTraceStart();\n 
   BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked ||\n                  _device.exposureMode == AVCaptureExposureModeCustom ||\n                  _device.exposureMode == AVCaptureExposureModeAutoExpose;\n    if (!locked || fromUser) {\n        AVCaptureExposureMode exposureMode =\n            (locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure);\n        if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) {\n            [_device runTask:@\"set exposure point\"\n                withLockedConfiguration:^() {\n                    // Set exposure point before changing focus mode\n                    // Be noticed that order does matter\n                    _device.exposurePointOfInterest = pointOfInterest;\n                    _device.exposureMode = exposureMode;\n                }];\n        }\n        _exposurePointOfInterest = pointOfInterest;\n    }\n}\n\n- (void)setStableExposure:(BOOL)stableExposure\n{\n    if (stableExposure) {\n        _exposureState = [[SCExposureState alloc] initWithDevice:_device];\n        [_exposureState applyISOAndExposureDurationToDevice:_device];\n    } else {\n        AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;\n        if ([_device isExposureModeSupported:exposureMode]) {\n            [_device runTask:@\"set exposure point\"\n                withLockedConfiguration:^() {\n                    _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;\n                }];\n        }\n    }\n}\n\n- (void)setVisible:(BOOL)visible\n{\n    if (visible) {\n        if (_device.exposureMode == AVCaptureExposureModeLocked ||\n            _device.exposureMode == AVCaptureExposureModeCustom) {\n            [_exposureState applyISOAndExposureDurationToDevice:_device];\n        }\n    } else {\n        _exposureState = [[SCExposureState alloc] initWithDevice:_device];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h",
    "content": "//\n//  SCManagedCaptureFaceDetectionAdjustingPOIResource.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n//  This class is used to keep several properties for face detection and focus/exposure. It provides methods to help\n//  FaceDetectionAutoFocusHandler and FaceDetectionAutoExposureHandler to deal with the point of interest setting events\n//  from user taps, subject area changes, and face detection, by updating itself and return the actual point of\n//  interest.\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\ntypedef NS_ENUM(NSInteger, SCManagedCaptureFaceDetectionAdjustingPOIMode) {\n    SCManagedCaptureFaceDetectionAdjustingPOIModeNone = 0,\n    SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace,\n    SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace,\n};\n\n@interface SCManagedCaptureFaceDetectionAdjustingPOIResource : NSObject\n\n@property (nonatomic, assign) CGPoint pointOfInterest;\n\n@property (nonatomic, strong) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;\n@property (nonatomic, assign) SCManagedCaptureFaceDetectionAdjustingPOIMode adjustingPOIMode;\n@property (nonatomic, assign) BOOL shouldTargetOnFaceAutomatically;\n@property (nonatomic, strong) NSNumber *targetingFaceID;\n@property (nonatomic, assign) CGRect targetingFaceBounds;\n\n- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest\n               shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically;\n\n- (void)reset;\n\n/**\n Update SCManagedCaptureFaceDetectionAdjustingPOIResource when a new POI adjustment comes. 
It will find the face that\n the proposedPoint belongs to, return the center of the face, if the adjustingPOIMode and fromUser meets the\n requirements.\n\n @param proposedPoint\n The point of interest that upper level wants to set.\n @param fromUser\n Whether the setting is from user's tap or not.\n @return\n The actual point of interest that should be applied.\n */\n- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser;\n\n/**\n Update SCManagedCaptureFaceDetectionAdjustingPOIResource when new detected face bounds comes.\n\n @param faceBoundsByFaceID\n A dictionary. Key: FaceID as NSNumber. Value: FaceBounds as CGRect.\n @return\n The actual point of interest that should be applied.\n */\n- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m",
    "content": "//\n//  SCManagedCaptureFaceDetectionAdjustingPOIResource.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 3/7/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCaptureFaceDetectionAdjustingPOIResource.h\"\n\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@implementation SCManagedCaptureFaceDetectionAdjustingPOIResource {\n    CGPoint _defaultPointOfInterest;\n}\n\n#pragma mark - Public Methods\n\n- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest\n               shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically\n{\n    if (self = [super init]) {\n        _pointOfInterest = pointOfInterest;\n        _defaultPointOfInterest = pointOfInterest;\n        _shouldTargetOnFaceAutomatically = shouldTargetOnFaceAutomatically;\n    }\n    return self;\n}\n\n- (void)reset\n{\n    SCTraceODPCompatibleStart(2);\n    self.adjustingPOIMode = SCManagedCaptureFaceDetectionAdjustingPOIModeNone;\n    self.targetingFaceID = nil;\n    self.targetingFaceBounds = CGRectZero;\n    self.faceBoundsByFaceID = nil;\n    self.pointOfInterest = _defaultPointOfInterest;\n}\n\n- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser\n{\n    SCTraceODPCompatibleStart(2);\n    if (fromUser) {\n        NSNumber *faceID =\n            [self _getFaceIDOfFaceBoundsContainingPoint:proposedPoint fromFaceBounds:self.faceBoundsByFaceID];\n        if (faceID && [faceID integerValue] >= 0) {\n            CGPoint point = [self _getPointOfInterestWithFaceID:faceID fromFaceBounds:self.faceBoundsByFaceID];\n            if ([self _isPointOfInterestValid:point]) {\n                [self _setPointOfInterest:point\n                          targetingFaceID:faceID\n                         adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];\n            } else {\n                [self 
_setPointOfInterest:proposedPoint\n                          targetingFaceID:nil\n                         adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];\n            }\n        } else {\n            [self _setPointOfInterest:proposedPoint\n                      targetingFaceID:nil\n                     adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace];\n        }\n    } else {\n        [self _setPointOfInterest:proposedPoint\n                  targetingFaceID:nil\n                 adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeNone];\n    }\n    return self.pointOfInterest;\n}\n\n- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    self.faceBoundsByFaceID = faceBoundsByFaceID;\n    switch (self.adjustingPOIMode) {\n    case SCManagedCaptureFaceDetectionAdjustingPOIModeNone: {\n        if (self.shouldTargetOnFaceAutomatically) {\n            [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];\n        }\n    } break;\n    case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace: {\n        BOOL isFocusingOnCurrentTargetingFaceSuccess =\n            [self _focusOnFaceWithTargetFaceID:self.targetingFaceID inFaceBounds:self.faceBoundsByFaceID];\n        if (!isFocusingOnCurrentTargetingFaceSuccess && self.shouldTargetOnFaceAutomatically) {\n            // If the targeted face has disappeared, and shouldTargetOnFaceAutomatically is YES, automatically target on\n            // the next preferred face.\n            [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID];\n        }\n    } break;\n    case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace:\n        // The point of interest should be fixed at a non-face point where user tapped before.\n        break;\n    }\n    return self.pointOfInterest;\n}\n\n#pragma mark - Internal 
Methods\n\n- (BOOL)_focusOnPreferredFaceInFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    NSNumber *preferredFaceID = [self _getPreferredFaceIDFromFaceBounds:faceBoundsByFaceID];\n    return [self _focusOnFaceWithTargetFaceID:preferredFaceID inFaceBounds:faceBoundsByFaceID];\n}\n\n- (BOOL)_focusOnFaceWithTargetFaceID:(NSNumber *)preferredFaceID\n                        inFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN_VALUE(preferredFaceID, NO);\n    NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:preferredFaceID];\n    if (faceBoundsValue) {\n        CGRect faceBounds = [faceBoundsValue CGRectValue];\n        CGPoint proposedPoint = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));\n        if ([self _isPointOfInterestValid:proposedPoint]) {\n            if ([self _shouldChangeToNewPoint:proposedPoint withNewFaceID:preferredFaceID newFaceBounds:faceBounds]) {\n                [self _setPointOfInterest:proposedPoint\n                          targetingFaceID:preferredFaceID\n                         adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace];\n            }\n            return YES;\n        }\n    }\n    [self reset];\n    return NO;\n}\n\n- (void)_setPointOfInterest:(CGPoint)pointOfInterest\n            targetingFaceID:(NSNumber *)targetingFaceID\n           adjustingPOIMode:(SCManagedCaptureFaceDetectionAdjustingPOIMode)adjustingPOIMode\n{\n    SCTraceODPCompatibleStart(2);\n    self.pointOfInterest = pointOfInterest;\n    self.targetingFaceID = targetingFaceID;\n    if (targetingFaceID) { // If targetingFaceID exists, record the current face bounds.\n        self.targetingFaceBounds = [[self.faceBoundsByFaceID objectForKey:targetingFaceID] CGRectValue];\n    } else { // Otherwise, reset targetingFaceBounds to zero.\n        
self.targetingFaceBounds = CGRectZero;\n    }\n    self.adjustingPOIMode = adjustingPOIMode;\n}\n\n- (BOOL)_isPointOfInterestValid:(CGPoint)pointOfInterest\n{\n    return (pointOfInterest.x >= 0 && pointOfInterest.x <= 1 && pointOfInterest.y >= 0 && pointOfInterest.y <= 1);\n}\n\n- (NSNumber *)_getPreferredFaceIDFromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);\n\n    // Find out the bounds with the max area.\n    __block NSNumber *preferredFaceID = nil;\n    __block CGFloat maxArea = 0;\n    [faceBoundsByFaceID\n        enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {\n            CGRect faceBounds = [obj CGRectValue];\n            CGFloat area = CGRectGetWidth(faceBounds) * CGRectGetHeight(faceBounds);\n            if (area > maxArea) {\n                preferredFaceID = key;\n                maxArea = area;\n            }\n        }];\n\n    return preferredFaceID;\n}\n\n- (CGPoint)_getPointOfInterestWithFaceID:(NSNumber *)faceID\n                          fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCTraceODPCompatibleStart(2);\n    NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:faceID];\n    if (faceBoundsValue) {\n        CGRect faceBounds = [faceBoundsValue CGRectValue];\n        CGPoint point = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));\n        return point;\n    } else {\n        return CGPointMake(-1, -1); // An invalid point.\n    }\n}\n\n/**\n Setting a new focus/exposure point needs high CPU usage, so we only set a new POI when we have to. 
This method is to\n return whether setting this new point if necessary.\n If not, there is no need to change the POI.\n */\n- (BOOL)_shouldChangeToNewPoint:(CGPoint)newPoint\n                  withNewFaceID:(NSNumber *)newFaceID\n                  newFaceBounds:(CGRect)newFaceBounds\n{\n    SCTraceODPCompatibleStart(2);\n    BOOL shouldChange = NO;\n    if (!newFaceID || !self.targetingFaceID ||\n        ![newFaceID isEqualToNumber:self.targetingFaceID]) { // Return YES if it is a new face.\n        shouldChange = YES;\n    } else if (CGRectEqualToRect(self.targetingFaceBounds, CGRectZero) ||\n               !CGRectContainsPoint(self.targetingFaceBounds,\n                                    newPoint)) { // Return YES if the new point if out of the current face bounds.\n        shouldChange = YES;\n    } else {\n        CGFloat currentBoundsArea =\n            CGRectGetWidth(self.targetingFaceBounds) * CGRectGetHeight(self.targetingFaceBounds);\n        CGFloat newBoundsArea = CGRectGetWidth(newFaceBounds) * CGRectGetHeight(newFaceBounds);\n        if (newBoundsArea >= currentBoundsArea * 1.2 ||\n            newBoundsArea <=\n                currentBoundsArea *\n                    0.8) { // Return YES if the area of new bounds if over 20% more or 20% less than the current one.\n            shouldChange = YES;\n        }\n    }\n    return shouldChange;\n}\n\n- (NSNumber *)_getFaceIDOfFaceBoundsContainingPoint:(CGPoint)point\n                                     fromFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);\n    __block NSNumber *faceID = nil;\n    [faceBoundsByFaceID\n        enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {\n            CGRect faceBounds = [obj CGRectValue];\n            if (CGRectContainsPoint(faceBounds, point)) {\n                faceID = key;\n                *stop = YES;\n            }\n     
   }];\n    return faceID;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewLayerController.h",
    "content": "//\n//  SCManagedCapturePreviewLayerController.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCCameraFoundation/SCManagedVideoDataSource.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n#import <Metal/Metal.h>\n#import <UIKit/UIKit.h>\n\n@protocol SCCapturer;\n@class LSAGLView, SCBlackCameraDetector, SCManagedCapturePreviewLayerController;\n\n@protocol SCManagedCapturePreviewLayerControllerDelegate\n\n- (SCBlackCameraDetector *)blackCameraDetectorForManagedCapturePreviewLayerController:\n    (SCManagedCapturePreviewLayerController *)controller;\n- (sc_create_g2s_ticket_f)g2sTicketForManagedCapturePreviewLayerController:\n    (SCManagedCapturePreviewLayerController *)controller;\n\n@end\n\n/**\n * SCManagedCapturePreviewLayerController controls display of frame in a view. 
The controller has 3\n * different methods for this.\n * AVCaptureVideoPreviewLayer: This is a feed coming straight from the camera and does not allow any\n * image processing or modification of the frames displayed.\n * LSAGLView: OpenGL based video for displaying video that is being processed (Lenses etc.)\n * CAMetalLayer: Metal layer drawing textures on a vertex quad for display on screen.\n */\n@interface SCManagedCapturePreviewLayerController : NSObject <SCManagedSampleBufferDisplayController>\n\n@property (nonatomic, strong, readonly) UIView *view;\n\n@property (nonatomic, strong, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer;\n\n@property (nonatomic, strong, readonly) LSAGLView *videoPreviewGLView;\n\n@property (nonatomic, weak) id<SCManagedCapturePreviewLayerControllerDelegate> delegate;\n\n+ (instancetype)sharedInstance;\n\n- (void)pause;\n\n- (void)resume;\n\n- (UIView *)newStandInViewWithRect:(CGRect)rect;\n\n- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer;\n\n// This method returns a token that you can hold on to. As long as the token is hold,\n// an outdated view will be hold unless the app backgrounded.\n- (NSString *)keepDisplayingOutdatedPreview;\n\n// End displaying the outdated frame with an issued keep token. If there is no one holds\n// any token any more, this outdated view will be flushed.\n- (void)endDisplayingOutdatedPreview:(NSString *)keepToken;\n\n// Create views for Metal, this method need to be called on the main thread.\n- (void)setupPreviewLayer;\n\n// Create render pipeline state, setup shaders for Metal, this need to be called off the main thread.\n- (void)setupRenderPipeline;\n\n- (void)applicationDidEnterBackground;\n\n- (void)applicationWillEnterForeground;\n\n- (void)applicationWillResignActive;\n\n- (void)applicationDidBecomeActive;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewLayerController.m",
    "content": "//\n//  SCManagedCapturePreviewLayerController.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturePreviewLayerController.h\"\n\n#import \"SCBlackCameraDetector.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCapturePreviewView.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedCapturerUtils.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/NSData+Random.h>\n#import <SCFoundation/SCCoreGraphicsUtils.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n#define SCLogPreviewLayerInfo(fmt, ...) SCLogCoreCameraInfo(@\"[PreviewLayerController] \" fmt, ##__VA_ARGS__)\n#define SCLogPreviewLayerWarning(fmt, ...) SCLogCoreCameraWarning(@\"[PreviewLayerController] \" fmt, ##__VA_ARGS__)\n#define SCLogPreviewLayerError(fmt, ...) 
SCLogCoreCameraError(@\"[PreviewLayerController] \" fmt, ##__VA_ARGS__)\n\nconst static CGSize kSCManagedCapturePreviewDefaultRenderSize = {\n    .width = 720, .height = 1280,\n};\n\nconst static CGSize kSCManagedCapturePreviewRenderSize1080p = {\n    .width = 1080, .height = 1920,\n};\n\n#if !TARGET_IPHONE_SIMULATOR\n\nstatic NSInteger const kSCMetalCannotAcquireDrawableLimit = 2;\n\n@interface CAMetalLayer (SCSecretFature)\n\n// Call discardContents.\n- (void)sc_secretFeature;\n\n@end\n\n@implementation CAMetalLayer (SCSecretFature)\n\n- (void)sc_secretFeature\n{\n    // \"discardContents\"\n    char buffer[] = {0x9b, 0x96, 0x8c, 0x9c, 0x9e, 0x8d, 0x9b, 0xbc, 0x90, 0x91, 0x8b, 0x9a, 0x91, 0x8b, 0x8c, 0};\n    unsigned long len = strlen(buffer);\n    for (unsigned idx = 0; idx < len; ++idx) {\n        buffer[idx] = ~buffer[idx];\n    }\n    SEL selector = NSSelectorFromString([NSString stringWithUTF8String:buffer]);\n    if ([self respondsToSelector:selector]) {\n        NSMethodSignature *signature = [self methodSignatureForSelector:selector];\n        NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature];\n        [invocation setTarget:self];\n        [invocation setSelector:selector];\n        [invocation invoke];\n    }\n    // For anyone curious, here is the actual implementation for discardContents in 10.3 (With Hopper v4, arm64)\n    // From glance, this seems pretty safe to call.\n    // void -[CAMetalLayer(CAMetalLayerPrivate) discardContents](int arg0)\n    // {\n    //     *(r31 + 0xffffffffffffffe0) = r20;\n    //     *(0xfffffffffffffff0 + r31) = r19;\n    //     r31 = r31 + 0xffffffffffffffe0;\n    //     *(r31 + 0x10) = r29;\n    //     *(0x20 + r31) = r30;\n    //     r29 = r31 + 0x10;\n    //     r19 = *(arg0 + sign_extend_64(*(int32_t *)0x1a6300510));\n    //     if (r19 != 0x0) {\n    //         r0 = loc_1807079dc(*0x1a7811fc8, r19);\n    //         r0 = _CAImageQueueConsumeUnconsumed(*(r19 + 0x10));\n    //         
r0 = _CAImageQueueFlush(*(r19 + 0x10));\n    //         r29 = *(r31 + 0x10);\n    //         r30 = *(0x20 + r31);\n    //         r20 = *r31;\n    //         r19 = *(r31 + 0x10);\n    //         r31 = r31 + 0x20;\n    //         r0 = loc_1807079dc(*0x1a7811fc8, zero_extend_64(0x0));\n    //     } else {\n    //         r29 = *(r31 + 0x10);\n    //         r30 = *(0x20 + r31);\n    //         r20 = *r31;\n    //         r19 = *(r31 + 0x10);\n    //         r31 = r31 + 0x20;\n    //     }\n    //     return;\n    // }\n}\n\n@end\n\n#endif\n\n@interface SCManagedCapturePreviewLayerController () <SCManagedCapturerListener>\n\n@property (nonatomic) BOOL renderSuspended;\n\n@end\n\n@implementation SCManagedCapturePreviewLayerController {\n    SCManagedCapturePreviewView *_view;\n    CGSize _drawableSize;\n    SCQueuePerformer *_performer;\n    FBKVOController *_renderingKVO;\n#if !TARGET_IPHONE_SIMULATOR\n    CAMetalLayer *_metalLayer;\n    id<MTLCommandQueue> _commandQueue;\n    id<MTLRenderPipelineState> _renderPipelineState;\n    CVMetalTextureCacheRef _textureCache;\n    dispatch_semaphore_t _commandBufferSemaphore;\n    // If the current view contains an outdated display (or any display)\n    BOOL _containOutdatedPreview;\n    // If we called empty outdated display already, but for some reason, hasn't emptied it yet.\n    BOOL _requireToFlushOutdatedPreview;\n    NSMutableSet *_tokenSet;\n    NSUInteger _cannotAcquireDrawable;\n#endif\n}\n\n+ (instancetype)sharedInstance\n{\n    static dispatch_once_t onceToken;\n    static SCManagedCapturePreviewLayerController *managedCapturePreviewLayerController;\n    dispatch_once(&onceToken, ^{\n        managedCapturePreviewLayerController = [[SCManagedCapturePreviewLayerController alloc] init];\n    });\n    return managedCapturePreviewLayerController;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n#if !TARGET_IPHONE_SIMULATOR\n        // We only allow one renders at a time (Sorry, no double / 
triple buffering).\n        // It has to be created early here, otherwise integrity of other parts of the code is not\n        // guaranteed.\n        // TODO: I need to reason more about the initialization sequence.\n        _commandBufferSemaphore = dispatch_semaphore_create(1);\n        // Set _renderSuspended to be YES so that we won't render until it is fully setup.\n        _renderSuspended = YES;\n        _tokenSet = [NSMutableSet set];\n#endif\n        // If the screen is less than default size, we should fallback.\n        CGFloat nativeScale = [UIScreen mainScreen].nativeScale;\n        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;\n        CGSize renderSize = [SCDeviceName isIphoneX] ? kSCManagedCapturePreviewRenderSize1080p\n                                                     : kSCManagedCapturePreviewDefaultRenderSize;\n        if (screenSize.width * nativeScale < renderSize.width) {\n            _drawableSize = CGSizeMake(screenSize.width * nativeScale, screenSize.height * nativeScale);\n        } else {\n            _drawableSize = SCSizeIntegral(\n                SCSizeCropToAspectRatio(renderSize, SCSizeGetAspectRatio(SCManagedCapturerAllScreenSize())));\n        }\n        _performer = [[SCQueuePerformer alloc] initWithLabel:\"SCManagedCapturePreviewLayerController\"\n                                            qualityOfService:QOS_CLASS_USER_INITIATED\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCoreCamera];\n\n        _renderingKVO = [[FBKVOController alloc] initWithObserver:self];\n        [_renderingKVO observe:self\n                       keyPath:@keypath(self, renderSuspended)\n                       options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld\n                         block:^(id observer, id object, NSDictionary *change) {\n                             BOOL 
oldValue = [change[NSKeyValueChangeOldKey] boolValue];\n                             BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue];\n                             if (oldValue != newValue) {\n                                 [[_delegate blackCameraDetectorForManagedCapturePreviewLayerController:self]\n                                     capturePreviewDidBecomeVisible:!newValue];\n                             }\n                         }];\n    }\n    return self;\n}\n\n- (void)pause\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SCLogPreviewLayerInfo(@\"pause Metal rendering performer waiting\");\n    [_performer performAndWait:^() {\n        self.renderSuspended = YES;\n    }];\n    SCLogPreviewLayerInfo(@\"pause Metal rendering performer finished\");\n#endif\n}\n\n- (void)resume\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SCLogPreviewLayerInfo(@\"resume Metal rendering performer waiting\");\n    [_performer performAndWait:^() {\n        self.renderSuspended = NO;\n    }];\n    SCLogPreviewLayerInfo(@\"resume Metal rendering performer finished\");\n#endif\n}\n\n- (void)setupPreviewLayer\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SCAssertMainThread();\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n\n    if (!_metalLayer) {\n        _metalLayer = [CAMetalLayer new];\n        SCLogPreviewLayerInfo(@\"setup metalLayer:%@\", _metalLayer);\n\n        if (!_view) {\n            // Create capture preview view and setup the metal layer\n            [self view];\n        } else {\n            [_view setupMetalLayer:_metalLayer];\n        }\n    }\n#endif\n}\n\n- (UIView *)newStandInViewWithRect:(CGRect)rect\n{\n    return [self.view resizableSnapshotViewFromRect:rect afterScreenUpdates:YES withCapInsets:UIEdgeInsetsZero];\n}\n\n- (void)setupRenderPipeline\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    SCAssertNotMainThread();\n    id<MTLDevice> 
device = SCGetManagedCaptureMetalDevice();\n    id<MTLLibrary> shaderLibrary = [device newDefaultLibrary];\n    _commandQueue = [device newCommandQueue];\n    MTLRenderPipelineDescriptor *renderPipelineDescriptor = [MTLRenderPipelineDescriptor new];\n    renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;\n    renderPipelineDescriptor.vertexFunction = [shaderLibrary newFunctionWithName:@\"yuv_vertex_reshape\"];\n    renderPipelineDescriptor.fragmentFunction = [shaderLibrary newFunctionWithName:@\"yuv_fragment_texture\"];\n    MTLVertexDescriptor *vertexDescriptor = [MTLVertexDescriptor vertexDescriptor];\n    vertexDescriptor.attributes[0].format = MTLVertexFormatFloat2; // position\n    vertexDescriptor.attributes[0].offset = 0;\n    vertexDescriptor.attributes[0].bufferIndex = 0;\n    vertexDescriptor.attributes[1].format = MTLVertexFormatFloat2; // texCoords\n    vertexDescriptor.attributes[1].offset = 2 * sizeof(float);\n    vertexDescriptor.attributes[1].bufferIndex = 0;\n    vertexDescriptor.layouts[0].stepRate = 1;\n    vertexDescriptor.layouts[0].stepFunction = MTLVertexStepFunctionPerVertex;\n    vertexDescriptor.layouts[0].stride = 4 * sizeof(float);\n    renderPipelineDescriptor.vertexDescriptor = vertexDescriptor;\n    _renderPipelineState = [device newRenderPipelineStateWithDescriptor:renderPipelineDescriptor error:nil];\n    CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textureCache);\n    _metalLayer.device = device;\n    _metalLayer.drawableSize = _drawableSize;\n    _metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;\n    _metalLayer.framebufferOnly = YES; // It is default to Yes.\n    [_performer performAndWait:^() {\n        self.renderSuspended = NO;\n    }];\n    SCLogPreviewLayerInfo(@\"did setup render pipeline\");\n#endif\n}\n\n- (UIView *)view\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    if (!_view) {\n#if TARGET_IPHONE_SIMULATOR\n        _view = [[SCManagedCapturePreviewView 
alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds\n                                                       aspectRatio:SCSizeGetAspectRatio(_drawableSize)\n                                                        metalLayer:nil];\n#else\n        _view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds\n                                                       aspectRatio:SCSizeGetAspectRatio(_drawableSize)\n                                                        metalLayer:_metalLayer];\n        SCLogPreviewLayerInfo(@\"created SCManagedCapturePreviewView:%@\", _view);\n#endif\n    }\n    return _view;\n}\n\n- (void)setManagedCapturer:(id<SCCapturer>)managedCapturer\n{\n    SCTraceStart();\n    SCLogPreviewLayerInfo(@\"setManagedCapturer:%@\", managedCapturer);\n    if (SCDeviceSupportsMetal()) {\n        [managedCapturer addSampleBufferDisplayController:self context:SCCapturerContext];\n    }\n    [managedCapturer addListener:self];\n}\n\n- (void)applicationDidEnterBackground\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SCAssertMainThread();\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    SCLogPreviewLayerInfo(@\"applicationDidEnterBackground waiting for performer\");\n    [_performer performAndWait:^() {\n        CVMetalTextureCacheFlush(_textureCache, 0);\n        [_tokenSet removeAllObjects];\n        self.renderSuspended = YES;\n    }];\n    SCLogPreviewLayerInfo(@\"applicationDidEnterBackground signal performer finishes\");\n#endif\n}\n\n- (void)applicationWillResignActive\n{\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    SCTraceStart();\n    SCAssertMainThread();\n#if !TARGET_IPHONE_SIMULATOR\n    SCLogPreviewLayerInfo(@\"pause Metal rendering\");\n    [_performer performAndWait:^() {\n        self.renderSuspended = YES;\n    }];\n#endif\n}\n\n- (void)applicationDidBecomeActive\n{\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    SCTraceStart();\n    
SCAssertMainThread();\n#if !TARGET_IPHONE_SIMULATOR\n    SCLogPreviewLayerInfo(@\"resume Metal rendering waiting for performer\");\n    [_performer performAndWait:^() {\n        self.renderSuspended = NO;\n    }];\n    SCLogPreviewLayerInfo(@\"resume Metal rendering performer finished\");\n#endif\n}\n\n- (void)applicationWillEnterForeground\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    SCAssertMainThread();\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    SCLogPreviewLayerInfo(@\"applicationWillEnterForeground waiting for performer\");\n    [_performer performAndWait:^() {\n        self.renderSuspended = NO;\n        if (_containOutdatedPreview && _tokenSet.count == 0) {\n            [self _flushOutdatedPreview];\n        }\n    }];\n    SCLogPreviewLayerInfo(@\"applicationWillEnterForeground performer finished\");\n#endif\n}\n\n- (NSString *)keepDisplayingOutdatedPreview\n{\n    SCTraceStart();\n    NSString *token = [NSData randomBase64EncodedStringOfLength:8];\n#if !TARGET_IPHONE_SIMULATOR\n    SCLogPreviewLayerInfo(@\"keepDisplayingOutdatedPreview waiting for performer\");\n    [_performer performAndWait:^() {\n        [_tokenSet addObject:token];\n    }];\n    SCLogPreviewLayerInfo(@\"keepDisplayingOutdatedPreview performer finished\");\n#endif\n    return token;\n}\n\n- (void)endDisplayingOutdatedPreview:(NSString *)keepToken\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal());\n    // I simply use a lock for this. 
If it becomes a bottleneck, I can figure something else out.\n    SCTraceStart();\n    SCLogPreviewLayerInfo(@\"endDisplayingOutdatedPreview waiting for performer\");\n    [_performer performAndWait:^() {\n        [_tokenSet removeObject:keepToken];\n        if (_tokenSet.count == 0 && _requireToFlushOutdatedPreview && _containOutdatedPreview && !_renderSuspended) {\n            [self _flushOutdatedPreview];\n        }\n    }];\n    SCLogPreviewLayerInfo(@\"endDisplayingOutdatedPreview performer finished\");\n#endif\n}\n\n#pragma mark - SCManagedSampleBufferDisplayController\n\n- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n#if !TARGET_IPHONE_SIMULATOR\n    // Just drop the frame if it is rendering.\n    SC_GUARD_ELSE_RUN_AND_RETURN_VALUE(dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) == 0,\n                                       SCLogPreviewLayerInfo(@\"waiting for commandBufferSemaphore signaled\"), );\n    // Just drop the frame, simple.\n    [_performer performAndWait:^() {\n        if (_renderSuspended) {\n            SCLogGeneralInfo(@\"Preview rendering suspends and current sample buffer is dropped\");\n            dispatch_semaphore_signal(_commandBufferSemaphore);\n            return;\n        }\n        @autoreleasepool {\n            const BOOL isFirstPreviewFrame = !_containOutdatedPreview;\n            if (isFirstPreviewFrame) {\n                // Signal that we receieved the first frame (otherwise this will be YES already).\n                SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();\n                sc_create_g2s_ticket_f func = [_delegate g2sTicketForManagedCapturePreviewLayerController:self];\n                SCG2SActivateManiphestTicketQueueWithTicketCreationFunction(func);\n            }\n            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n\n            CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);\n            size_t pixelWidth = 
CVPixelBufferGetWidth(imageBuffer);\n            size_t pixelHeight = CVPixelBufferGetHeight(imageBuffer);\n            id<MTLTexture> yTexture =\n                SCMetalTextureFromPixelBuffer(imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);\n            id<MTLTexture> cbCrTexture =\n                SCMetalTextureFromPixelBuffer(imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);\n            CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);\n\n            SC_GUARD_ELSE_RUN_AND_RETURN(yTexture && cbCrTexture, dispatch_semaphore_signal(_commandBufferSemaphore));\n            id<MTLCommandBuffer> commandBuffer = _commandQueue.commandBuffer;\n            id<CAMetalDrawable> drawable = _metalLayer.nextDrawable;\n            if (!drawable) {\n                // Count how many times I cannot acquire drawable.\n                ++_cannotAcquireDrawable;\n                if (_cannotAcquireDrawable >= kSCMetalCannotAcquireDrawableLimit) {\n                    // Calling [_metalLayer discardContents] to flush the CAImageQueue\n                    SCLogGeneralInfo(@\"Cannot acquire drawable, reboot Metal ..\");\n                    [_metalLayer sc_secretFeature];\n                }\n                dispatch_semaphore_signal(_commandBufferSemaphore);\n                return;\n            }\n            _cannotAcquireDrawable = 0; // Reset to 0 in case we can acquire drawable.\n            MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new];\n            renderPassDescriptor.colorAttachments[0].texture = drawable.texture;\n            id<MTLRenderCommandEncoder> renderEncoder =\n                [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];\n            [renderEncoder setRenderPipelineState:_renderPipelineState];\n            [renderEncoder setFragmentTexture:yTexture atIndex:0];\n            [renderEncoder setFragmentTexture:cbCrTexture atIndex:1];\n            // TODO: Prob this out of the 
image buffer.\n            // 90 clock-wise rotated texture coordinate.\n            // Also do aspect fill.\n            float normalizedHeight, normalizedWidth;\n            if (pixelWidth * _drawableSize.width > _drawableSize.height * pixelHeight) {\n                normalizedHeight = 1.0;\n                normalizedWidth = pixelWidth * (_drawableSize.width / pixelHeight) / _drawableSize.height;\n            } else {\n                normalizedHeight = pixelHeight * (_drawableSize.height / pixelWidth) / _drawableSize.width;\n                normalizedWidth = 1.0;\n            }\n            const float vertices[] = {\n                -normalizedHeight, -normalizedWidth, 1, 1, // lower left  -> upper right\n                normalizedHeight,  -normalizedWidth, 1, 0, // lower right -> lower right\n                -normalizedHeight, normalizedWidth,  0, 1, // upper left  -> upper left\n                normalizedHeight,  normalizedWidth,  0, 0, // upper right -> lower left\n            };\n            [renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0];\n            [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4];\n            [renderEncoder endEncoding];\n            // I need to set a minimum duration for the drawable.\n            // There is a bug on iOS 10.3, if I present as soon as I can, I am keeping the GPU\n            // at 30fps even you swipe between views, that causes undesirable visual jarring.\n            // By set a minimum duration, even it is incrediably small (I tried 10ms, and here 60fps works),\n            // the OS seems can adjust the frame rate much better when swiping.\n            // This is an iOS 10.3 new method.\n            if ([commandBuffer respondsToSelector:@selector(presentDrawable:afterMinimumDuration:)]) {\n                [(id)commandBuffer presentDrawable:drawable afterMinimumDuration:(1.0 / 60)];\n            } else {\n                [commandBuffer 
presentDrawable:drawable];\n            }\n            [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {\n                dispatch_semaphore_signal(_commandBufferSemaphore);\n            }];\n            if (isFirstPreviewFrame) {\n                if ([drawable respondsToSelector:@selector(addPresentedHandler:)] &&\n                    [drawable respondsToSelector:@selector(presentedTime)]) {\n                    [(id)drawable addPresentedHandler:^(id<MTLDrawable> presentedDrawable) {\n                        SCGhostToSnappableSignalDidRenderFirstPreviewFrame([(id)presentedDrawable presentedTime]);\n                    }];\n                } else {\n                    [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> commandBuffer) {\n                        // Using CACurrentMediaTime to approximate.\n                        SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());\n                    }];\n                }\n            }\n            // We enqueued an sample buffer to display, therefore, it contains an outdated display (to be clean up).\n            _containOutdatedPreview = YES;\n            [commandBuffer commit];\n        }\n    }];\n#endif\n}\n\n- (void)flushOutdatedPreview\n{\n    SCTraceStart();\n#if !TARGET_IPHONE_SIMULATOR\n    // This method cannot drop frames (otherwise we will have residual on the screen).\n    SCLogPreviewLayerInfo(@\"flushOutdatedPreview waiting for performer\");\n    [_performer performAndWait:^() {\n        _requireToFlushOutdatedPreview = YES;\n        SC_GUARD_ELSE_RETURN(!_renderSuspended);\n        // Have to make sure we have no token left before return.\n        SC_GUARD_ELSE_RETURN(_tokenSet.count == 0);\n        [self _flushOutdatedPreview];\n    }];\n    SCLogPreviewLayerInfo(@\"flushOutdatedPreview performer finished\");\n#endif\n}\n\n- (void)_flushOutdatedPreview\n{\n    SCTraceStart();\n    SCAssertPerformer(_performer);\n#if !TARGET_IPHONE_SIMULATOR\n  
  SCLogPreviewLayerInfo(@\"flushOutdatedPreview containOutdatedPreview:%d\", _containOutdatedPreview);\n    // I don't care if this has renderSuspended or not, assuming I did the right thing.\n    // Emptied, no need to do this any more on foregrounding.\n    SC_GUARD_ELSE_RETURN(_containOutdatedPreview);\n    _containOutdatedPreview = NO;\n    _requireToFlushOutdatedPreview = NO;\n    [_metalLayer sc_secretFeature];\n#endif\n}\n\n#pragma mark - SCManagedCapturerListener\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    // Force to load the view\n    [self view];\n    _view.videoPreviewLayer = videoPreviewLayer;\n    SCLogPreviewLayerInfo(@\"didChangeVideoPreviewLayer:%@\", videoPreviewLayer);\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    // Force to load the view\n    [self view];\n    _view.videoPreviewGLView = videoPreviewGLView;\n    SCLogPreviewLayerInfo(@\"didChangeVideoPreviewGLView:%@\", videoPreviewGLView);\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewView.h",
    "content": "//\n//  SCManagedCapturePreviewView.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <UIKit/UIKit.h>\n\n@class LSAGLView;\n\n@interface SCManagedCapturePreviewView : UIView\n\n- (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE;\n\n- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer;\n// This method is called only once in case the metalLayer is nil previously.\n- (void)setupMetalLayer:(CALayer *)metalLayer;\n\n@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;\n@property (nonatomic, strong) LSAGLView *videoPreviewGLView;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewView.m",
    "content": "//\n//  SCManagedCapturePreviewView.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturePreviewView.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCManagedCapturePreviewViewDebugView.h\"\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/SCCoreGraphicsUtils.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <Looksery/LSAGLView.h>\n\n@implementation SCManagedCapturePreviewView {\n    CGFloat _aspectRatio;\n    CALayer *_containerLayer;\n    CALayer *_metalLayer;\n    SCManagedCapturePreviewViewDebugView *_debugView;\n}\n\n- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    self = [super initWithFrame:frame];\n    if (self) {\n        _aspectRatio = aspectRatio;\n        if (SCDeviceSupportsMetal()) {\n            [CATransaction begin];\n            [CATransaction setDisableActions:YES];\n            _metalLayer = metalLayer;\n            _metalLayer.frame = [self _layerFrame];\n            [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];\n            [CATransaction commit];\n        } else {\n            _containerLayer = [[CALayer alloc] init];\n            _containerLayer.frame = [self _layerFrame];\n            // Using a container layer such that the software zooming is happening on this layer\n            [self.layer insertSublayer:_containerLayer below:[self.layer sublayers][0]];\n        }\n        if ([self _shouldShowDebugView]) {\n            _debugView = [[SCManagedCapturePreviewViewDebugView alloc] init];\n            [self addSubview:_debugView];\n        }\n    }\n    return self;\n}\n\n- (void)_layoutVideoPreviewLayer\n{\n    SCAssertMainThread();\n    [CATransaction begin];\n    [CATransaction setDisableActions:YES];\n   
 if (SCDeviceSupportsMetal()) {\n        _metalLayer.frame = [self _layerFrame];\n    } else {\n        if (_videoPreviewLayer) {\n            SCLogGeneralInfo(@\"container layer frame %@, video preview layer frame %@\",\n                             NSStringFromCGRect(_containerLayer.frame), NSStringFromCGRect(_videoPreviewLayer.frame));\n        }\n        // Using bounds because we don't really care about the position at this point.\n        _containerLayer.frame = [self _layerFrame];\n        _videoPreviewLayer.frame = _containerLayer.bounds;\n        _videoPreviewLayer.position =\n            CGPointMake(CGRectGetWidth(_containerLayer.bounds) * 0.5, CGRectGetHeight(_containerLayer.bounds) * 0.5);\n    }\n    [CATransaction commit];\n}\n\n- (void)_layoutVideoPreviewGLView\n{\n    SCCAssertMainThread();\n    _videoPreviewGLView.frame = [self _layerFrame];\n}\n\n- (CGRect)_layerFrame\n{\n    CGRect frame = SCRectMakeWithCenterAndSize(\n        SCRectGetMid(self.bounds), SCSizeIntegral(SCSizeExpandToAspectRatio(self.bounds.size, _aspectRatio)));\n\n    CGFloat x = frame.origin.x;\n    x = isnan(x) ? 0.0 : (isfinite(x) ? x : INFINITY);\n\n    CGFloat y = frame.origin.y;\n    y = isnan(y) ? 0.0 : (isfinite(y) ? y : INFINITY);\n\n    CGFloat width = frame.size.width;\n    width = isnan(width) ? 0.0 : (isfinite(width) ? width : INFINITY);\n\n    CGFloat height = frame.size.height;\n    height = isnan(height) ? 0.0 : (isfinite(height) ? 
height : INFINITY);\n\n    return CGRectMake(x, y, width, height);\n}\n\n- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer\n{\n    SCAssertMainThread();\n    if (_videoPreviewLayer != videoPreviewLayer) {\n        [_videoPreviewLayer removeFromSuperlayer];\n        _videoPreviewLayer = videoPreviewLayer;\n        [_containerLayer addSublayer:_videoPreviewLayer];\n        [self _layoutVideoPreviewLayer];\n    }\n}\n\n- (void)setupMetalLayer:(CALayer *)metalLayer\n{\n    SCAssert(!_metalLayer, @\"_metalLayer should be nil.\");\n    SCAssert(metalLayer, @\"metalLayer must exists.\");\n    SCAssertMainThread();\n    _metalLayer = metalLayer;\n    [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]];\n    [self _layoutVideoPreviewLayer];\n}\n\n- (void)setVideoPreviewGLView:(LSAGLView *)videoPreviewGLView\n{\n    SCAssertMainThread();\n    if (_videoPreviewGLView != videoPreviewGLView) {\n        [_videoPreviewGLView removeFromSuperview];\n        _videoPreviewGLView = videoPreviewGLView;\n        [self addSubview:_videoPreviewGLView];\n        [self _layoutVideoPreviewGLView];\n    }\n}\n\n#pragma mark - Overridden methods\n\n- (void)layoutSubviews\n{\n    SCAssertMainThread();\n    [super layoutSubviews];\n    [self _layoutVideoPreviewLayer];\n    [self _layoutVideoPreviewGLView];\n    [self _layoutDebugViewIfNeeded];\n}\n\n- (void)setHidden:(BOOL)hidden\n{\n    SCAssertMainThread();\n    [super setHidden:hidden];\n    if (hidden) {\n        SCLogGeneralInfo(@\"[SCManagedCapturePreviewView] - isHidden is being set to YES\");\n    }\n}\n\n#pragma mark - Debug View\n\n- (BOOL)_shouldShowDebugView\n{\n    // Only show debug view in internal builds and tweak settings are turned on.\n    return SCIsInternalBuild() &&\n           (SCCameraTweaksEnableFocusPointObservation() || SCCameraTweaksEnableExposurePointObservation());\n}\n\n- (void)_layoutDebugViewIfNeeded\n{\n    SCAssertMainThread();\n    SC_GUARD_ELSE_RETURN([self 
_shouldShowDebugView]);\n    _debugView.frame = self.bounds;\n    [self bringSubviewToFront:_debugView];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewViewDebugView.h",
    "content": "//\n//  SCManagedCapturePreviewViewDebugView.h\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 1/19/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <UIKit/UIKit.h>\n\n@interface SCManagedCapturePreviewViewDebugView : UIView\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturePreviewViewDebugView.m",
    "content": "//\n//  SCManagedCapturePreviewViewDebugView.m\n//  Snapchat\n//\n//  Created by Jiyang Zhu on 1/19/18.\n//  Copyright © 2018 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturePreviewViewDebugView.h\"\n\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerListener.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/UIFont+AvenirNext.h>\n\n@import CoreText;\n\nstatic CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth = 1.0;\nstatic CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairWidth = 20.0;\n\n@interface SCManagedCapturePreviewViewDebugView () <SCManagedCapturerListener>\n\n@property (assign, nonatomic) CGPoint focusPoint;\n@property (assign, nonatomic) CGPoint exposurePoint;\n@property (strong, nonatomic) NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID;\n\n@end\n\n@implementation SCManagedCapturePreviewViewDebugView\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        self.userInteractionEnabled = NO;\n        self.backgroundColor = [UIColor clearColor];\n        _focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];\n        _exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];\n        [[SCManagedCapturer sharedInstance] addListener:self];\n    }\n    return self;\n}\n\n- (void)drawRect:(CGRect)rect\n{\n    CGContextRef context = UIGraphicsGetCurrentContext();\n\n    if (self.focusPoint.x > 0 || self.focusPoint.y > 0) {\n        [self _drawCrossHairAtPoint:self.focusPoint inContext:context withColor:[UIColor greenColor] isXShaped:YES];\n    }\n\n    if (self.exposurePoint.x > 0 || self.exposurePoint.y > 0) {\n        [self _drawCrossHairAtPoint:self.exposurePoint inContext:context withColor:[UIColor yellowColor] isXShaped:NO];\n    }\n\n    if (self.faceBoundsByFaceID.count > 0) {\n        [self.faceBoundsByFaceID\n            
enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {\n                CGRect faceRect = [obj CGRectValue];\n                NSInteger faceID = [key integerValue];\n                [self _drawRectangle:faceRect\n                                text:[NSString sc_stringWithFormat:@\"ID: %@\", key]\n                           inContext:context\n                           withColor:[UIColor colorWithRed:((faceID % 3) == 0)\n                                                     green:((faceID % 3) == 1)\n                                                      blue:((faceID % 3) == 2)\n                                                     alpha:1.0]];\n            }];\n    }\n}\n\n- (void)dealloc\n{\n    [[SCManagedCapturer sharedInstance] removeListener:self];\n}\n\n/**\n Draw a crosshair with center point, context, color and shape.\n\n @param isXShaped \"X\" or \"+\"\n */\n- (void)_drawCrossHairAtPoint:(CGPoint)center\n                    inContext:(CGContextRef)context\n                    withColor:(UIColor *)color\n                    isXShaped:(BOOL)isXShaped\n{\n    CGFloat width = kSCManagedCapturePreviewViewDebugViewCrossHairWidth;\n\n    CGContextSetStrokeColorWithColor(context, color.CGColor);\n    CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);\n    CGContextBeginPath(context);\n\n    if (isXShaped) {\n        CGContextMoveToPoint(context, center.x - width / 2, center.y - width / 2);\n        CGContextAddLineToPoint(context, center.x + width / 2, center.y + width / 2);\n        CGContextMoveToPoint(context, center.x + width / 2, center.y - width / 2);\n        CGContextAddLineToPoint(context, center.x - width / 2, center.y + width / 2);\n    } else {\n        CGContextMoveToPoint(context, center.x - width / 2, center.y);\n        CGContextAddLineToPoint(context, center.x + width / 2, center.y);\n        CGContextMoveToPoint(context, center.x, center.y - width / 2);\n      
  CGContextAddLineToPoint(context, center.x, center.y + width / 2);\n    }\n\n    CGContextStrokePath(context);\n}\n\n/**\n Draw a rectangle, with a text on the top left.\n */\n- (void)_drawRectangle:(CGRect)rect text:(NSString *)text inContext:(CGContextRef)context withColor:(UIColor *)color\n{\n    CGContextSetStrokeColorWithColor(context, color.CGColor);\n    CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth);\n    CGContextBeginPath(context);\n\n    CGContextMoveToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));\n    CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMaxY(rect));\n    CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMaxY(rect));\n    CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMinY(rect));\n    CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect));\n\n    NSMutableParagraphStyle *textStyle = [[NSMutableParagraphStyle alloc] init];\n    textStyle.alignment = NSTextAlignmentLeft;\n    NSDictionary *attributes = @{\n        NSFontAttributeName : [UIFont boldSystemFontOfSize:16],\n        NSForegroundColorAttributeName : color,\n        NSParagraphStyleAttributeName : textStyle\n    };\n\n    [text drawInRect:rect withAttributes:attributes];\n\n    CGContextStrokePath(context);\n}\n\n- (CGPoint)_convertPointOfInterest:(CGPoint)point\n{\n    SCAssertMainThread();\n    CGPoint convertedPoint =\n        CGPointMake((1 - point.y) * CGRectGetWidth(self.bounds), point.x * CGRectGetHeight(self.bounds));\n    if ([[SCManagedCapturer sharedInstance] isVideoMirrored]) {\n        convertedPoint.x = CGRectGetWidth(self.bounds) - convertedPoint.x;\n    }\n    return convertedPoint;\n}\n\n- (NSDictionary<NSNumber *, NSValue *> *)_convertFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    SCAssertMainThread();\n    NSMutableDictionary<NSNumber *, NSValue *> *convertedFaceBoundsByFaceID =\n        [NSMutableDictionary 
dictionaryWithCapacity:faceBoundsByFaceID.count];\n    for (NSNumber *key in faceBoundsByFaceID.allKeys) {\n        CGRect faceBounds = [[faceBoundsByFaceID objectForKey:key] CGRectValue];\n        CGRect convertedBounds = CGRectMake(CGRectGetMinY(faceBounds) * CGRectGetWidth(self.bounds),\n                                            CGRectGetMinX(faceBounds) * CGRectGetHeight(self.bounds),\n                                            CGRectGetHeight(faceBounds) * CGRectGetWidth(self.bounds),\n                                            CGRectGetWidth(faceBounds) * CGRectGetHeight(self.bounds));\n        if (![[SCManagedCapturer sharedInstance] isVideoMirrored]) {\n            convertedBounds.origin.x = CGRectGetWidth(self.bounds) - CGRectGetMaxX(convertedBounds);\n        }\n        [convertedFaceBoundsByFaceID setObject:[NSValue valueWithCGRect:convertedBounds] forKey:key];\n    }\n    return convertedFaceBoundsByFaceID;\n}\n\n#pragma mark - SCManagedCapturerListener\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint\n{\n    runOnMainThreadAsynchronouslyIfNecessary(^{\n        self.exposurePoint = [self _convertPointOfInterest:exposurePoint];\n        [self setNeedsDisplay];\n    });\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint\n{\n    runOnMainThreadAsynchronouslyIfNecessary(^{\n        self.focusPoint = [self _convertPointOfInterest:focusPoint];\n        [self setNeedsDisplay];\n    });\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    runOnMainThreadAsynchronouslyIfNecessary(^{\n        self.faceBoundsByFaceID = [self _convertFaceBounds:faceBoundsByFaceID];\n        [self setNeedsDisplay];\n    });\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state\n{\n    
runOnMainThreadAsynchronouslyIfNecessary(^{\n        self.faceBoundsByFaceID = nil;\n        self.focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];\n        self.exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)];\n        [self setNeedsDisplay];\n    });\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureSession.h",
    "content": "//\n//  SCManagedCaptureSession.h\n//  Snapchat\n//\n//  Created by Derek Wang on 02/03/2018.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n/**\n `SCManagedCaptureSession` is a wrapper class of `AVCaptureSession`. The purpose of this class is to provide additional\n functionalities to `AVCaptureSession`.\n For example, for black camera detection, we need to monitor when some method is called. Another example is that we can\n treat it as a more stable version of `AVCaptureSession` by moving some `AVCaptureSession` fixing logic to this class,\n and it provides reliable interfaces to the outside. That would be the next step.\n It also tries to mimic the `AVCaptureSession` by implmenting some methods in `AVCaptureSession`. The original methods\n in `AVCaptureSession` should not be used anymore\n */\n\n@class SCBlackCameraDetector;\n\nNS_ASSUME_NONNULL_BEGIN\n@interface SCManagedCaptureSession : NSObject\n\n/**\n Expose avSession property\n */\n@property (nonatomic, strong, readonly) AVCaptureSession *avSession;\n\n/**\n Expose avSession isRunning property for convenience.\n */\n@property (nonatomic, readonly, assign) BOOL isRunning;\n\n/**\n Wrap [AVCaptureSession startRunning] method. Monitor startRunning method. [AVCaptureSession startRunning] should not be\n called\n */\n- (void)startRunning;\n/**\n Wrap [AVCaptureSession stopRunning] method. Monitor stopRunning method. [AVCaptureSession stopRunning] should not be\n called\n */\n- (void)stopRunning;\n\n/**\n Wrap [AVCaptureSession beginConfiguration]. Monitor beginConfiguration method\n */\n- (void)beginConfiguration;\n/**\n Wrap [AVCaptureSession commitConfiguration]. Monitor commitConfiguration method\n */\n- (void)commitConfiguration;\n/**\n Configurate internal AVCaptureSession with block\n @params block. 
The configuration block, executed between beginConfiguration and commitConfiguration; it takes no parameters\n */\n- (void)performConfiguration:(void (^)(void))block;\n\n- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER;\nSC_INIT_AND_NEW_UNAVAILABLE\n\n@end\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCaptureSession.m",
    "content": "//\n//  SCManagedCaptureSession.m\n//  Snapchat\n//\n//  Created by Derek Wang on 02/03/2018.\n//\n\n#import \"SCManagedCaptureSession.h\"\n\n#import \"SCBlackCameraDetector.h\"\n\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedCaptureSession () {\n    SCBlackCameraDetector *_blackCameraDetector;\n}\n\n@end\n\n@implementation SCManagedCaptureSession\n\n- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector\n{\n    self = [super init];\n    if (self) {\n        _avSession = [[AVCaptureSession alloc] init];\n        _blackCameraDetector = detector;\n    }\n    return self;\n}\n\n- (void)startRunning\n{\n    SCTraceODPCompatibleStart(2);\n    [_blackCameraDetector sessionWillCallStartRunning];\n    [_avSession startRunning];\n    [_blackCameraDetector sessionDidCallStartRunning];\n}\n\n- (void)stopRunning\n{\n    SCTraceODPCompatibleStart(2);\n    [_blackCameraDetector sessionWillCallStopRunning];\n    [_avSession stopRunning];\n    [_blackCameraDetector sessionDidCallStopRunning];\n}\n\n- (void)performConfiguration:(nonnull void (^)(void))block\n{\n    SC_GUARD_ELSE_RETURN(block);\n    [self beginConfiguration];\n    block();\n    [self commitConfiguration];\n}\n\n- (void)beginConfiguration\n{\n    [_avSession beginConfiguration];\n}\n\n- (void)commitConfiguration\n{\n    SCTraceODPCompatibleStart(2);\n    [_blackCameraDetector sessionWillCommitConfiguration];\n    [_avSession commitConfiguration];\n    [_blackCameraDetector sessionDidCommitConfiguration];\n}\n\n- (BOOL)isRunning\n{\n    return _avSession.isRunning;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturer.h",
    "content": "//  SCManagedCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/20/15.\n\n#import \"SCCapturer.h\"\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedCapturerUtils.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n  SCManagedCapturer is a shell class. Its job is to provide an singleton instance which follows protocol of\n  SCManagedCapturerImpl. The reason we use this pattern is because we are building SCManagedCapturerV2. This setup\n  offers\n  possbility for us to code V2 without breaking the existing app, and can test the new implementation via Tweak.\n */\n\n@interface SCManagedCapturer : NSObject\n\n+ (id<SCCapturer>)sharedInstance;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturer.m",
    "content": "//\n//  SCManagedCapturer.m\n//  Snapchat\n//\n//  Created by Lin Jia on 9/28/17.\n//\n\n#import \"SCManagedCapturer.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureCore.h\"\n#import \"SCManagedCapturerV1.h\"\n\n@implementation SCManagedCapturer\n\n+ (id<SCCapturer>)sharedInstance\n{\n    static dispatch_once_t onceToken;\n    static id<SCCapturer> managedCapturer;\n    dispatch_once(&onceToken, ^{\n        managedCapturer = [[SCCaptureCore alloc] init];\n    });\n    return managedCapturer;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerARSessionHandler.h",
    "content": "//\n//  SCManagedCapturerARSessionHandler.h\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 16/03/2018.\n//\n// This class handles the AVCaptureSession event when an ARSession is enabled.\n// stopARSessionRunning blocks until the AVCaptureSessionDidStopRunningNotification event has been received\n// successfully; after that, we can restart the AVCaptureSession gracefully.\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n@interface SCManagedCapturerARSessionHandler : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;\n\n- (void)stopObserving;\n\n- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0);\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerARSessionHandler.m",
    "content": "//\n//  SCManagedCapturerARSessionHandler.m\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 16/03/2018.\n//\n\n#import \"SCManagedCapturerARSessionHandler.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCaptureSession.h\"\n\n#import <SCBase/SCAvailability.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@import ARKit;\n\nstatic CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2;\n\n@interface SCManagedCapturerARSessionHandler () {\n    SCCaptureResource *__weak _captureResource;\n    dispatch_semaphore_t _arSesssionShutdownSemaphore;\n}\n\n@end\n\n@implementation SCManagedCapturerARSessionHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"\");\n        _captureResource = captureResource;\n        _arSesssionShutdownSemaphore = dispatch_semaphore_create(0);\n    }\n    return self;\n}\n\n- (void)stopObserving\n{\n    [[NSNotificationCenter defaultCenter] removeObserver:self\n                                                    name:AVCaptureSessionDidStopRunningNotification\n                                                  object:nil];\n}\n\n- (void)stopARSessionRunning\n{\n    SCAssertPerformer(_captureResource.queuePerformer);\n    SCAssert(SC_AT_LEAST_IOS_11, @\"Shoule be only call from iOS 11+\");\n    if (@available(iOS 11.0, *)) {\n        // ARSession stops its internal AVCaptureSession asynchronously. 
We listen for its callback and actually restart\n        // our own capture session once it's finished shutting down so the two ARSessions don't conflict.\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_completeARSessionShutdown:)\n                                                     name:AVCaptureSessionDidStopRunningNotification\n                                                   object:nil];\n        [_captureResource.arSession pause];\n        dispatch_semaphore_wait(\n            _arSesssionShutdownSemaphore,\n            dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC)));\n    }\n}\n\n- (void)_completeARSessionShutdown:(NSNotification *)note\n{\n    // This notification is only registered for IMMEDIATELY before arkit shutdown.\n    // Explicitly guard that the notification object IS NOT the main session's.\n    SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]);\n    [[NSNotificationCenter defaultCenter] removeObserver:self\n                                                    name:AVCaptureSessionDidStopRunningNotification\n                                                  object:nil];\n    dispatch_semaphore_signal(_arSesssionShutdownSemaphore);\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerListener.h",
    "content": "//#!announcer.rb\n//\n//  SCManagedCaptuerListener\n//  Snapchat\n//\n//  Created by Liu Liu on 4/23/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCCapturer.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedRecordedVideo.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCFoundation/SCFuture.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@class SCManagedCapturer;\n@class SCManagedCapturerState;\n@class LSAGLView;\n@class SCManagedCapturerSampleMetadata;\n\n@protocol SCManagedCapturerListener <NSObject>\n\n@optional\n\n// All these calbacks are invoked on main queue\n\n// Start / stop / reset\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state;\n\n// Change state methods\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState 
*)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state;\n\n// The video preview layer is not maintained as a state, therefore, its change is not related to the state of\n// the camera at all, listener show only manage the setup of the videoPreviewLayer.\n// Since the AVCaptureVideoPreviewLayer can only attach to one AVCaptureSession per app, it is recommended you\n// have a view and controller which manages the video preview layer, and for upper layer, only manage that view\n// or view controller, which maintains the pointer consistency. The video preview layer is required to recreate\n// every now and then because otherwise we will have cases that the old video preview layer may contain\n// residual images.\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView;\n\n// Video recording-related methods\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n didBeginVideoRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n didBeginAudioRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    willFinishRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n    recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture\n              videoSize:(CGSize)videoSize\n       
placeholderImage:(UIImage *)placeholderImage;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didFinishRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n          recordedVideo:(SCManagedRecordedVideo *)recordedVideo;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n       didFailRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n                  error:(NSError *)error;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didCancelRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n            didGetError:(NSError *)error\n                forType:(SCManagedVideoCapturerInfoType)type\n                session:(SCVideoCaptureSessionInfo)session;\n\n- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;\n\n// Photo methods\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n       willCapturePhoto:(SCManagedCapturerState *)state\n         sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata;\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state;\n\n- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state;\n\n- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state;\n\n// Face detection\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID;\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer 
didChangeExposurePoint:(CGPoint)exposurePoint;\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint;\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerListenerAnnouncer.h",
    "content": "// Generated by the announcer.rb  DO NOT EDIT!!\n\n#import \"SCManagedCapturerListener.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCManagedCapturerListenerAnnouncer : NSObject <SCManagedCapturerListener>\n\n- (BOOL)addListener:(id<SCManagedCapturerListener>)listener;\n- (void)removeListener:(id<SCManagedCapturerListener>)listener;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm",
    "content": "// Generated by the announcer.rb  DO NOT EDIT!!\n\n#import \"SCManagedCapturerListenerAnnouncer.h\"\n\n#include <mutex>\nusing std::lock_guard;\nusing std::mutex;\n#include <vector>\nusing std::find;\nusing std::make_shared;\nusing std::shared_ptr;\nusing std::vector;\n\n@implementation SCManagedCapturerListenerAnnouncer {\n    mutex _mutex;\n    shared_ptr<vector<__weak id<SCManagedCapturerListener>>> _listeners;\n}\n\n- (NSString *)description\n{\n    auto listeners = atomic_load(&self->_listeners);\n    NSMutableString *desc = [NSMutableString string];\n    [desc appendFormat:@\"<SCManagedCapturerListenerAnnouncer %p>: [\", self];\n    for (int i = 0; i < listeners->size(); ++i) {\n        [desc appendFormat:@\"%@\", (*listeners)[i]];\n        if (i != listeners->size() - 1) {\n            [desc appendString:@\", \"];\n        }\n    }\n    [desc appendString:@\"]\"];\n    return desc;\n}\n\n- (BOOL)addListener:(id<SCManagedCapturerListener>)listener\n{\n    lock_guard<mutex> lock(_mutex);\n    auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();\n    if (_listeners != nil) {\n        // The listener we want to add already exists\n        if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {\n            return NO;\n        }\n        for (auto &one : *_listeners) {\n            if (one != nil) {\n                listeners->push_back(one);\n            }\n        }\n        listeners->push_back(listener);\n        atomic_store(&self->_listeners, listeners);\n    } else {\n        listeners->push_back(listener);\n        atomic_store(&self->_listeners, listeners);\n    }\n    return YES;\n}\n\n- (void)removeListener:(id<SCManagedCapturerListener>)listener\n{\n    lock_guard<mutex> lock(_mutex);\n    if (_listeners == nil) {\n        return;\n    }\n    // If the only item in the listener list is the one we want to remove, store it back to nil again\n    if (_listeners->size() == 1 && 
(*_listeners)[0] == listener) {\n        atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedCapturerListener>>>());\n        return;\n    }\n    auto listeners = make_shared<vector<__weak id<SCManagedCapturerListener>>>();\n    for (auto &one : *_listeners) {\n        if (one != nil && one != listener) {\n            listeners->push_back(one);\n        }\n    }\n    atomic_store(&self->_listeners, listeners);\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) {\n                [listener managedCapturer:managedCapturer didStartRunning:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) {\n                [listener managedCapturer:managedCapturer didStopRunning:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) {\n                [listener managedCapturer:managedCapturer didResetFromRuntimeError:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state\n{\n    auto listeners = 
atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) {\n                [listener managedCapturer:managedCapturer didChangeState:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) {\n                [listener managedCapturer:managedCapturer didChangeNightModeActive:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) {\n                [listener managedCapturer:managedCapturer didChangePortraitModeActive:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) {\n                [listener managedCapturer:managedCapturer didChangeFlashActive:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if 
(listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) {\n                [listener managedCapturer:managedCapturer didChangeLensesActive:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) {\n                [listener managedCapturer:managedCapturer didChangeARSessionActive:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) {\n                [listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) {\n                [listener managedCapturer:managedCapturer didChangeZoomFactor:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state\n{\n    auto listeners = 
atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) {\n                [listener managedCapturer:managedCapturer didChangeLowLightCondition:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) {\n                [listener managedCapturer:managedCapturer didChangeAdjustingExposure:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) {\n                [listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) {\n                [listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer 
didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) {\n                [listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n didBeginVideoRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) {\n                [listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n didBeginAudioRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) {\n                [listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    willFinishRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n    recordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture\n              videoSize:(CGSize)videoSize\n       placeholderImage:(UIImage *)placeholderImage\n{\n    auto listeners = 
atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:\n                                                   willFinishRecording:\n                                                               session:\n                                                   recordedVideoFuture:\n                                                             videoSize:\n                                                      placeholderImage:)]) {\n                [listener managedCapturer:managedCapturer\n                      willFinishRecording:state\n                                  session:session\n                      recordedVideoFuture:recordedVideoFuture\n                                videoSize:videoSize\n                         placeholderImage:placeholderImage];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didFinishRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n          recordedVideo:(SCManagedRecordedVideo *)recordedVideo\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) {\n                [listener managedCapturer:managedCapturer\n                       didFinishRecording:state\n                                  session:session\n                            recordedVideo:recordedVideo];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n       didFailRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n                  error:(NSError *)error\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n 
       for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) {\n                [listener managedCapturer:managedCapturer didFailRecording:state session:session error:error];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n     didCancelRecording:(SCManagedCapturerState *)state\n                session:(SCVideoCaptureSessionInfo)session\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) {\n                [listener managedCapturer:managedCapturer didCancelRecording:state session:session];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n            didGetError:(NSError *)error\n                forType:(SCManagedVideoCapturerInfoType)type\n                session:(SCVideoCaptureSessionInfo)session\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) {\n                [listener managedCapturer:managedCapturer didGetError:error forType:type session:session];\n            }\n        }\n    }\n}\n\n- (void)managedCapturerDidCallLenseResume:(id<SCCapturer>)managedCapturer session:(SCVideoCaptureSessionInfo)session\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) {\n                [listener managedCapturerDidCallLenseResume:managedCapturer session:session];\n            }\n     
   }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) {\n                [listener managedCapturer:managedCapturer\n                    didAppendVideoSampleBuffer:sampleBuffer\n                                sampleMetadata:sampleMetadata];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n       willCapturePhoto:(SCManagedCapturerState *)state\n         sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) {\n                [listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) {\n                [listener managedCapturer:managedCapturer didCapturePhoto:state];\n            }\n        }\n    }\n}\n\n- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for 
(id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:isUnderDeviceMotion:)]) {\n                return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state];\n            }\n        }\n    }\n    return NO;\n}\n\n- (BOOL)managedCapturer:(id<SCCapturer>)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:shouldProcessFileInput:)]) {\n                return [listener managedCapturer:managedCapturer shouldProcessFileInput:state];\n            }\n        }\n    }\n    return NO;\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didDetectFaceBounds:(NSDictionary<NSNumber *, NSValue *> *)faceBoundsByFaceID\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) {\n                [listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) {\n                [listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint];\n            }\n        }\n    }\n}\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        
for (id<SCManagedCapturerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) {\n                [listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint];\n            }\n        }\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerSampleMetadata.h",
    "content": "//\n//  SCRecordingMetadata.h\n//  Snapchat\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface SCManagedCapturerSampleMetadata : NSObject\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp\n                                  fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER;\n\n@property (nonatomic, readonly) CMTime presentationTimestamp;\n\n@property (nonatomic, readonly) float fieldOfView;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerSampleMetadata.m",
    "content": "//\n//  SCRecordingMetadata.m\n//  Snapchat\n//\n\n#import \"SCManagedCapturerSampleMetadata.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@implementation SCManagedCapturerSampleMetadata\n\n- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView\n{\n    self = [super init];\n    if (self) {\n        _presentationTimestamp = presentationTimestamp;\n        _fieldOfView = fieldOfView;\n    }\n    return self;\n}\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerState.h",
    "content": "// 49126048c3d19dd5b676b8d39844cf133833b67a\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedCaptureDevice.h\"\n\n#import <AvailabilityMacros.h>\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedCapturerState <NSObject, NSCoding, NSCopying>\n\n@property (nonatomic, assign, readonly) BOOL isRunning;\n\n@property (nonatomic, assign, readonly) BOOL isNightModeActive;\n\n@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;\n\n@property (nonatomic, assign, readonly) BOOL lowLightCondition;\n\n@property (nonatomic, assign, readonly) BOOL adjustingExposure;\n\n@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;\n\n@property (nonatomic, assign, readonly) CGFloat zoomFactor;\n\n@property (nonatomic, assign, readonly) BOOL flashSupported;\n\n@property (nonatomic, assign, readonly) BOOL torchSupported;\n\n@property (nonatomic, assign, readonly) BOOL flashActive;\n\n@property (nonatomic, assign, readonly) BOOL torchActive;\n\n@property (nonatomic, assign, readonly) BOOL lensesActive;\n\n@property (nonatomic, assign, readonly) BOOL arSessionActive;\n\n@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;\n\n@property (nonatomic, assign, readonly) BOOL lensProcessorReady;\n\n@end\n\n@interface SCManagedCapturerState : NSObject <SCManagedCapturerState>\n\n@property (nonatomic, assign, readonly) BOOL isRunning;\n\n@property (nonatomic, assign, readonly) BOOL isNightModeActive;\n\n@property (nonatomic, assign, readonly) BOOL isPortraitModeActive;\n\n@property (nonatomic, assign, readonly) BOOL lowLightCondition;\n\n@property (nonatomic, assign, readonly) BOOL adjustingExposure;\n\n@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition;\n\n@property (nonatomic, assign, readonly) CGFloat zoomFactor;\n\n@property (nonatomic, assign, readonly) BOOL flashSupported;\n\n@property (nonatomic, assign, readonly) 
BOOL torchSupported;\n\n@property (nonatomic, assign, readonly) BOOL flashActive;\n\n@property (nonatomic, assign, readonly) BOOL torchActive;\n\n@property (nonatomic, assign, readonly) BOOL lensesActive;\n\n@property (nonatomic, assign, readonly) BOOL arSessionActive;\n\n@property (nonatomic, assign, readonly) BOOL liveVideoStreaming;\n\n@property (nonatomic, assign, readonly) BOOL lensProcessorReady;\n\n- (instancetype)initWithIsRunning:(BOOL)isRunning\n                isNightModeActive:(BOOL)isNightModeActive\n             isPortraitModeActive:(BOOL)isPortraitModeActive\n                lowLightCondition:(BOOL)lowLightCondition\n                adjustingExposure:(BOOL)adjustingExposure\n                   devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                       zoomFactor:(CGFloat)zoomFactor\n                   flashSupported:(BOOL)flashSupported\n                   torchSupported:(BOOL)torchSupported\n                      flashActive:(BOOL)flashActive\n                      torchActive:(BOOL)torchActive\n                     lensesActive:(BOOL)lensesActive\n                  arSessionActive:(BOOL)arSessionActive\n               liveVideoStreaming:(BOOL)liveVideoStreaming\n               lensProcessorReady:(BOOL)lensProcessorReady;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerState.m",
    "content": "// 49126048c3d19dd5b676b8d39844cf133833b67a\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCValueObjectHelpers.h>\n\n#import <FastCoding/FastCoder.h>\n\n@implementation SCManagedCapturerState\n\nstatic ptrdiff_t sSCManagedCapturerStateOffsets[0];\nstatic BOOL sSCManagedCapturerStateHasOffsets;\n\n- (instancetype)initWithIsRunning:(BOOL)isRunning\n                isNightModeActive:(BOOL)isNightModeActive\n             isPortraitModeActive:(BOOL)isPortraitModeActive\n                lowLightCondition:(BOOL)lowLightCondition\n                adjustingExposure:(BOOL)adjustingExposure\n                   devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                       zoomFactor:(CGFloat)zoomFactor\n                   flashSupported:(BOOL)flashSupported\n                   torchSupported:(BOOL)torchSupported\n                      flashActive:(BOOL)flashActive\n                      torchActive:(BOOL)torchActive\n                     lensesActive:(BOOL)lensesActive\n                  arSessionActive:(BOOL)arSessionActive\n               liveVideoStreaming:(BOOL)liveVideoStreaming\n               lensProcessorReady:(BOOL)lensProcessorReady\n{\n    self = [super init];\n    if (self) {\n        _isRunning = isRunning;\n        _isNightModeActive = isNightModeActive;\n        _isPortraitModeActive = isPortraitModeActive;\n        _lowLightCondition = lowLightCondition;\n        _adjustingExposure = adjustingExposure;\n        _devicePosition = devicePosition;\n        _zoomFactor = zoomFactor;\n        _flashSupported = flashSupported;\n        _torchSupported = torchSupported;\n        _flashActive = flashActive;\n        _torchActive = torchActive;\n        _lensesActive = lensesActive;\n        _arSessionActive = arSessionActive;\n        _liveVideoStreaming = liveVideoStreaming;\n        _lensProcessorReady = lensProcessorReady;\n    }\n    return 
self;\n}\n\n#pragma mark - NSCopying\n\n- (instancetype)copyWithZone:(NSZone *)zone\n{\n    // Immutable object, bypass copy\n    return self;\n}\n\n#pragma mark - NSCoding\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder\n{\n    self = [super init];\n    if (self) {\n        _isRunning = [aDecoder decodeBoolForKey:@\"isRunning\"];\n        _isNightModeActive = [aDecoder decodeBoolForKey:@\"isNightModeActive\"];\n        _isPortraitModeActive = [aDecoder decodeBoolForKey:@\"isPortraitModeActive\"];\n        _lowLightCondition = [aDecoder decodeBoolForKey:@\"lowLightCondition\"];\n        _adjustingExposure = [aDecoder decodeBoolForKey:@\"adjustingExposure\"];\n        _devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@\"devicePosition\"];\n        _zoomFactor = [aDecoder decodeFloatForKey:@\"zoomFactor\"];\n        _flashSupported = [aDecoder decodeBoolForKey:@\"flashSupported\"];\n        _torchSupported = [aDecoder decodeBoolForKey:@\"torchSupported\"];\n        _flashActive = [aDecoder decodeBoolForKey:@\"flashActive\"];\n        _torchActive = [aDecoder decodeBoolForKey:@\"torchActive\"];\n        _lensesActive = [aDecoder decodeBoolForKey:@\"lensesActive\"];\n        _arSessionActive = [aDecoder decodeBoolForKey:@\"arSessionActive\"];\n        _liveVideoStreaming = [aDecoder decodeBoolForKey:@\"liveVideoStreaming\"];\n        _lensProcessorReady = [aDecoder decodeBoolForKey:@\"lensProcessorReady\"];\n    }\n    return self;\n}\n\n- (void)encodeWithCoder:(NSCoder *)aCoder\n{\n    [aCoder encodeBool:_isRunning forKey:@\"isRunning\"];\n    [aCoder encodeBool:_isNightModeActive forKey:@\"isNightModeActive\"];\n    [aCoder encodeBool:_isPortraitModeActive forKey:@\"isPortraitModeActive\"];\n    [aCoder encodeBool:_lowLightCondition forKey:@\"lowLightCondition\"];\n    [aCoder encodeBool:_adjustingExposure forKey:@\"adjustingExposure\"];\n    [aCoder encodeInteger:(NSInteger)_devicePosition forKey:@\"devicePosition\"];\n    [aCoder 
encodeFloat:_zoomFactor forKey:@\"zoomFactor\"];\n    [aCoder encodeBool:_flashSupported forKey:@\"flashSupported\"];\n    [aCoder encodeBool:_torchSupported forKey:@\"torchSupported\"];\n    [aCoder encodeBool:_flashActive forKey:@\"flashActive\"];\n    [aCoder encodeBool:_torchActive forKey:@\"torchActive\"];\n    [aCoder encodeBool:_lensesActive forKey:@\"lensesActive\"];\n    [aCoder encodeBool:_arSessionActive forKey:@\"arSessionActive\"];\n    [aCoder encodeBool:_liveVideoStreaming forKey:@\"liveVideoStreaming\"];\n    [aCoder encodeBool:_lensProcessorReady forKey:@\"lensProcessorReady\"];\n}\n\n#pragma mark - FasterCoding\n\n- (BOOL)preferFasterCoding\n{\n    return YES;\n}\n\n- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder\n{\n    [fasterCoder encodeBool:_adjustingExposure];\n    [fasterCoder encodeBool:_arSessionActive];\n    [fasterCoder encodeSInt32:_devicePosition];\n    [fasterCoder encodeBool:_flashActive];\n    [fasterCoder encodeBool:_flashSupported];\n    [fasterCoder encodeBool:_isNightModeActive];\n    [fasterCoder encodeBool:_isPortraitModeActive];\n    [fasterCoder encodeBool:_isRunning];\n    [fasterCoder encodeBool:_lensProcessorReady];\n    [fasterCoder encodeBool:_lensesActive];\n    [fasterCoder encodeBool:_liveVideoStreaming];\n    [fasterCoder encodeBool:_lowLightCondition];\n    [fasterCoder encodeBool:_torchActive];\n    [fasterCoder encodeBool:_torchSupported];\n    [fasterCoder encodeFloat64:_zoomFactor];\n}\n\n- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder\n{\n    _adjustingExposure = (BOOL)[fasterDecoder decodeBool];\n    _arSessionActive = (BOOL)[fasterDecoder decodeBool];\n    _devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32];\n    _flashActive = (BOOL)[fasterDecoder decodeBool];\n    _flashSupported = (BOOL)[fasterDecoder decodeBool];\n    _isNightModeActive = (BOOL)[fasterDecoder decodeBool];\n    _isPortraitModeActive = (BOOL)[fasterDecoder decodeBool];\n    
_isRunning = (BOOL)[fasterDecoder decodeBool];\n    _lensProcessorReady = (BOOL)[fasterDecoder decodeBool];\n    _lensesActive = (BOOL)[fasterDecoder decodeBool];\n    _liveVideoStreaming = (BOOL)[fasterDecoder decodeBool];\n    _lowLightCondition = (BOOL)[fasterDecoder decodeBool];\n    _torchActive = (BOOL)[fasterDecoder decodeBool];\n    _torchSupported = (BOOL)[fasterDecoder decodeBool];\n    _zoomFactor = (CGFloat)[fasterDecoder decodeFloat64];\n}\n\n- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 15633755733674300ULL:\n        _adjustingExposure = (BOOL)val;\n        break;\n    case 11461798188076803ULL:\n        _arSessionActive = (BOOL)val;\n        break;\n    case 12833337784991002ULL:\n        _flashActive = (BOOL)val;\n        break;\n    case 51252237764061994ULL:\n        _flashSupported = (BOOL)val;\n        break;\n    case 1498048848502287ULL:\n        _isNightModeActive = (BOOL)val;\n        break;\n    case 56151582267629469ULL:\n        _isPortraitModeActive = (BOOL)val;\n        break;\n    case 12346172623874083ULL:\n        _isRunning = (BOOL)val;\n        break;\n    case 67168377441917657ULL:\n        _lensProcessorReady = (BOOL)val;\n        break;\n    case 5791542045168142ULL:\n        _lensesActive = (BOOL)val;\n        break;\n    case 28486888710545224ULL:\n        _liveVideoStreaming = (BOOL)val;\n        break;\n    case 24071673583499455ULL:\n        _lowLightCondition = (BOOL)val;\n        break;\n    case 40774429934225315ULL:\n        _torchActive = (BOOL)val;\n        break;\n    case 41333098301057670ULL:\n        _torchSupported = (BOOL)val;\n        break;\n    }\n}\n\n- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 66264093189780655ULL:\n        _devicePosition = (SCManagedCaptureDevicePosition)val;\n        break;\n    }\n}\n\n- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 61340640993537628ULL:\n    
    _zoomFactor = (CGFloat)val;\n        break;\n    }\n}\n\n+ (uint64_t)fasterCodingVersion\n{\n    return 10319810232046341562ULL;\n}\n\n+ (uint64_t *)fasterCodingKeys\n{\n    static uint64_t keys[] = {\n        15 /* Total */,\n        FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32),\n        FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64),\n    };\n    return keys;\n}\n\n#pragma mark - isEqual\n\n- (BOOL)isEqual:(id)object\n{\n    if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) {\n        return NO;\n    }\n    SCManagedCapturerState *other = (SCManagedCapturerState *)object;\n    if (other->_isRunning != _isRunning) {\n        return NO;\n    }\n\n    if (other->_isNightModeActive != _isNightModeActive) {\n        return NO;\n    }\n\n    if (other->_isPortraitModeActive != _isPortraitModeActive) {\n        return NO;\n    }\n\n    if (other->_lowLightCondition != _lowLightCondition) {\n        return NO;\n    }\n\n    if (other->_adjustingExposure != _adjustingExposure) {\n        return NO;\n    }\n\n    if (other->_devicePosition != 
_devicePosition) {\n        return NO;\n    }\n\n    if (other->_zoomFactor != _zoomFactor) {\n        return NO;\n    }\n\n    if (other->_flashSupported != _flashSupported) {\n        return NO;\n    }\n\n    if (other->_torchSupported != _torchSupported) {\n        return NO;\n    }\n\n    if (other->_flashActive != _flashActive) {\n        return NO;\n    }\n\n    if (other->_torchActive != _torchActive) {\n        return NO;\n    }\n\n    if (other->_lensesActive != _lensesActive) {\n        return NO;\n    }\n\n    if (other->_arSessionActive != _arSessionActive) {\n        return NO;\n    }\n\n    if (other->_liveVideoStreaming != _liveVideoStreaming) {\n        return NO;\n    }\n\n    if (other->_lensProcessorReady != _lensProcessorReady) {\n        return NO;\n    }\n\n    return YES;\n}\n\n- (NSUInteger)hash\n{\n    NSUInteger subhashes[] = {\n        (NSUInteger)_isRunning,         (NSUInteger)_isNightModeActive,  (NSUInteger)_isPortraitModeActive,\n        (NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure,  (NSUInteger)_devicePosition,\n        (NSUInteger)_zoomFactor,        (NSUInteger)_flashSupported,     (NSUInteger)_torchSupported,\n        (NSUInteger)_flashActive,       (NSUInteger)_torchActive,        (NSUInteger)_lensesActive,\n        (NSUInteger)_arSessionActive,   (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady};\n    NSUInteger result = subhashes[0];\n    for (int i = 1; i < 15; i++) {\n        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);\n        base = (~base) + (base << 18);\n        base ^= (base >> 31);\n        base *= 21;\n        base ^= (base >> 11);\n        base += (base << 6);\n        base ^= (base >> 22);\n        result = (NSUInteger)base;\n    }\n    return result;\n}\n\n#pragma mark - Print description in console: lldb> po #{variable name}\n\n- (NSString *)description\n{\n    NSMutableString *desc = [NSMutableString string];\n    [desc 
appendString:@\"{\\n\"];\n    [desc appendFormat:@\"\\tisRunning:%@\\n\", [@(_isRunning) description]];\n    [desc appendFormat:@\"\\tisNightModeActive:%@\\n\", [@(_isNightModeActive) description]];\n    [desc appendFormat:@\"\\tisPortraitModeActive:%@\\n\", [@(_isPortraitModeActive) description]];\n    [desc appendFormat:@\"\\tlowLightCondition:%@\\n\", [@(_lowLightCondition) description]];\n    [desc appendFormat:@\"\\tadjustingExposure:%@\\n\", [@(_adjustingExposure) description]];\n    [desc appendFormat:@\"\\tdevicePosition:%@\\n\", [@(_devicePosition) description]];\n    [desc appendFormat:@\"\\tzoomFactor:%@\\n\", [@(_zoomFactor) description]];\n    [desc appendFormat:@\"\\tflashSupported:%@\\n\", [@(_flashSupported) description]];\n    [desc appendFormat:@\"\\ttorchSupported:%@\\n\", [@(_torchSupported) description]];\n    [desc appendFormat:@\"\\tflashActive:%@\\n\", [@(_flashActive) description]];\n    [desc appendFormat:@\"\\ttorchActive:%@\\n\", [@(_torchActive) description]];\n    [desc appendFormat:@\"\\tlensesActive:%@\\n\", [@(_lensesActive) description]];\n    [desc appendFormat:@\"\\tarSessionActive:%@\\n\", [@(_arSessionActive) description]];\n    [desc appendFormat:@\"\\tliveVideoStreaming:%@\\n\", [@(_liveVideoStreaming) description]];\n    [desc appendFormat:@\"\\tlensProcessorReady:%@\\n\", [@(_lensProcessorReady) description]];\n    [desc appendString:@\"}\\n\"];\n\n    return [desc copy];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerState.value",
    "content": "#import <CoreGraphics/CoreGraphics.h>\n#import \"SCManagedCaptureDevice.h\"\n\ninterface SCManagedCapturerState\n    BOOL isRunning\n    BOOL isNightModeActive\n    BOOL isPortraitModeActive\n    BOOL lowLightCondition\n    BOOL adjustingExposure\n    enum SCManagedCaptureDevicePosition devicePosition\n    CGFloat zoomFactor\n    BOOL flashSupported\n    BOOL torchSupported\n    BOOL flashActive\n\tBOOL torchActive\n    BOOL lensesActive\n    BOOL arSessionActive\n    BOOL liveVideoStreaming\n    BOOL lensProcessorReady\nend\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerStateBuilder.h",
    "content": "// 49126048c3d19dd5b676b8d39844cf133833b67a\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedCapturerState.h\"\n\n#import <AvailabilityMacros.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCManagedCapturerStateBuilder : NSObject\n\n+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState;\n\n- (SCManagedCapturerState *)build;\n\n- (instancetype)setIsRunning:(BOOL)isRunning;\n\n- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive;\n\n- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive;\n\n- (instancetype)setLowLightCondition:(BOOL)lowLightCondition;\n\n- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure;\n\n- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n\n- (instancetype)setZoomFactor:(CGFloat)zoomFactor;\n\n- (instancetype)setFlashSupported:(BOOL)flashSupported;\n\n- (instancetype)setTorchSupported:(BOOL)torchSupported;\n\n- (instancetype)setFlashActive:(BOOL)flashActive;\n\n- (instancetype)setTorchActive:(BOOL)torchActive;\n\n- (instancetype)setLensesActive:(BOOL)lensesActive;\n\n- (instancetype)setArSessionActive:(BOOL)arSessionActive;\n\n- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming;\n\n- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerStateBuilder.m",
    "content": "// 49126048c3d19dd5b676b8d39844cf133833b67a\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedCapturerStateBuilder.h\"\n\n#import <SCFoundation/SCValueObjectHelpers.h>\n\n#import <FastCoding/FastCoder.h>\n\n@implementation SCManagedCapturerStateBuilder {\n    BOOL _isRunning;\n    BOOL _isNightModeActive;\n    BOOL _isPortraitModeActive;\n    BOOL _lowLightCondition;\n    BOOL _adjustingExposure;\n    SCManagedCaptureDevicePosition _devicePosition;\n    CGFloat _zoomFactor;\n    BOOL _flashSupported;\n    BOOL _torchSupported;\n    BOOL _flashActive;\n    BOOL _torchActive;\n    BOOL _lensesActive;\n    BOOL _arSessionActive;\n    BOOL _liveVideoStreaming;\n    BOOL _lensProcessorReady;\n}\n\n+ (instancetype)withManagedCapturerState:(id<SCManagedCapturerState>)managedCapturerState\n{\n    SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init];\n    builder->_isRunning = managedCapturerState.isRunning;\n    builder->_isNightModeActive = managedCapturerState.isNightModeActive;\n    builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive;\n    builder->_lowLightCondition = managedCapturerState.lowLightCondition;\n    builder->_adjustingExposure = managedCapturerState.adjustingExposure;\n    builder->_devicePosition = managedCapturerState.devicePosition;\n    builder->_zoomFactor = managedCapturerState.zoomFactor;\n    builder->_flashSupported = managedCapturerState.flashSupported;\n    builder->_torchSupported = managedCapturerState.torchSupported;\n    builder->_flashActive = managedCapturerState.flashActive;\n    builder->_torchActive = managedCapturerState.torchActive;\n    builder->_lensesActive = managedCapturerState.lensesActive;\n    builder->_arSessionActive = managedCapturerState.arSessionActive;\n    builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming;\n    builder->_lensProcessorReady = managedCapturerState.lensProcessorReady;\n    return 
builder;\n}\n\n- (SCManagedCapturerState *)build\n{\n    return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning\n                                           isNightModeActive:_isNightModeActive\n                                        isPortraitModeActive:_isPortraitModeActive\n                                           lowLightCondition:_lowLightCondition\n                                           adjustingExposure:_adjustingExposure\n                                              devicePosition:_devicePosition\n                                                  zoomFactor:_zoomFactor\n                                              flashSupported:_flashSupported\n                                              torchSupported:_torchSupported\n                                                 flashActive:_flashActive\n                                                 torchActive:_torchActive\n                                                lensesActive:_lensesActive\n                                             arSessionActive:_arSessionActive\n                                          liveVideoStreaming:_liveVideoStreaming\n                                          lensProcessorReady:_lensProcessorReady];\n}\n\n- (instancetype)setIsRunning:(BOOL)isRunning\n{\n    _isRunning = isRunning;\n    return self;\n}\n\n- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive\n{\n    _isNightModeActive = isNightModeActive;\n    return self;\n}\n\n- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive\n{\n    _isPortraitModeActive = isPortraitModeActive;\n    return self;\n}\n\n- (instancetype)setLowLightCondition:(BOOL)lowLightCondition\n{\n    _lowLightCondition = lowLightCondition;\n    return self;\n}\n\n- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure\n{\n    _adjustingExposure = adjustingExposure;\n    return self;\n}\n\n- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    _devicePosition = 
devicePosition;\n    return self;\n}\n\n- (instancetype)setZoomFactor:(CGFloat)zoomFactor\n{\n    _zoomFactor = zoomFactor;\n    return self;\n}\n\n- (instancetype)setFlashSupported:(BOOL)flashSupported\n{\n    _flashSupported = flashSupported;\n    return self;\n}\n\n- (instancetype)setTorchSupported:(BOOL)torchSupported\n{\n    _torchSupported = torchSupported;\n    return self;\n}\n\n- (instancetype)setFlashActive:(BOOL)flashActive\n{\n    _flashActive = flashActive;\n    return self;\n}\n\n- (instancetype)setTorchActive:(BOOL)torchActive\n{\n    _torchActive = torchActive;\n    return self;\n}\n\n- (instancetype)setLensesActive:(BOOL)lensesActive\n{\n    _lensesActive = lensesActive;\n    return self;\n}\n\n- (instancetype)setArSessionActive:(BOOL)arSessionActive\n{\n    _arSessionActive = arSessionActive;\n    return self;\n}\n\n- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming\n{\n    _liveVideoStreaming = liveVideoStreaming;\n    return self;\n}\n\n- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady\n{\n    _lensProcessorReady = lensProcessorReady;\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerUtils.h",
    "content": "//\n//  SCManagedCapturerUtils.h\n//  Snapchat\n//\n//  Created by Chao Pang on 10/4/17.\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\nSC_EXTERN_C_BEGIN\n\nextern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio;\n\nextern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void);\n\nextern CGSize SCManagedCapturerAllScreenSize(void);\n\nextern CGSize SCAsyncImageCapturePlaceholderViewSize(void);\n\nextern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio);\n\nextern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio);\n\nextern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,\n                                         CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight);\n\nextern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio);\n\nextern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight);\n\nextern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation,\n                                                    CGFloat aspectRatio);\nSC_EXTERN_C_END\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerUtils.m",
    "content": "//\n//  SCManagedCapturerUtils.m\n//  Snapchat\n//\n//  Created by Chao Pang on 10/4/17.\n//\n\n#import \"SCManagedCapturerUtils.h\"\n\n#import \"SCCaptureCommon.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCCoreGraphicsUtils.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/UIScreen+SCSafeAreaInsets.h>\n\n// This is to calculate the crop ratio for generating the image shown in Preview page\n// Check https://snapchat.quip.com/lU3kAoDxaAFG for our design.\nconst CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0);\n\nCGFloat SCManagedCapturedImageAndVideoAspectRatio(void)\n{\n    static dispatch_once_t onceToken;\n    static CGFloat aspectRatio;\n    dispatch_once(&onceToken, ^{\n        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;\n        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];\n        aspectRatio = SCSizeGetAspectRatio(\n            CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom));\n    });\n    return aspectRatio;\n}\n\nCGSize SCManagedCapturerAllScreenSize(void)\n{\n    static CGSize size;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;\n        // This logic is complicated because we need to handle iPhone X properly.\n        // See https://snapchat.quip.com/lU3kAoDxaAFG for our design.\n        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];\n        UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];\n        // This really is just some coordinate computations:\n        // We know in preview, our size is (screenWidth, screenHeight - topInset - bottomInset)\n        // We know that when the preview image is in the camera screen, the height is screenHeight - visualTopInset,\n        // thus, we need to figure out in camera screen, what's the 
bleed-over width should be\n        // (screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset)\n        size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) /\n                                 (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),\n                          screenSize.height);\n    });\n    return size;\n}\n\nCGSize SCAsyncImageCapturePlaceholderViewSize(void)\n{\n    static CGSize size;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;\n        UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets];\n        UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets];\n        size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width /\n                                 (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)),\n                          screenSize.height - visualSafeInsets.top);\n    });\n    return size;\n}\n\nCGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio)\n{\n    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @\"\");\n    switch (orientation) {\n    case UIImageOrientationLeft:\n    case UIImageOrientationRight:\n    case UIImageOrientationLeftMirrored:\n    case UIImageOrientationRightMirrored:\n        return 1.0 / aspectRatio;\n    default:\n        return aspectRatio;\n    }\n}\n\nUIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio)\n{\n    if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) {\n        CGImageRef croppedImageRef =\n            SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio);\n        UIImage *croppedImage =\n            [UIImage imageWithCGImage:croppedImageRef scale:image.scale 
orientation:image.imageOrientation];\n        CGImageRelease(croppedImageRef);\n        return croppedImage;\n    } else {\n        return image;\n    }\n}\n\nvoid SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation,\n                                  CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight)\n{\n    SCCAssert(outputWidth != NULL && outputHeight != NULL, @\"\");\n    aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);\n    if (inputWidth > roundf(inputHeight * aspectRatio)) {\n        *outputHeight = inputHeight;\n        *outputWidth = roundf(*outputHeight * aspectRatio);\n    } else {\n        *outputWidth = inputWidth;\n        *outputHeight = roundf(*outputWidth / aspectRatio);\n    }\n}\n\nBOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)\n{\n    if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {\n        return NO;\n    }\n    aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio);\n    size_t width = CGImageGetWidth(image);\n    size_t height = CGImageGetHeight(image);\n    return (width != roundf(height * aspectRatio));\n}\n\nCGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight)\n{\n    if ([SCDeviceName isIphoneX]) {\n        // X is pushed all the way over to crop out top section but none of bottom\n        CGFloat x = (imageWidth - croppedWidth);\n        // Crop y symmetrically.\n        CGFloat y = roundf((imageHeight - croppedHeight) / 2.0);\n\n        return CGRectMake(x, y, croppedWidth, croppedHeight);\n    }\n    return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight);\n}\n\nCGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)\n{\n    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @\"\");\n    size_t 
width = CGImageGetWidth(image);\n    size_t height = CGImageGetHeight(image);\n    size_t croppedWidth, croppedHeight;\n    if ([SCDeviceName isIphoneX]) {\n        size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio);\n        size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio);\n        SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth,\n                                     &croppedHeight);\n    } else {\n        SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight);\n    }\n    CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight);\n    return CGImageCreateWithImageInRect(image, cropRect);\n}\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerV1.h",
    "content": "//\n//  SCManagedCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/20/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCCapturer.h\"\n\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n/**\n * Manage AVCaptureSession with SCManagedCapturerV1\n *\n * In phantom, there are a lot of places we use AVCaptureSession. However, since for each app, only one session\n * can run at the same time, we need some kind of management for the capture session.\n *\n * SCManagedCapturerV1 manages the state of capture session in following ways:\n *\n * All operations in SCManagedCapturerV1 are handled on a serial queue, to ensure its sequence. All callbacks (either\n * on the listener or the completion handler) are on the main thread. The state of SCManagedCapturerV1 are conveniently\n * maintained in a SCManagedCapturerState object, which is immutable and can be passed across threads, it maintains a\n * consistent view of the capture session, if it is not delayed (thus, the state may deliver as current active device\n * is back camera on main thread, but in reality, on the serial queue, the active device switched to the front camera\n * already. However, this is OK because state.devicePosition will be back camera and with all its setup at that time.\n * Note that it is impossible to have an on-time view of the state across threads without blocking each other).\n *\n * For main use cases, you setup the capturer, add the preview layer, and then can call capture still image\n * or record video, and SCManagedCapturerV1 will do the rest (make sure it actually captures image / video, recover\n * from error, or setup our more advanced image / video post-process).\n *\n * The key classes that drive the recording flow are SCManagedVideoStreamer and SCManagedVideoFileStreamer which\n * conform to SCManagedVideoDataSource. 
They will stream images to consumers conforming to\n * SCManagedVideoDataSourceListener\n * such as SCManagedLensesProcessor, SCManagedDeviceCapacityAnalyzer, SCManagedVideoScanner and ultimately\n * SCManagedVideoCapturer and SCManagedStillImageCapturer which record the final output.\n *\n */\n@class SCCaptureResource;\n\nextern NSString *const kSCLensesTweaksDidChangeFileInput;\n\n@interface SCManagedCapturerV1 : NSObject <SCCapturer, SCTimeProfilable>\n\n+ (SCManagedCapturerV1 *)sharedInstance;\n\n/*\n The following APIs are reserved to be only used for SCCaptureCore aka managedCapturerV2.\n */\n- (instancetype)initWithResource:(SCCaptureResource *)resource;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerV1.m",
    "content": "//\n//  SCManagedCapturer.m\n//  Snapchat\n//\n//  Created by Liu Liu on 4/20/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedCapturerV1.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import \"ARConfiguration+SCConfiguration.h\"\n#import \"NSURL+Asset.h\"\n#import \"SCBlackCameraDetector.h\"\n#import \"SCBlackCameraNoOutputDetector.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCCaptureSessionFixer.h\"\n#import \"SCCaptureUninitializedState.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCCapturerToken.h\"\n#import \"SCManagedAudioStreamer.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCaptureDeviceDefaultZoomHandler.h\"\n#import \"SCManagedCaptureDeviceHandler.h\"\n#import \"SCManagedCaptureDeviceSubjectAreaHandler.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCManagedCaptureSession.h\"\n#import \"SCManagedCapturerARImageCaptureProvider.h\"\n#import \"SCManagedCapturerGLViewManagerAPI.h\"\n#import \"SCManagedCapturerLSAComponentTrackerAPI.h\"\n#import \"SCManagedCapturerLensAPI.h\"\n#import \"SCManagedCapturerListenerAnnouncer.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerSampleMetadata.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedCapturerStateBuilder.h\"\n#import \"SCManagedDeviceCapacityAnalyzer.h\"\n#import \"SCManagedDroppedFramesReporter.h\"\n#import \"SCManagedFrameHealthChecker.h\"\n#import \"SCManagedFrontFlashController.h\"\n#import \"SCManagedStillImageCapturer.h\"\n#import \"SCManagedStillImageCapturerHandler.h\"\n#import \"SCManagedVideoARDataSource.h\"\n#import \"SCManagedVideoCapturer.h\"\n#import \"SCManagedVideoFileStreamer.h\"\n#import \"SCManagedVideoFrameSampler.h\"\n#import \"SCManagedVideoScanner.h\"\n#import \"SCManagedVideoStreamReporter.h\"\n#import \"SCManagedVideoStreamer.h\"\n#import \"SCMetalUtils.h\"\n#import \"SCProcessingPipeline.h\"\n#import 
\"SCProcessingPipelineBuilder.h\"\n#import \"SCScanConfiguration.h\"\n#import \"SCSingleFrameStreamCapturer.h\"\n#import \"SCSnapCreationTriggers.h\"\n#import \"SCTimedTask.h\"\n\n#import <SCBase/SCAssignment.h>\n#import <SCBase/SCLazyLoadingProxy.h>\n#import <SCBatteryLogger/SCBatteryLogger.h>\n#import <SCFoundation/NSData+Random.h>\n#import <SCFoundation/NSError+Helpers.h>\n#import <SCFoundation/NSString+SCFormat.h>\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n#import <SCImageProcess/SCImageProcessVideoPlaybackSession.h>\n#import <SCLenses/SCLens.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n#import <SCUserTraceLogger/SCUserTraceLogger.h>\n\n#import <Looksery/Looksery.h>\n\n@import ARKit;\n\nstatic NSUInteger const kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession = 22;\nstatic CGFloat const kSCManagedCapturerFixInconsistencyARSessionDelayThreshold = 2;\nstatic CGFloat const kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold = 5;\n\nstatic NSTimeInterval const kMinFixAVSessionRunningInterval = 1; // Interval to run _fixAVSessionIfNecessary\nstatic NSTimeInterval const kMinFixSessionRuntimeErrorInterval =\n    1; // Min interval that RuntimeError calls _startNewSession\n\nstatic NSString *const kSCManagedCapturerErrorDomain = @\"kSCManagedCapturerErrorDomain\";\n\nNSString *const kSCLensesTweaksDidChangeFileInput = @\"kSCLensesTweaksDidChangeFileInput\";\n\n@implementation SCManagedCapturerV1 {\n    // No ivars for CapturerV1 please, they should be in resource.\n    SCCaptureResource *_captureResource;\n}\n\n+ (SCManagedCapturerV1 *)sharedInstance\n{\n    static dispatch_once_t onceToken;\n    
static SCManagedCapturerV1 *managedCapturerV1;\n    dispatch_once(&onceToken, ^{\n        managedCapturerV1 = [[SCManagedCapturerV1 alloc] init];\n    });\n    return managedCapturerV1;\n}\n\n- (instancetype)init\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource];\n    return [self initWithResource:resource];\n}\n\n- (instancetype)initWithResource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertMainThread();\n    self = [super init];\n    if (self) {\n        // Assuming I am not in background. I can be more defensive here and fetch the app state.\n        // But to avoid potential problems, won't do that until later.\n        SCLogCapturerInfo(@\"======================= cool startup =======================\");\n        // Initialization of capture resource should be done in worker to be shared between V1 and V2.\n        _captureResource = resource;\n        _captureResource.handleAVSessionStatusChange = @selector(_handleAVSessionStatusChange:);\n        _captureResource.sessionRuntimeError = @selector(_sessionRuntimeError:);\n        _captureResource.livenessConsistency = @selector(_livenessConsistency:);\n        _captureResource.deviceSubjectAreaHandler =\n            [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:_captureResource];\n        _captureResource.snapCreationTriggers = [SCSnapCreationTriggers new];\n        if (SCIsMasterBuild()) {\n            // We call _sessionRuntimeError to reset _captureResource.videoDataSource if input changes\n            [[NSNotificationCenter defaultCenter] addObserver:self\n                                                     selector:@selector(_sessionRuntimeError:)\n                                                         name:kSCLensesTweaksDidChangeFileInput\n                                                       object:nil];\n        }\n    }\n    return self;\n}\n\n- 
(SCBlackCameraDetector *)blackCameraDetector\n{\n    return _captureResource.blackCameraDetector;\n}\n\n- (void)recreateAVCaptureSession\n{\n    SCTraceODPCompatibleStart(2);\n    [self _startRunningWithNewCaptureSessionIfNecessary];\n}\n\n- (void)_handleAVSessionStatusChange:(NSDictionary *)change\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);\n    SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground);\n    BOOL wasRunning = [change[NSKeyValueChangeOldKey] boolValue];\n    BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];\n    SCLogCapturerInfo(@\"avSession running status changed: %@ -> %@\", wasRunning ? @\"running\" : @\"stopped\",\n                      isRunning ? @\"running\" : @\"stopped\");\n\n    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:isRunning];\n\n    if (_captureResource.isRecreateSessionFixScheduled) {\n        SCLogCapturerInfo(@\"Scheduled AVCaptureSession recreation, return\");\n        return;\n    }\n\n    if (wasRunning != isRunning) {\n        runOnMainThreadAsynchronously(^{\n            if (isRunning) {\n                [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];\n            } else {\n                [_captureResource.announcer managedCapturer:self didStopRunning:_captureResource.state];\n            }\n        });\n    }\n\n    if (!isRunning) {\n        [_captureResource.queuePerformer perform:^{\n            [self _fixAVSessionIfNecessary];\n        }];\n    } else {\n        if (!SCDeviceSupportsMetal()) {\n            [self _fixNonMetalSessionPreviewInconsistency];\n        }\n    }\n}\n\n- (void)_fixAVSessionIfNecessary\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground);\n    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);\n    [[SCLogger 
sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession\n                                     uniqueId:@\"\"\n                                     stepName:@\"startConsistencyCheckAndFix\"];\n\n    NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate];\n    if (timeNow - _captureResource.lastFixSessionTimestamp < kMinFixAVSessionRunningInterval) {\n        SCLogCoreCameraInfo(@\"Fixing session in less than %f, skip\", kMinFixAVSessionRunningInterval);\n        return;\n    }\n    _captureResource.lastFixSessionTimestamp = timeNow;\n\n    if (!_captureResource.managedSession.isRunning) {\n        SCTraceStartSection(\"Fix AVSession\")\n        {\n            _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession++;\n            SCGhostToSnappableSignalCameraFixInconsistency();\n            if (_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession <=\n                kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) {\n                SCLogCapturerInfo(@\"Fixing AVSession\");\n                [_captureResource.managedSession startRunning];\n                SCLogCapturerInfo(@\"Fixed AVSession, success : %@\", @(_captureResource.managedSession.isRunning));\n                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession\n                                                 uniqueId:@\"\"\n                                                 stepName:@\"finishCaptureSessionFix\"];\n            } else {\n                // start running with new capture session if the inconsistency fixing not succeeds\n                SCLogCapturerInfo(@\"*** Recreate and run new capture session to fix the inconsistency ***\");\n                [self _startRunningWithNewCaptureSessionIfNecessary];\n                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession\n                                                 uniqueId:@\"\"\n                                                 
stepName:@\"finishNewCaptureSessionCreation\"];\n            }\n        }\n        SCTraceEndSection();\n        [[SCLogger sharedInstance]\n            logTimedEventEnd:kSCCameraFixAVCaptureSession\n                    uniqueId:@\"\"\n                  parameters:@{\n                      @\"success\" : @(_captureResource.managedSession.isRunning),\n                      @\"count\" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession)\n                  }];\n    } else {\n        _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;\n        [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraFixAVCaptureSession uniqueId:@\"\"];\n    }\n    if (_captureResource.managedSession.isRunning) {\n        // If it is fixed, we signal received the first frame.\n        SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();\n\n        // For non-metal preview render, we need to make sure preview is not hidden\n        if (!SCDeviceSupportsMetal()) {\n            [self _fixNonMetalSessionPreviewInconsistency];\n        }\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];\n            // To approximate this did render timer, it is not accurate.\n            SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());\n        });\n    } else {\n        [_captureResource.queuePerformer perform:^{\n            [self _fixAVSessionIfNecessary];\n        }\n                                           after:1];\n    }\n\n    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning];\n}\n\n- (void)_fixNonMetalSessionPreviewInconsistency\n{\n    SCTraceODPCompatibleStart(2);\n    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);\n    if ((!_captureResource.videoPreviewLayer.hidden) != _captureResource.managedSession.isRunning) {\n        SCTraceStartSection(\"Fix 
non-Metal VideoPreviewLayer\");\n        {\n            [CATransaction begin];\n            [CATransaction setDisableActions:YES];\n            [SCCaptureWorker setupVideoPreviewLayer:_captureResource];\n            [CATransaction commit];\n        }\n        SCTraceEndSection();\n    }\n}\n\n- (SCCaptureResource *)captureResource\n{\n    SCTraceODPCompatibleStart(2);\n    return _captureResource;\n}\n\n- (id<SCManagedCapturerLensAPI>)lensProcessingCore\n{\n    SCTraceODPCompatibleStart(2);\n    @weakify(self);\n    return (id<SCManagedCapturerLensAPI>)[[SCLazyLoadingProxy alloc] initWithInitializationBlock:^id {\n        @strongify(self);\n        SCReportErrorIf(self.captureResource.state.lensProcessorReady, @\"[Lenses] Lens processing core is not ready\");\n        return self.captureResource.lensProcessingCore;\n    }];\n}\n\n- (SCVideoCaptureSessionInfo)activeSession\n{\n    SCTraceODPCompatibleStart(2);\n    return [SCCaptureWorker activeSession:_captureResource];\n}\n\n- (BOOL)isLensApplied\n{\n    SCTraceODPCompatibleStart(2);\n    return [SCCaptureWorker isLensApplied:_captureResource];\n}\n\n- (BOOL)isVideoMirrored\n{\n    SCTraceODPCompatibleStart(2);\n    return [SCCaptureWorker isVideoMirrored:_captureResource];\n}\n\n#pragma mark - Setup, Start & Stop\n\n- (void)_updateHRSIEnabled\n{\n    SCTraceODPCompatibleStart(2);\n    // Since night mode is low-res, we set high resolution still image output when night mode is enabled\n    // SoftwareZoom requires higher resolution image to get better zooming result too.\n    // We also want a higher resolution on newer devices\n    BOOL is1080pSupported = [SCManagedCaptureDevice is1080pSupported];\n    BOOL shouldHRSIEnabled =\n        (_captureResource.device.isNightModeActive || _captureResource.device.softwareZoom || is1080pSupported);\n    SCLogCapturerInfo(@\"Setting HRSIEnabled to: %d. 
isNightModeActive:%d softwareZoom:%d is1080pSupported:%d\",\n                      shouldHRSIEnabled, _captureResource.device.isNightModeActive,\n                      _captureResource.device.softwareZoom, is1080pSupported);\n    [_captureResource.stillImageCapturer setHighResolutionStillImageOutputEnabled:shouldHRSIEnabled];\n}\n\n- (void)_updateStillImageStabilizationEnabled\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Enabling still image stabilization\");\n    [_captureResource.stillImageCapturer enableStillImageStabilization];\n}\n\n- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                            completionHandler:(dispatch_block_t)completionHandler\n                                      context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Setting up with devicePosition:%lu\", (unsigned long)devicePosition);\n    SCTraceResumeToken token = SCTraceCapture();\n    [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];\n    [_captureResource.queuePerformer perform:^{\n        SCTraceResume(token);\n        [self setupWithDevicePosition:devicePosition completionHandler:completionHandler];\n    }];\n}\n\n- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n              completionHandler:(dispatch_block_t)completionHandler\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_captureResource.queuePerformer);\n    [SCCaptureWorker setupWithCaptureResource:_captureResource devicePosition:devicePosition];\n\n    [self addListener:_captureResource.stillImageCapturer];\n    [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];\n    [self addListener:_captureResource.lensProcessingCore];\n\n    [self _updateHRSIEnabled];\n    [self _updateStillImageStabilizationEnabled];\n\n    [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource];\n\n    if 
(!SCDeviceSupportsMetal()) {\n        [SCCaptureWorker makeVideoPreviewLayer:_captureResource];\n    }\n\n    // I need to do this setup now. Thus, it is off the main thread. This also means my preview layer controller is\n    // entangled with the capturer.\n    [[SCManagedCapturePreviewLayerController sharedInstance] setupRenderPipeline];\n    [[SCManagedCapturePreviewLayerController sharedInstance] setManagedCapturer:self];\n    _captureResource.status = SCManagedCapturerStatusReady;\n\n    SCManagedCapturerState *state = [_captureResource.state copy];\n    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;\n    runOnMainThreadAsynchronously(^{\n        SCLogCapturerInfo(@\"Did setup with devicePosition:%lu\", (unsigned long)devicePosition);\n        [_captureResource.announcer managedCapturer:self didChangeState:state];\n        [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state];\n        if (!SCDeviceSupportsMetal()) {\n            [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];\n        }\n        if (completionHandler) {\n            completionHandler();\n        }\n    });\n}\n\n- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController\n                                 context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        _captureResource.sampleBufferDisplayController = sampleBufferDisplayController;\n        [_captureResource.videoDataSource addSampleBufferDisplayController:sampleBufferDisplayController];\n    }];\n}\n\n- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                             context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    
[[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@\"\"];\n    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];\n    SCLogCapturerInfo(@\"startRunningAsynchronouslyWithCompletionHandler called. token: %@\", token);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [SCCaptureWorker startRunningWithCaptureResource:_captureResource\n                                                   token:token\n                                       completionHandler:completionHandler];\n        // After startRunning, we need to make sure _fixAVSessionIfNecessary start running.\n        // The problem: with the new KVO fix strategy, it may happen that AVCaptureSession is in stopped state, thus no\n        // KVO callback is triggered.\n        // And calling startRunningAsynchronouslyWithCompletionHandler has no effect because SCManagedCapturerStatus is\n        // in SCManagedCapturerStatusRunning state\n        [self _fixAVSessionIfNecessary];\n    }];\n    return token;\n}\n\n- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token\n                  completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                            context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_captureResource.queuePerformer);\n    SCLogCapturerInfo(@\"Stop running. 
token:%@ context:%@\", token, context);\n    return [SCCaptureWorker stopRunningWithCaptureResource:_captureResource\n                                                     token:token\n                                         completionHandler:completionHandler];\n}\n\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                          context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Stop running asynchronously. token:%@ context:%@\", token, context);\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_captureResource.queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [SCCaptureWorker stopRunningWithCaptureResource:_captureResource\n                                                  token:token\n                                      completionHandler:completionHandler];\n    }];\n}\n\n- (void)stopRunningAsynchronously:(SCCapturerToken *)token\n                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                            after:(NSTimeInterval)delay\n                          context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Stop running asynchronously. 
token:%@ delay:%f\", token, delay);\n    NSTimeInterval startTime = CACurrentMediaTime();\n    [_captureResource.queuePerformer perform:^{\n        NSTimeInterval elapsedTime = CACurrentMediaTime() - startTime;\n        [_captureResource.queuePerformer perform:^{\n            SCTraceStart();\n            // If we haven't started a new running sequence yet, stop running now\n            [SCCaptureWorker stopRunningWithCaptureResource:_captureResource\n                                                      token:token\n                                          completionHandler:completionHandler];\n        }\n                                           after:MAX(delay - elapsedTime, 0)];\n    }];\n}\n\n- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler\n                                                  context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Start streaming asynchronously\");\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        [SCCaptureWorker startStreaming:_captureResource];\n        if (completionHandler) {\n            runOnMainThreadAsynchronously(completionHandler);\n        }\n    }];\n}\n\n#pragma mark - Recording / Capture\n\n- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                               context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        [SCCaptureWorker captureStillImageWithCaptureResource:_captureResource\n                                                  aspectRatio:aspectRatio\n                                             
captureSessionID:captureSessionID\n                                       shouldCaptureFromVideo:[self _shouldCaptureImageFromVideo]\n                                            completionHandler:completionHandler\n                                                      context:context];\n    }];\n}\n\n- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:\n            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler\n                                                           context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        SCLogCapturerInfo(@\"Start capturing single video frame\");\n        _captureResource.frameCap = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^void(UIImage *image) {\n            [_captureResource.queuePerformer perform:^{\n                [_captureResource.videoDataSource removeListener:_captureResource.frameCap];\n                _captureResource.frameCap = nil;\n            }];\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.device setTorchActive:NO];\n                SCLogCapturerInfo(@\"End capturing single video frame\");\n                completionHandler(image);\n            });\n        }];\n\n        BOOL waitForTorch = NO;\n        if (!_captureResource.state.torchActive) {\n            if (_captureResource.state.flashActive) {\n                waitForTorch = YES;\n                [_captureResource.device setTorchActive:YES];\n            }\n        }\n        [_captureResource.queuePerformer perform:^{\n            [_captureResource.videoDataSource addListener:_captureResource.frameCap];\n            [SCCaptureWorker startStreaming:_captureResource];\n        }\n                                           after:(waitForTorch ? 
0.5 : 0)];\n\n    }];\n}\n\n// Warms up the video capturer (audio configuration etc.) ahead of an actual recording.\n- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context\n                                  audioConfiguration:(SCAudioConfiguration *)configuration\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCLogCapturerInfo(@\"prepare for recording\");\n        [_captureResource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration];\n    }];\n}\n\n// Begins a video recording with the given settings; all work happens on the capture queue.\n- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                                    audioConfiguration:(SCAudioConfiguration *)configuration\n                                           maxDuration:(NSTimeInterval)maxDuration\n                                               fileURL:(NSURL *)fileURL\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                                               context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        [SCCaptureWorker startRecordingWithCaptureResource:_captureResource\n                                            outputSettings:outputSettings\n                                        audioConfiguration:configuration\n                                               maxDuration:maxDuration\n                                                   fileURL:fileURL\n                                          captureSessionID:captureSessionID\n                                         completionHandler:completionHandler];\n    }];\n}\n\n// Stops the in-flight video recording.\n- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        [SCCaptureWorker 
stopRecordingWithCaptureResource:_captureResource];\n    }];\n}\n\n// Cancels the in-flight video recording, discarding output.\n- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        [SCCaptureWorker cancelRecordingWithCaptureResource:_captureResource];\n    }];\n}\n\n// Starts a scan with the given configuration on the capture queue.\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        [SCCaptureWorker startScanWithScanConfiguration:configuration resource:_captureResource];\n    }];\n}\n\n// Stops scanning and reports completion via the handler.\n- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:_captureResource];\n    }];\n}\n\n// Samples the next rendered frame; lazily creates the frame sampler on first use.\n- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler\n                                 context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    // Previously _captureResource.videoFrameSampler was conditionally created when setting up, but if this method is\n    // called it is a\n    // safe assumption the client wants it to run instead of failing silently, so always create\n    // _captureResource.videoFrameSampler\n    if (!_captureResource.videoFrameSampler) {\n        _captureResource.videoFrameSampler = [SCManagedVideoFrameSampler new];\n        [_captureResource.announcer addListener:_captureResource.videoFrameSampler];\n    }\n    SCLogCapturerInfo(@\"Sampling next frame\");\n    [_captureResource.videoFrameSampler sampleNextFrame:completionHandler];\n}\n\n// Schedules a timed task on the video capturer.\n- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Adding timed 
task:%@\", task);\n    [_captureResource.queuePerformer perform:^{\n        [_captureResource.videoCapturer addTimedTask:task];\n    }];\n}\n\n- (void)clearTimedTasksWithContext:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        [_captureResource.videoCapturer clearTimedTasks];\n    }];\n}\n\n#pragma mark - Utilities\n\n- (void)convertViewCoordinates:(CGPoint)viewCoordinates\n             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler\n                       context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert(completionHandler, @\"completionHandler shouldn't be nil\");\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (SCDeviceSupportsMetal()) {\n            CGSize viewSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size;\n            CGPoint pointOfInterest =\n                [_captureResource.device convertViewCoordinates:viewCoordinates\n                                                       viewSize:viewSize\n                                                   videoGravity:AVLayerVideoGravityResizeAspectFill];\n            runOnMainThreadAsynchronously(^{\n                completionHandler(pointOfInterest);\n            });\n        } else {\n            CGSize viewSize = _captureResource.videoPreviewLayer.bounds.size;\n            CGPoint pointOfInterest =\n                [_captureResource.device convertViewCoordinates:viewCoordinates\n                                                       viewSize:viewSize\n                                                   videoGravity:_captureResource.videoPreviewLayer.videoGravity];\n            runOnMainThreadAsynchronously(^{\n                completionHandler(pointOfInterest);\n            });\n        }\n    }];\n}\n\n- (void)detectLensCategoryOnNextFrame:(CGPoint)point\n                               lenses:(NSArray<SCLens *> 
*)lenses\n                           completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion\n                              context:(NSString *)context\n\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert(completion, @\"completionHandler shouldn't be nil\");\n    SCAssertMainThread();\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        SCLogCapturerInfo(@\"Detecting lens category on next frame. point:%@, lenses:%@\", NSStringFromCGPoint(point),\n                          [lenses valueForKey:NSStringFromSelector(@selector(lensId))]);\n        [_captureResource.lensProcessingCore\n            detectLensCategoryOnNextFrame:point\n                         videoOrientation:_captureResource.videoDataSource.videoOrientation\n                                   lenses:lenses\n                               completion:^(SCLensCategory *_Nullable category, NSInteger categoriesCount) {\n                                   runOnMainThreadAsynchronously(^{\n                                       if (completion) {\n                                           completion(category, categoriesCount);\n                                       }\n                                   });\n                               }];\n    }];\n}\n\n#pragma mark - Configurations\n\n- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                      completionHandler:(dispatch_block_t)completionHandler\n                                context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Setting device position asynchronously to: %lu\", (unsigned long)devicePosition);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        BOOL devicePositionChanged = NO;\n        BOOL nightModeChanged = NO;\n        BOOL portraitModeChanged = NO;\n        BOOL zoomFactorChanged = NO;\n        BOOL flashSupportedOrTorchSupportedChanged = NO;\n        
// Snapshot the pre-change state so we can diff and notify listeners at the end.\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        if (_captureResource.state.devicePosition != devicePosition) {\n            SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];\n            if (device) {\n                if (!device.delegate) {\n                    device.delegate = _captureResource.captureDeviceHandler;\n                }\n\n                SCManagedCaptureDevice *prevDevice = _captureResource.device;\n                [SCCaptureWorker turnARSessionOff:_captureResource];\n                BOOL isStreaming = _captureResource.videoDataSource.isStreaming;\n                if (!SCDeviceSupportsMetal()) {\n                    if (isStreaming) {\n                        [_captureResource.videoDataSource stopStreaming];\n                    }\n                }\n                SCLogCapturerInfo(@\"Set device position beginConfiguration\");\n                [_captureResource.videoDataSource beginConfiguration];\n                [_captureResource.managedSession beginConfiguration];\n                // Turn off flash for the current device in case it is active\n                [_captureResource.device setTorchActive:NO];\n                if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {\n                    _captureResource.frontFlashController.torchActive = NO;\n                }\n                [_captureResource.deviceCapacityAnalyzer removeFocusListener];\n                [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession];\n                _captureResource.device = device;\n                BOOL deviceSet = [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession];\n                // If we are toggling while recording, set the night mode back to not\n                // active\n                if (_captureResource.videoRecording) {\n                    [self _setNightModeActive:NO];\n             
   }\n                // Sync night mode, torch and flash state with the current device\n                devicePositionChanged = (_captureResource.state.devicePosition != devicePosition);\n                nightModeChanged =\n                    (_captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive);\n                portraitModeChanged =\n                    devicePositionChanged &&\n                    (devicePosition == SCManagedCaptureDevicePositionBackDualCamera ||\n                     _captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera);\n                zoomFactorChanged = (_captureResource.state.zoomFactor != _captureResource.device.zoomFactor);\n                if (zoomFactorChanged && _captureResource.device.softwareZoom) {\n                    [SCCaptureWorker softwareZoomWithDevice:_captureResource.device resource:_captureResource];\n                }\n                if (_captureResource.state.flashActive != _captureResource.device.flashActive) {\n                    // preserve flashActive across devices\n                    _captureResource.device.flashActive = _captureResource.state.flashActive;\n                }\n                if (_captureResource.state.liveVideoStreaming != device.liveVideoStreamingActive) {\n                    // preserve liveVideoStreaming state across devices\n                    [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming\n                                                           session:_captureResource.managedSession.avSession];\n                }\n                if (devicePosition == SCManagedCaptureDevicePositionBackDualCamera &&\n                    _captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive) {\n                    // preserve nightMode when switching from back camera to back dual camera\n                    [self 
_setNightModeActive:_captureResource.state.isNightModeActive];\n                }\n\n                flashSupportedOrTorchSupportedChanged =\n                    (_captureResource.state.flashSupported != _captureResource.device.isFlashSupported ||\n                     _captureResource.state.torchSupported != _captureResource.device.isTorchSupported);\n                SCLogCapturerInfo(@\"Set device position: %lu -> %lu, night mode: %d -> %d, zoom \"\n                                  @\"factor: %f -> %f, flash supported: %d -> %d, torch supported: %d -> %d\",\n                                  (unsigned long)_captureResource.state.devicePosition, (unsigned long)devicePosition,\n                                  _captureResource.state.isNightModeActive, _captureResource.device.isNightModeActive,\n                                  _captureResource.state.zoomFactor, _captureResource.device.zoomFactor,\n                                  _captureResource.state.flashSupported, _captureResource.device.isFlashSupported,\n                                  _captureResource.state.torchSupported, _captureResource.device.isTorchSupported);\n                _captureResource.state = [[[[[[[[SCManagedCapturerStateBuilder\n                    withManagedCapturerState:_captureResource.state] setDevicePosition:devicePosition]\n                    setIsNightModeActive:_captureResource.device.isNightModeActive]\n                    setZoomFactor:_captureResource.device.zoomFactor]\n                    setFlashSupported:_captureResource.device.isFlashSupported]\n                    setTorchSupported:_captureResource.device.isTorchSupported]\n                    setIsPortraitModeActive:devicePosition == SCManagedCaptureDevicePositionBackDualCamera] build];\n                [self _updateHRSIEnabled];\n                [self _updateStillImageStabilizationEnabled];\n                // This needs to be done after we have finished configure everything\n                // for session otherwise 
we\n                // may set it up without hooking up the video input yet, and will set\n                // wrong parameter for the\n                // output.\n                [_captureResource.videoDataSource setDevicePosition:devicePosition];\n                if (@available(ios 11.0, *)) {\n                    if (portraitModeChanged) {\n                        [_captureResource.videoDataSource\n                            setDepthCaptureEnabled:_captureResource.state.isPortraitModeActive];\n                        [_captureResource.device setCaptureDepthData:_captureResource.state.isPortraitModeActive\n                                                             session:_captureResource.managedSession.avSession];\n                        [_captureResource.stillImageCapturer\n                            setPortraitModeCaptureEnabled:_captureResource.state.isPortraitModeActive];\n                        if (_captureResource.state.isPortraitModeActive) {\n                            SCProcessingPipelineBuilder *processingPipelineBuilder =\n                                [[SCProcessingPipelineBuilder alloc] init];\n                            processingPipelineBuilder.portraitModeEnabled = YES;\n                            SCProcessingPipeline *pipeline = [processingPipelineBuilder build];\n                            SCLogCapturerInfo(@\"Adding processing pipeline:%@\", pipeline);\n                            [_captureResource.videoDataSource addProcessingPipeline:pipeline];\n                        } else {\n                            [_captureResource.videoDataSource removeProcessingPipeline];\n                        }\n                    }\n                }\n                [_captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:_captureResource.device];\n\n                [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource];\n                [_captureResource.managedSession commitConfiguration];\n                
[_captureResource.videoDataSource commitConfiguration];\n\n                // Checks if the flash is activated and if so switches the flash along\n                // with the camera view. Setting device's torch mode has to be called after -[AVCaptureSession\n                // commitConfiguration], otherwise flash may be not working, especially for iPhone 8/8 Plus.\n                if (_captureResource.state.torchActive ||\n                    (_captureResource.state.flashActive && _captureResource.videoRecording)) {\n                    [_captureResource.device setTorchActive:YES];\n                    if (devicePosition == SCManagedCaptureDevicePositionFront) {\n                        _captureResource.frontFlashController.torchActive = YES;\n                    }\n                }\n\n                SCLogCapturerInfo(@\"Set device position commitConfiguration\");\n                [_captureResource.droppedFramesReporter didChangeCaptureDevicePosition];\n                if (!SCDeviceSupportsMetal()) {\n                    if (isStreaming) {\n                        [SCCaptureWorker startStreaming:_captureResource];\n                    }\n                }\n                // NOTE(review): at this point _captureResource.state.devicePosition has already\n                // been updated to the new position, so the \"from\" argument in the failure logs\n                // below equals \"to\"; the pre-change snapshot in the local variable state looks\n                // like the intended value (the final branch uses it) — confirm before changing.\n                NSArray *inputs = _captureResource.managedSession.avSession.inputs;\n                if (!deviceSet) {\n                    [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                        to:devicePosition\n                                                    reason:@\"setDeviceForInput failed\"];\n                } else if (inputs.count == 0) {\n                    [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                        to:devicePosition\n                                                    reason:@\"no input\"];\n                } else if (inputs.count > 1) {\n                    [self\n                        
_logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                      to:devicePosition\n                                                  reason:[NSString sc_stringWithFormat:@\"multiple inputs: %@\", inputs]];\n                } else {\n                    AVCaptureDeviceInput *input = [inputs firstObject];\n                    AVCaptureDevice *resultDevice = input.device;\n                    if (resultDevice == prevDevice.device) {\n                        [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                            to:devicePosition\n                                                        reason:@\"stayed on previous device\"];\n                    } else if (resultDevice != _captureResource.device.device) {\n                        [self\n                            _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                          to:devicePosition\n                                                      reason:[NSString sc_stringWithFormat:@\"unknown input device: %@\",\n                                                                                           resultDevice]];\n                    }\n                }\n            } else {\n                [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition\n                                                    to:devicePosition\n                                                reason:@\"no device\"];\n            }\n        } else {\n            SCLogCapturerInfo(@\"Device position did not change\");\n            if (_captureResource.device.position != _captureResource.state.devicePosition) {\n                [self _logFailureSetDevicePositionFrom:state.devicePosition\n                                                    to:devicePosition\n                                                
reason:@\"state position set incorrectly\"];\n            }\n        }\n        BOOL stateChanged = ![_captureResource.state isEqual:state];\n        state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            if (stateChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeState:state];\n            }\n            if (devicePositionChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state];\n            }\n            if (nightModeChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state];\n            }\n            if (portraitModeChanged) {\n                [_captureResource.announcer managedCapturer:self didChangePortraitModeActive:state];\n            }\n            if (zoomFactorChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeZoomFactor:state];\n            }\n            if (flashSupportedOrTorchSupportedChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeFlashSupportedAndTorchSupported:state];\n            }\n            if (completionHandler) {\n                completionHandler();\n            }\n        });\n    }];\n}\n\n// Logs a camera-flip failure to the metrics pipeline.\n- (void)_logFailureSetDevicePositionFrom:(SCManagedCaptureDevicePosition)start\n                                      to:(SCManagedCaptureDevicePosition)end\n                                  reason:(NSString *)reason\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Device position change failed: %@\", reason);\n    [[SCLogger sharedInstance] logEvent:kSCCameraMetricsCameraFlipFailure\n                             parameters:@{\n                                 @\"start\" : @(start),\n                                 @\"end\" : @(end),\n                                 @\"reason\" : reason,\n                             }];\n}\n\n// Toggles flash state and notifies listeners on the main thread.\n- (void)setFlashActive:(BOOL)flashActive\n     
completionHandler:(dispatch_block_t)completionHandler\n               context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        BOOL flashActiveOrFrontFlashEnabledChanged = NO;\n        if (_captureResource.state.flashActive != flashActive) {\n            [_captureResource.device setFlashActive:flashActive];\n            SCLogCapturerInfo(@\"Set flash active: %d -> %d\", _captureResource.state.flashActive, flashActive);\n            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]\n                setFlashActive:flashActive] build];\n            flashActiveOrFrontFlashEnabledChanged = YES;\n        }\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            if (flashActiveOrFrontFlashEnabledChanged) {\n                [_captureResource.announcer managedCapturer:self didChangeState:state];\n                [_captureResource.announcer managedCapturer:self didChangeFlashActive:state];\n            }\n            if (completionHandler) {\n                completionHandler();\n            }\n        });\n    }];\n}\n\n// Public lenses toggle without live video streaming or a filter factory.\n- (void)setLensesActive:(BOOL)lensesActive\n      completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    [self _setLensesActive:lensesActive\n        liveVideoStreaming:NO\n             filterFactory:nil\n         completionHandler:completionHandler\n                   context:context];\n}\n\n// Public lenses toggle with an optional filter factory.\n- (void)setLensesActive:(BOOL)lensesActive\n          filterFactory:(SCLookseryFilterFactory *)filterFactory\n      completionHandler:(dispatch_block_t)completionHandler\n                context:(NSString *)context\n{\n    [self _setLensesActive:lensesActive\n        liveVideoStreaming:NO\n             filterFactory:filterFactory\n         
completionHandler:completionHandler\n                   context:context];\n}\n\n// Enables lenses for Talk; live video streaming is implied by activation, and\n// lenses previously activated outside Talk are deactivated first.\n- (void)setLensesInTalkActive:(BOOL)lensesActive\n            completionHandler:(dispatch_block_t)completionHandler\n                      context:(NSString *)context\n{\n    // Talk requires liveVideoStreaming to be turned on\n    BOOL liveVideoStreaming = lensesActive;\n\n    dispatch_block_t activationBlock = ^{\n        [self _setLensesActive:lensesActive\n            liveVideoStreaming:liveVideoStreaming\n                 filterFactory:nil\n             completionHandler:completionHandler\n                       context:context];\n    };\n\n    @weakify(self);\n    [_captureResource.queuePerformer perform:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        // If lenses are enabled in TV3 and it was enabled not from TV3. We have to turn off lenses off at first.\n        BOOL shouldTurnOffBeforeActivation = liveVideoStreaming && !self->_captureResource.state.liveVideoStreaming &&\n                                             self->_captureResource.state.lensesActive;\n        if (shouldTurnOffBeforeActivation) {\n            [self _setLensesActive:NO\n                liveVideoStreaming:NO\n                     filterFactory:nil\n                 completionHandler:activationBlock\n                           context:context];\n        } else {\n            activationBlock();\n        }\n    }];\n}\n\n// Core lenses activation path; runs on the capture queue and keeps the lens\n// processing core, video data source and capturer state in sync.\n- (void)_setLensesActive:(BOOL)lensesActive\n      liveVideoStreaming:(BOOL)liveVideoStreaming\n           filterFactory:(SCLookseryFilterFactory *)filterFactory\n       completionHandler:(dispatch_block_t)completionHandler\n                 context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Setting lenses active to: %d\", lensesActive);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        BOOL lensesActiveChanged = NO;\n        if (_captureResource.state.lensesActive != lensesActive) {\n         
   SCLogCapturerInfo(@\"Set lenses active: %d -> %d\", _captureResource.state.lensesActive, lensesActive);\n            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]\n                setLensesActive:lensesActive] build];\n\n            // Update capturer settings(orientation and resolution) after changing state, because\n            // _setLiveVideoStreaming logic is depends on it\n            [self _setLiveVideoStreaming:liveVideoStreaming];\n\n            [SCCaptureWorker turnARSessionOff:_captureResource];\n\n            // Only enable sample buffer display when lenses is not active.\n            [_captureResource.videoDataSource setSampleBufferDisplayEnabled:!lensesActive];\n            [_captureResource.debugInfoDict setObject:!lensesActive ? @\"True\" : @\"False\"\n                                               forKey:@\"sampleBufferDisplayEnabled\"];\n\n            lensesActiveChanged = YES;\n            [_captureResource.lensProcessingCore setAspectRatio:_captureResource.state.liveVideoStreaming];\n            [_captureResource.lensProcessingCore setLensesActive:_captureResource.state.lensesActive\n                                                videoOrientation:_captureResource.videoDataSource.videoOrientation\n                                                   filterFactory:filterFactory];\n            BOOL modifySource = _captureResource.state.liveVideoStreaming || _captureResource.videoRecording;\n            [_captureResource.lensProcessingCore setModifySource:modifySource];\n            [_captureResource.lensProcessingCore setShouldMuteAllSounds:_captureResource.state.liveVideoStreaming];\n            if (_captureResource.fileInputDecider.shouldProcessFileInput) {\n                [_captureResource.lensProcessingCore setLensesActive:YES\n                                                    videoOrientation:_captureResource.videoDataSource.videoOrientation\n                                     
                  filterFactory:filterFactory];\n            }\n            [_captureResource.videoDataSource\n                setVideoStabilizationEnabledIfSupported:!_captureResource.state.lensesActive];\n\n            if (SCIsMasterBuild()) {\n                // Check that connection configuration is correct\n                if (_captureResource.state.lensesActive &&\n                    _captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {\n                    for (AVCaptureOutput *output in _captureResource.managedSession.avSession.outputs) {\n                        if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) {\n                            AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];\n                            SCAssert(connection.videoMirrored &&\n                                             connection.videoOrientation == !_captureResource.state.liveVideoStreaming\n                                         ? 
AVCaptureVideoOrientationLandscapeRight\n                                         : AVCaptureVideoOrientationPortrait,\n                                     @\"Connection configuration is not correct\");\n                        }\n                    }\n                }\n            }\n        }\n        dispatch_block_t viewChangeHandler = ^{\n            SCManagedCapturerState *state = [_captureResource.state copy]; // update to latest state always\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:self didChangeState:state];\n                [_captureResource.announcer managedCapturer:self didChangeLensesActive:state];\n                [_captureResource.videoPreviewGLViewManager setLensesActive:state.lensesActive];\n                if (completionHandler) {\n                    completionHandler();\n                }\n            });\n        };\n        if (lensesActiveChanged && !lensesActive && SCDeviceSupportsMetal()) {\n            // If we are turning off lenses and have sample buffer display on.\n            // We need to wait until new frame presented in sample buffer before\n            // dismiss the Lenses' OpenGL view.\n            [_captureResource.videoDataSource waitUntilSampleBufferDisplayed:_captureResource.queuePerformer.queue\n                                                           completionHandler:viewChangeHandler];\n        } else {\n            viewChangeHandler();\n        }\n    }];\n}\n\n- (void)_setLiveVideoStreaming:(BOOL)liveVideoStreaming\n{\n    SCAssertPerformer(_captureResource.queuePerformer);\n    BOOL enableLiveVideoStreaming = liveVideoStreaming;\n    if (!_captureResource.state.lensesActive && liveVideoStreaming) {\n        SCLogLensesError(@\"LiveVideoStreaming is not allowed when lenses are turned off\");\n        enableLiveVideoStreaming = NO;\n    }\n    SC_GUARD_ELSE_RETURN(enableLiveVideoStreaming != _captureResource.state.liveVideoStreaming);\n\n    // We 
will disable blackCameraNoOutputDetector if in live video streaming
    // In case there is some black camera when doing video call, will consider re-enable it
    [self _setBlackCameraNoOutputDetectorEnabled:!liveVideoStreaming];

    if (!_captureResource.device.isConnected) {
        SCLogCapturerError(@"Can't perform configuration for live video streaming");
    }
    SCLogCapturerInfo(@"Set live video streaming: %d -> %d", _captureResource.state.liveVideoStreaming,
                      enableLiveVideoStreaming);
    _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
        setLiveVideoStreaming:enableLiveVideoStreaming] build];

    // Pause streaming (if active) while the session is reconfigured; resumed below.
    BOOL isStreaming = _captureResource.videoDataSource.isStreaming;
    if (isStreaming) {
        [_captureResource.videoDataSource stopStreaming];
    }

    SCLogCapturerInfo(@"Set live video streaming beginConfiguration");
    [_captureResource.managedSession performConfiguration:^{
        [_captureResource.videoDataSource beginConfiguration];

        // If video chat is active we should use portrait orientation, otherwise landscape right
        [_captureResource.videoDataSource setVideoOrientation:_captureResource.state.liveVideoStreaming
                                                                  ? AVCaptureVideoOrientationPortrait
                                                                  : AVCaptureVideoOrientationLandscapeRight];

        [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming
                                               session:_captureResource.managedSession.avSession];

        [_captureResource.videoDataSource commitConfiguration];
    }];

    SCLogCapturerInfo(@"Set live video streaming commitConfiguration");

    // Resume streaming if we stopped it above.
    if (isStreaming) {
        [_captureResource.videoDataSource startStreaming];
    }
}

// Registers/unregisters the black-camera "no output" detector both as a capturer
// listener and as a video data source listener, so enable/disable stays symmetric.
- (void)_setBlackCameraNoOutputDetectorEnabled:(BOOL)enabled
{
    if (enabled) {
        [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
        [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    } else {
        [self removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
        [_captureResource.videoDataSource
            removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];
    }
}

// Toggles the torch on the capture queue. When the front camera is current, the
// value is mirrored into frontFlashController as well. Listeners are notified on
// the main thread only if the state actually changed; completionHandler always runs.
- (void)setTorchActiveAsynchronously:(BOOL)torchActive
                   completionHandler:(dispatch_block_t)completionHandler
                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting torch active asynchronously to: %d", torchActive);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL torchActiveChanged = NO;
        if (_captureResource.state.torchActive != torchActive) {
            [_captureResource.device setTorchActive:torchActive];
            if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                _captureResource.frontFlashController.torchActive = torchActive;
            }
            SCLogCapturerInfo(@"Set torch active: %d -> %d", 
_captureResource.state.torchActive, torchActive);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setTorchActive:torchActive] build];
            torchActiveChanged = YES;
        }
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (torchActiveChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}

// Toggles night mode on the capture queue. The device configuration only happens
// while the device is connected; the state update and listener notification happen
// regardless, and completionHandler always runs on the main thread.
- (void)setNightModeActiveAsynchronously:(BOOL)active
                       completionHandler:(dispatch_block_t)completionHandler
                                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        // Only do the configuration if current device is connected
        if (_captureResource.device.isConnected) {
            SCLogCapturerInfo(@"Set night mode beginConfiguration");
            [_captureResource.managedSession performConfiguration:^{
                [self _setNightModeActive:active];
                [self _updateHRSIEnabled];
                [self _updateStillImageStabilizationEnabled];
            }];
            SCLogCapturerInfo(@"Set night mode commitConfiguration");
        }
        BOOL nightModeChanged = (_captureResource.state.isNightModeActive != active);
        if (nightModeChanged) {
            SCLogCapturerInfo(@"Set night mode active: %d -> %d", _captureResource.state.isNightModeActive, active);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setIsNightModeActive:active] build];
        }
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            if (nightModeChanged) {
                [_captureResource.announcer managedCapturer:self didChangeState:state];
                [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state];
            }
            if (completionHandler) {
                completionHandler();
            }
        });
    }];
}

// Applies hardware night mode on the device, plus the software (enhanced) night
// mode pipeline when the device supports it.
- (void)_setNightModeActive:(BOOL)active
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.device setNightModeActive:active session:_captureResource.managedSession.avSession];
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {
        [self _toggleSoftwareNightmode:active];
    }
}

// Installs or removes the enhanced-night-mode processing pipeline on the video
// data source.
- (void)_toggleSoftwareNightmode:(BOOL)active
{
    SCTraceODPCompatibleStart(2);
    if (active) {
        SCLogCapturerInfo(@"Set enhanced night mode active");
        SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
        processingPipelineBuilder.enhancedNightMode = YES;
        SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
        SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline);
        [_captureResource.videoDataSource addProcessingPipeline:pipeline];
    } else {
        SCLogCapturerInfo(@"Removing processing pipeline");
        [_captureResource.videoDataSource removeProcessingPipeline];
    }
}

// YES only on iPhone 5-series hardware when flash is off and no lens is applied;
// used to decide whether stills should be grabbed from the video stream instead.
- (BOOL)_shouldCaptureImageFromVideo
{
    SCTraceODPCompatibleStart(2);
    BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];
    return isIphone5Series && !_captureResource.state.flashActive && ![self isLensApplied];
}

// Disallows zoom changes (main thread only). Counterpart of unlockZoomWithContext:.
- (void)lockZoomWithContext:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    SCLogCapturerInfo(@"Lock zoom");
    _captureResource.allowsZoom = NO;
}

// Re-allows zoom changes (main thread only), unless ARKit is currently active.
- (void)unlockZoomWithContext:(NSString *)context
{
    
SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    SCLogCapturerInfo(@"Unlock zoom");
    // Don't let anyone unlock the zoom while ARKit is active. When ARKit shuts down, it'll unlock it.
    SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);
    _captureResource.allowsZoom = YES;
}

// Applies a zoom factor to the current device (main thread only); silently ignored
// while zoom is locked.
- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom);
    SCLogCapturerInfo(@"Setting zoom factor to: %f", zoomFactor);
    [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:_captureResource.device immediately:NO];
}

// Immediately applies a zoom factor to the device at the given position (main
// thread only); silently ignored while zoom is locked.
- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                              context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom);
    SCLogCapturerInfo(@"Setting zoom factor to: %f devicePosition:%lu", zoomFactor, (unsigned long)devicePosition);
    SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];
    [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:device immediately:YES];
}

// Sets the exposure point of interest on the capture queue. The point is flipped
// vertically for mirrored video, and remapped into the cropped region when
// software zoom is in effect. completionHandler runs on the main thread.
- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                        fromUser:(BOOL)fromUser
                               completionHandler:(dispatch_block_t)completionHandler
                                         context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            CGPoint exposurePoint;
            if ([self isVideoMirrored]) {
                exposurePoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y);
            } else {
                exposurePoint = pointOfInterest;
            }
            if (_captureResource.device.softwareZoom) {
                // Fix for the zooming factor
                // NOTE(review): scales the offset from center by 1/softwareZoom — assumes
                // softwareZoom is the current scale factor; confirm against SCManagedCaptureDevice.
                [_captureResource.device
                    setExposurePointOfInterest:CGPointMake(
                                                   (exposurePoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5,
                                                   (exposurePoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)
                                      fromUser:fromUser];
            } else {
                [_captureResource.device setExposurePointOfInterest:exposurePoint fromUser:fromUser];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}

// Sets the autofocus point of interest on the capture queue, with the same
// mirroring flip and software-zoom remapping as the exposure variant above.
- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                completionHandler:(dispatch_block_t)completionHandler
                                          context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            CGPoint focusPoint;
            if ([self isVideoMirrored]) {
                focusPoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y);
            } else {
                focusPoint = pointOfInterest;
            }
            if (_captureResource.device.softwareZoom) {
                // Fix for the zooming factor
                [_captureResource.device
                    setAutofocusPointOfInterest:CGPointMake(
                                                    (focusPoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5,
                                                    (focusPoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)];
            } else {
                [_captureResource.device setAutofocusPointOfInterest:focusPoint];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}

// Thin forwarder to SCCaptureWorker for the portrait-mode point of interest.
- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                   completionHandler:(dispatch_block_t)completionHandler
                                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:pointOfInterest
                                                completionHandler:completionHandler
                                                         resource:_captureResource];
}

// Restores continuous autofocus and recenters the exposure point (0.5, 0.5) on
// the capture queue; also recenters the portrait-mode point when that tweak is on.
- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                                  context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.device.isConnected) {
            [_captureResource.device continuousAutofocus];
            [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
            if (SCCameraTweaksEnablePortraitModeAutofocus()) {
                [self setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5)
                                                 completionHandler:nil
                                                           context:context];
            }
        }
        if (completionHandler) {
            runOnMainThreadAsynchronously(completionHandler);
        }
    }];
}

#pragma mark - Add / Remove Listener

// Adds a listener to the announcer and, if it was newly added, replays the current
// state to it via every optional callback it implements (on the main thread).
- (void)addListener:(id<SCManagedCapturerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    // Only do the make sure thing if I added it to announcer fresh.
    SC_GUARD_ELSE_RETURN([_captureResource.announcer 
addListener:listener]);
    // After added the listener, make sure we called all these methods with its
    // initial values
    [_captureResource.queuePerformer perform:^{
        // Snapshot everything on the capture queue, then deliver on the main thread.
        SCManagedCapturerState *state = [_captureResource.state copy];
        AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
        LSAGLView *videoPreviewGLView = _captureResource.videoPreviewGLViewManager.view;
        runOnMainThreadAsynchronously(^{
            SCTraceStart();
            // Every callback is optional, so probe with respondsToSelector: first.
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) {
                [listener managedCapturer:self didChangeState:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) {
                [listener managedCapturer:self didChangeCaptureDevicePosition:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) {
                [listener managedCapturer:self didChangeNightModeActive:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) {
                [listener managedCapturer:self didChangeFlashActive:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) {
                [listener managedCapturer:self didChangeFlashSupportedAndTorchSupported:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) {
                [listener managedCapturer:self didChangeZoomFactor:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) {
                [listener managedCapturer:self didChangeLowLightCondition:state];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) {
                [listener managedCapturer:self didChangeAdjustingExposure:state];
            }
            // The AVCaptureVideoPreviewLayer is only used on non-Metal devices.
            if (!SCDeviceSupportsMetal()) {
                if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) {
                    [listener managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];
                }
            }
            if (videoPreviewGLView &&
                [listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) {
                [listener managedCapturer:self didChangeVideoPreviewGLView:videoPreviewGLView];
            }
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) {
                [listener managedCapturer:self didChangeLensesActive:state];
            }
        });
    }];
}

// Unregisters a capturer listener from the announcer.
- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.announcer removeListener:listener];
}

// Registers a listener directly on the video data source.
- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.videoDataSource addListener:listener];
}

// Unregisters a video data source listener.
- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.videoDataSource removeListener:listener];
}

// Registers a listener on the device capacity analyzer.
- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.deviceCapacityAnalyzer addListener:listener];
}

// Unregisters a device capacity analyzer listener.
- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.deviceCapacityAnalyzer removeListener:listener];
}

#pragma mark - Debug

// Human-readable dump of all outstanding capturer tokens.
- (NSString *)debugInfo
{
    
SCTraceODPCompatibleStart(2);
    NSMutableString *info = [NSMutableString new];
    [info appendString:@"==== SCManagedCapturer tokens ====\n"];
    [_captureResource.tokenSet enumerateObjectsUsingBlock:^(SCCapturerToken *_Nonnull token, BOOL *_Nonnull stop) {
        [info appendFormat:@"%@\n", token.debugDescription];
    }];
    return info.copy;
}

- (NSString *)description
{
    return [self debugDescription];
}

// Combines the capturer state dump with the video streamer's description.
- (NSString *)debugDescription
{
    return [NSString sc_stringWithFormat:@"SCManagedCapturer state:\n%@\nVideo streamer info:\n%@",
                                         _captureResource.state.debugDescription,
                                         _captureResource.videoDataSource.description];
}

// Forwards to SCCaptureWorker.
- (CMTime)firstWrittenAudioBufferDelay
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker firstWrittenAudioBufferDelay:_captureResource];
}

// Forwards to SCCaptureWorker.
- (BOOL)audioQueueStarted
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker audioQueueStarted:_captureResource];
}

#pragma mark - SCTimeProfilable

+ (SCTimeProfilerContext)context
{
    return SCTimeProfilerContextCamera;
}

// We disable and re-enable liveness timer when enter background and foreground

// Tears down the liveness timer, marks the background flag on the capture queue,
// and either hides the preview layer (non-Metal) or stops streaming (Metal).
- (void)applicationDidEnterBackground
{
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker destroyLivenessConsistencyTimer:_captureResource];
    // Hide the view when in background.
    if (!SCDeviceSupportsMetal()) {
        [_captureResource.queuePerformer perform:^{
            _captureResource.appInBackground = YES;
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            _captureResource.videoPreviewLayer.hidden = YES;
            [CATransaction commit];
        }];
    } else {
        [_captureResource.queuePerformer perform:^{
            _captureResource.appInBackground = YES;
            // If it is running, stop the streaming.
            if (_captureResource.status == SCManagedCapturerStatusRunning) {
                [_captureResource.videoDataSource stopStreaming];
            }
        }];
    }
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidEnterBackground];
}

// Clears the background flag on the capture queue, restores the preview/streaming
// for the respective render path, and (on iOS 10+) runs the consistency and
// AVCaptureSession fixes while entering foreground.
- (void)applicationWillEnterForeground
{
    SCTraceODPCompatibleStart(2);
    if (!SCDeviceSupportsMetal()) {
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            _captureResource.appInBackground = NO;

            // NOTE(review): this inner check is redundant — we are already inside the
            // !SCDeviceSupportsMetal() branch.
            if (!SCDeviceSupportsMetal()) {
                [self _fixNonMetalSessionPreviewInconsistency];
            }

            // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but need to verify.
            if (SC_AT_LEAST_IOS_10) {
                [self _runningConsistencyCheckAndFix];
                // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground.
                _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
                [self _fixAVSessionIfNecessary];
            }
        }];
    } else {
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            _captureResource.appInBackground = NO;
            if (_captureResource.status == SCManagedCapturerStatusRunning) {
                [_captureResource.videoDataSource startStreaming];
            }
            // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but need to verify.
            if (SC_AT_LEAST_IOS_10) {
                [self _runningConsistencyCheckAndFix];
                // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground.
                _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
                [self _fixAVSessionIfNecessary];
            }
        }];
    }
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillEnterForeground];
}

// Pauses the capture session KVO check while the app is resigning active.
- (void)applicationWillResignActive
{
    SCTraceODPCompatibleStart(2);
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillResignActive];
    [_captureResource.queuePerformer perform:^{
        [self _pauseCaptureSessionKVOCheck];
    }];
}

// Runs the consistency check immediately on activation, resumes the KVO check,
// and reschedules the liveness timer when the capturer is running.
- (void)applicationDidBecomeActive
{
    SCTraceODPCompatibleStart(2);
    [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidBecomeActive];
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        // Since we foreground it, do the running consistency check immediately.
        // Reset number of retries for fixing status inconsistency
        _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0;
        [self _runningConsistencyCheckAndFix];
        if (!SC_AT_LEAST_IOS_10) {
            // For OS version < iOS 10, try to fix AVCaptureSession after app becomes active.
            _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
            [self _fixAVSessionIfNecessary];
        }
        [self _resumeCaptureSessionKVOCheck];
        if (_captureResource.status == SCManagedCapturerStatusRunning) {
            // Reschedule the timer if we don't have it already
            runOnMainThreadAsynchronously(^{
                SCTraceStart();
                [SCCaptureWorker setupLivenessConsistencyTimerIfForeground:_captureResource];
            });
        }
    }];
}

- 
(void)_runningConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    // Don't enforce consistency on simulator, as it'll constantly false-positive and restart session.
    SC_GUARD_ELSE_RETURN(![SCDeviceName isSimulator]);
    // Dispatch to the AR or AV flavor depending on which session type is active.
    if (_captureResource.state.arSessionActive) {
        [self _runningARSessionConsistencyCheckAndFix];
    } else {
        [self _runningAVCaptureSessionConsistencyCheckAndFix];
    }
}

// Detects a "stuck" ARSession by comparing the most recent frame's timestamp
// against the current media time, and bounces the session off/on when the frame
// is too stale. Must run on the capture queue (asserted below).
- (void)_runningARSessionConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(_captureResource.state.arSessionActive, @"");
    if (@available(iOS 11.0, *)) {
        // Occasionally the capture session will get into a weird "stuck" state.
        // If this happens, we'll see that the timestamp for the most recent frame is behind the current time.
        // Pausing the session for a moment and restarting to attempt to jog it loose.
        NSTimeInterval timeSinceLastFrame = CACurrentMediaTime() - _captureResource.arSession.currentFrame.timestamp;
        BOOL reset = NO;
        // A session that is still initializing gets its own (hung-init) threshold.
        if (_captureResource.arSession.currentFrame.camera.trackingStateReason == ARTrackingStateReasonInitializing) {
            if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold) {
                SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (possible hung init), fix now ***");
                reset = YES;
            }
        } else if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) {
            SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (init complete), fix now ***");
            reset = YES;
        }
        if (reset) {
            // Full off/on cycle to jog the session loose.
            [SCCaptureWorker turnARSessionOff:_captureResource];
            [SCCaptureWorker turnARSessionOn:_captureResource];
        }
    }
}

- 
(void)_runningAVCaptureSessionConsistencyCheckAndFix
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(!_captureResource.state.arSessionActive, @"");
    [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                                     uniqueId:@""
                                     stepName:@"startConsistencyCheckAndFix"];
    // If the video preview layer's hidden status is out of sync with the
    // session's running status,
    // fix that now. Also, we don't care that much if the status is not running.
    if (!SCDeviceSupportsMetal()) {
        [self _fixNonMetalSessionPreviewInconsistency];
    }
    // Skip the liveness consistency check if we are in background
    if (_captureResource.appInBackground) {
        SCLogCapturerInfo(@"*** Skipped liveness consistency check, as we are in the background ***");
        return;
    }
    // Inconsistency = we believe we are running but the session is not. Retry with
    // the current session up to the max retry count, then recreate the session.
    if (_captureResource.status == SCManagedCapturerStatusRunning && !_captureResource.managedSession.isRunning) {
        SCGhostToSnappableSignalCameraFixInconsistency();
        SCLogCapturerInfo(@"*** Found status inconsistency for running, fix now ***");
        _captureResource.numRetriesFixInconsistencyWithCurrentSession++;
        if (_captureResource.numRetriesFixInconsistencyWithCurrentSession <=
            kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) {
            SCTraceStartSection("Fix non-running session")
            {
                if (!SCDeviceSupportsMetal()) {
                    [CATransaction begin];
                    [CATransaction setDisableActions:YES];
                    [_captureResource.managedSession startRunning];
                    [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
                    [CATransaction commit];
                } else {
                    [_captureResource.managedSession startRunning];
                }
            }
            SCTraceEndSection();
        } else {
            SCTraceStartSection("Create new capturer session")
            {
                // start running with new capture session if the inconsistency fixing not succeeds
                // after kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession retries
                SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***");
                [self _startRunningWithNewCaptureSession];
            }
            SCTraceEndSection();
        }
        BOOL sessionIsRunning = _captureResource.managedSession.isRunning;
        if (sessionIsRunning && !SCDeviceSupportsMetal()) {
            // If it is fixed, we signal received the first frame.
            SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
            runOnMainThreadAsynchronously(^{
                // To approximate this did render timer, it is not accurate.
                SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
            });
        }
        SCLogCapturerInfo(@"*** Applied inconsistency fix, running state : %@ ***", sessionIsRunning ? @"YES" : @"NO");
        if (_captureResource.managedSession.isRunning) {
            [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                                             uniqueId:@""
                                             stepName:@"finishConsistencyCheckAndFix"];
            [[SCLogger sharedInstance]
                logTimedEventEnd:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY"
                        uniqueId:@""
                      parameters:@{
                          @"count" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession)
                      }];
        }
    } else {
        [[SCLogger sharedInstance] cancelLogTimedEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@""];
        // Reset number of retries for fixing status inconsistency
        _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0;
    }

    // Keep the black camera detector informed of the final running state.
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning];
}

// Handles the media-services-were-reset notification: runs the lost-services
// handling first, then restarts the ARSession if it was active.
- (void)mediaServicesWereReset
{
    SCTraceODPCompatibleStart(2);
    [self mediaServicesWereLost];
    [_captureResource.queuePerformer perform:^{
        /* If the current state requires the ARSession, restart it.
         Explicitly flip the arSessionActive flag so that `turnSessionOn` thinks it can reset itself.
         */
        if (_captureResource.state.arSessionActive) {
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setArSessionActive:NO] build];
            [SCCaptureWorker turnARSessionOn:_captureResource];
        }
    }];
}

// Handles loss of media services; only reloads the output URL's asset keys when
// neither the ARSession nor the AV session is running.
- (void)mediaServicesWereLost
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        if (!_captureResource.state.arSessionActive && !_captureResource.managedSession.isRunning) {
            /*
             If the session is running we will 
trigger
             _sessionRuntimeError: so nothing else is
             needed here.
             */
            [_captureResource.videoCapturer.outputURL reloadAssetKeys];
        }
    }];
}

// Liveness timer callback (scheduled on the main thread): triggers the running
// consistency check on the capture queue while the app is active.
- (void)_livenessConsistency:(NSTimer *)timer
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    // We can directly check the application state because this timer is scheduled
    // on the main thread.
    if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) {
        [_captureResource.queuePerformer perform:^{
            [self _runningConsistencyCheckAndFix];
        }];
    }
}

// AVCaptureSessionRuntimeErrorNotification handler. A media-services reset is
// recovered by simply restarting the session; any other error schedules a
// (throttled) full session recreation. The error is always logged.
- (void)_sessionRuntimeError:(NSNotification *)notification
{
    SCTraceODPCompatibleStart(2);
    NSError *sessionError = notification.userInfo[AVCaptureSessionErrorKey];
    SCLogCapturerError(@"Encountered runtime error for capture session %@", sessionError);

    NSString *errorString =
        [sessionError.description stringByReplacingOccurrencesOfString:@" " withString:@"_"].uppercaseString
            ?: @"UNKNOWN_ERROR";
    [[SCUserTraceLogger shared]
        logUserTraceEvent:[NSString sc_stringWithFormat:@"AVCAPTURESESSION_RUNTIME_ERROR_%@", errorString]];

    if (sessionError.code == AVErrorMediaServicesWereReset) {
        // If it is an AVErrorMediaServicesWereReset error, we can just call startRunning, it is much more lightweight
        [_captureResource.queuePerformer perform:^{
            if (!SCDeviceSupportsMetal()) {
                [CATransaction begin];
                [CATransaction setDisableActions:YES];
                [_captureResource.managedSession startRunning];
                [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
                [CATransaction commit];
            } else {
                [_captureResource.managedSession startRunning];
            }
        }];
    } else {
        // Coalesce: only one recreation fix may be in flight at a time.
        if (_captureResource.isRecreateSessionFixScheduled) {
            SCLogCoreCameraInfo(@"Fixing session runtime error is scheduled, skip");
            return;
        }

        _captureResource.isRecreateSessionFixScheduled = YES;
        // Throttle: if the last runtime-error fix was too recent, delay this one.
        NSTimeInterval delay = 0;
        NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate];
        if (timeNow - _captureResource.lastSessionRuntimeErrorTime < kMinFixSessionRuntimeErrorInterval) {
            SCLogCoreCameraInfo(@"Fixing runtime error session in less than %f, delay",
                                kMinFixSessionRuntimeErrorInterval);
            delay = kMinFixSessionRuntimeErrorInterval;
        }
        _captureResource.lastSessionRuntimeErrorTime = timeNow;
        [_captureResource.queuePerformer perform:^{
            SCTraceStart();
            // Occasionally _captureResource.avSession will throw out an error when shutting down. If this happens while
            // ARKit is starting up,
            // _startRunningWithNewCaptureSession will throw a wrench in ARSession startup and freeze the image.
            SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive);
            // Need to reset the flag before _startRunningWithNewCaptureSession
            _captureResource.isRecreateSessionFixScheduled = NO;
            [self _startRunningWithNewCaptureSession];
            [self _fixAVSessionIfNecessary];
        }
                                           after:delay];
    }

    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsRuntimeError
                                      parameters:@{
                                          @"error" : sessionError == nil ? 
@\"Unknown error\" : sessionError.description,\n                                      }\n                                secretParameters:nil\n                                         metrics:nil];\n}\n\n- (void)_startRunningWithNewCaptureSessionIfNecessary\n{\n    SCTraceODPCompatibleStart(2);\n    if (_captureResource.isRecreateSessionFixScheduled) {\n        SCLogCapturerInfo(@\"Session recreation is scheduled, return\");\n        return;\n    }\n    _captureResource.isRecreateSessionFixScheduled = YES;\n    [_captureResource.queuePerformer perform:^{\n        // Need to reset the flag before _startRunningWithNewCaptureSession\n        _captureResource.isRecreateSessionFixScheduled = NO;\n        [self _startRunningWithNewCaptureSession];\n    }];\n}\n\n- (void)_startRunningWithNewCaptureSession\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    SCLogCapturerInfo(@\"Start running with new capture session. isRecording:%d isStreaming:%d status:%lu\",\n                      _captureResource.videoRecording, _captureResource.videoDataSource.isStreaming,\n                      (unsigned long)_captureResource.status);\n\n    // Mark the start of recreating session\n    [_captureResource.blackCameraDetector sessionWillRecreate];\n\n    // Light weight fix gating\n    BOOL lightWeightFix = SCCameraTweaksSessionLightWeightFixEnabled() || SCCameraTweaksBlackCameraRecoveryEnabled();\n\n    if (!lightWeightFix) {\n        [_captureResource.deviceCapacityAnalyzer removeListener:_captureResource.stillImageCapturer];\n        [self removeListener:_captureResource.stillImageCapturer];\n        [_captureResource.videoDataSource removeListener:_captureResource.lensProcessingCore.capturerListener];\n\n        [_captureResource.videoDataSource removeListener:_captureResource.deviceCapacityAnalyzer];\n        [_captureResource.videoDataSource removeListener:_captureResource.stillImageCapturer];\n\n        if 
(SCIsMasterBuild()) {\n            [_captureResource.videoDataSource removeListener:_captureResource.videoStreamReporter];\n        }\n        [_captureResource.videoDataSource removeListener:_captureResource.videoScanner];\n        [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];\n        [_captureResource.videoDataSource\n            removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];\n    }\n\n    [_captureResource.videoCapturer.outputURL reloadAssetKeys];\n\n    BOOL isStreaming = _captureResource.videoDataSource.isStreaming;\n    if (_captureResource.videoRecording) {\n        // Stop video recording prematurely\n        [self stopRecordingAsynchronouslyWithContext:SCCapturerContext];\n        NSError *error = [NSError\n            errorWithDomain:kSCManagedCapturerErrorDomain\n                description:\n                    [NSString\n                        sc_stringWithFormat:@\"Interrupt video recording to start new session. 
%@\",\n                                            @{\n                                                @\"isAVSessionRunning\" : @(_captureResource.managedSession.isRunning),\n                                                @\"numRetriesFixInconsistency\" :\n                                                    @(_captureResource.numRetriesFixInconsistencyWithCurrentSession),\n                                                @\"numRetriesFixAVCaptureSession\" :\n                                                    @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession),\n                                                @\"lastSessionRuntimeErrorTime\" :\n                                                    @(_captureResource.lastSessionRuntimeErrorTime),\n                                            }]\n                       code:-1];\n        [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoRecordingInterrupted\n                                          parameters:@{\n                                              @\"error\" : error.description\n                                          }\n                                    secretParameters:nil\n                                             metrics:nil];\n    }\n    @try {\n        if (@available(iOS 11.0, *)) {\n            [_captureResource.arSession pause];\n            if (!lightWeightFix) {\n                [_captureResource.videoDataSource removeListener:_captureResource.arImageCapturer];\n            }\n        }\n        [_captureResource.managedSession stopRunning];\n        [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession];\n    } @catch (NSException *exception) {\n        SCLogCapturerError(@\"Encountered Exception %@\", exception);\n    } @finally {\n        // Nil out device inputs from both devices\n        [[SCManagedCaptureDevice front] resetDeviceAsInput];\n        [[SCManagedCaptureDevice back] resetDeviceAsInput];\n    }\n\n    if 
(!SCDeviceSupportsMetal()) {\n        // Redo the video preview to mitigate https://ph.sc-corp.net/T42584\n        [SCCaptureWorker redoVideoPreviewLayer:_captureResource];\n    }\n\n#if !TARGET_IPHONE_SIMULATOR\n    if (@available(iOS 11.0, *)) {\n        _captureResource.arSession = [[ARSession alloc] init];\n        _captureResource.arImageCapturer =\n            [_captureResource.arImageCaptureProvider arImageCapturerWith:_captureResource.queuePerformer\n                                                      lensProcessingCore:_captureResource.lensProcessingCore];\n    }\n    [self _resetAVCaptureSession];\n#endif\n    [_captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];\n    [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession];\n\n    if (_captureResource.fileInputDecider.shouldProcessFileInput) {\n        // Keep the same logic, always create new VideoDataSource\n        [self _setupNewVideoFileDataSource];\n    } else {\n        if (!lightWeightFix) {\n            [self _setupNewVideoDataSource];\n        } else {\n            [self _setupVideoDataSourceWithNewSession];\n        }\n    }\n\n    if (_captureResource.status == SCManagedCapturerStatusRunning) {\n        if (!SCDeviceSupportsMetal()) {\n            [CATransaction begin];\n            [CATransaction setDisableActions:YES];\n            // Set the session to be the new session before start running.\n            _captureResource.videoPreviewLayer.session = _captureResource.managedSession.avSession;\n            if (!_captureResource.appInBackground) {\n                [_captureResource.managedSession startRunning];\n            }\n            [SCCaptureWorker setupVideoPreviewLayer:_captureResource];\n            [CATransaction commit];\n        } else {\n            if (!_captureResource.appInBackground) {\n                [_captureResource.managedSession startRunning];\n            }\n        }\n    }\n    // Since this 
start and stop happens in one block, we don't have to worry\n    // about streamingSequence issues\n    if (isStreaming) {\n        [_captureResource.videoDataSource startStreaming];\n    }\n    SCManagedCapturerState *state = [_captureResource.state copy];\n    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;\n    runOnMainThreadAsynchronously(^{\n        [_captureResource.announcer managedCapturer:self didResetFromRuntimeError:state];\n        if (!SCDeviceSupportsMetal()) {\n            [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer];\n        }\n    });\n\n    // Mark the end of recreating session\n    [_captureResource.blackCameraDetector sessionDidRecreate];\n}\n\n/**\n * Heavy-weight session fixing approach: recreating everything\n */\n- (void)_setupNewVideoDataSource\n{\n    if (@available(iOS 11.0, *)) {\n        _captureResource.videoDataSource =\n            [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession\n                                                  arSession:_captureResource.arSession\n                                             devicePosition:_captureResource.state.devicePosition];\n        [_captureResource.videoDataSource addListener:_captureResource.arImageCapturer];\n        if (_captureResource.state.isPortraitModeActive) {\n            [_captureResource.videoDataSource setDepthCaptureEnabled:YES];\n\n            SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];\n            processingPipelineBuilder.portraitModeEnabled = YES;\n            SCProcessingPipeline *pipeline = [processingPipelineBuilder build];\n            [_captureResource.videoDataSource addProcessingPipeline:pipeline];\n        }\n    } else {\n        _captureResource.videoDataSource =\n            [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession\n               
                              devicePosition:_captureResource.state.devicePosition];\n    }\n\n    [self _setupVideoDataSourceListeners];\n}\n\n- (void)_setupNewVideoFileDataSource\n{\n    _captureResource.videoDataSource =\n        [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:_captureResource.fileInputDecider.fileURL];\n    [_captureResource.lensProcessingCore setLensesActive:YES\n                                        videoOrientation:_captureResource.videoDataSource.videoOrientation\n                                           filterFactory:nil];\n    runOnMainThreadAsynchronously(^{\n        [_captureResource.videoPreviewGLViewManager prepareViewIfNecessary];\n    });\n    [self _setupVideoDataSourceListeners];\n}\n\n/**\n * Light-weight session fixing approach: recreating AVCaptureSession / AVCaptureOutput, and bind it to the new session\n */\n- (void)_setupVideoDataSourceWithNewSession\n{\n    if (@available(iOS 11.0, *)) {\n        SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource;\n        [streamer setupWithSession:_captureResource.managedSession.avSession\n                    devicePosition:_captureResource.state.devicePosition];\n        [streamer setupWithARSession:_captureResource.arSession];\n    } else {\n        SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource;\n        [streamer setupWithSession:_captureResource.managedSession.avSession\n                    devicePosition:_captureResource.state.devicePosition];\n    }\n    [_captureResource.stillImageCapturer setupWithSession:_captureResource.managedSession.avSession];\n}\n\n- (void)_setupVideoDataSourceListeners\n{\n    if (_captureResource.videoFrameSampler) {\n        [_captureResource.announcer addListener:_captureResource.videoFrameSampler];\n    }\n\n    [_captureResource.videoDataSource addSampleBufferDisplayController:_captureResource.sampleBufferDisplayController];\n    
[_captureResource.videoDataSource addListener:_captureResource.lensProcessingCore.capturerListener];\n    [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];\n    if (SCIsMasterBuild()) {\n        [_captureResource.videoDataSource addListener:_captureResource.videoStreamReporter];\n    }\n    [_captureResource.videoDataSource addListener:_captureResource.videoScanner];\n    [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector];\n    _captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:_captureResource];\n    [_captureResource.deviceCapacityAnalyzer addListener:_captureResource.stillImageCapturer];\n    [_captureResource.videoDataSource addListener:_captureResource.stillImageCapturer];\n\n    [self addListener:_captureResource.stillImageCapturer];\n}\n\n- (void)_resetAVCaptureSession\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;\n    // lazily initialize _captureResource.kvoController on background thread\n    if (!_captureResource.kvoController) {\n        _captureResource.kvoController = [[FBKVOController alloc] initWithObserver:self];\n    }\n    [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession];\n    _captureResource.managedSession =\n        [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:_captureResource.blackCameraDetector];\n    [_captureResource.kvoController observe:_captureResource.managedSession.avSession\n                                    keyPath:@keypath(_captureResource.managedSession.avSession, running)\n                                    options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld\n                                     action:_captureResource.handleAVSessionStatusChange];\n}\n\n- 
(void)_pauseCaptureSessionKVOCheck\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession];\n}\n\n- (void)_resumeCaptureSessionKVOCheck\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    [_captureResource.kvoController observe:_captureResource.managedSession.avSession\n                                    keyPath:@keypath(_captureResource.managedSession.avSession, running)\n                                    options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld\n                                     action:_captureResource.handleAVSessionStatusChange];\n}\n\n- (id<SCManagedVideoDataSource>)currentVideoDataSource\n{\n    SCTraceODPCompatibleStart(2);\n    return _captureResource.videoDataSource;\n}\n\n- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback\n{\n    SCTraceODPCompatibleStart(2);\n    [_captureResource.queuePerformer perform:^{\n        // Front and back should be available if user has no restriction on camera.\n        BOOL front = [[SCManagedCaptureDevice front] isAvailable];\n        BOOL back = [[SCManagedCaptureDevice back] isAvailable];\n        AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];\n        runOnMainThreadAsynchronously(^{\n            callback(front, back, status);\n        });\n    }];\n}\n\n- (SCSnapCreationTriggers *)snapCreationTriggers\n{\n    return _captureResource.snapCreationTriggers;\n}\n\n- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector\n                             deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider\n                                 fileInputDecider:(id<SCFileInputDecider>)fileInputDecider\n                           
arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider\n                                    glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager\n                                  lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider\n                              lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker\n    managedCapturerPreviewLayerControllerDelegate:\n        (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate\n{\n    _captureResource.blackCameraDetector = blackCameraDetector;\n    _captureResource.deviceMotionProvider = deviceMotionProvider;\n    _captureResource.fileInputDecider = fileInputDecider;\n    _captureResource.arImageCaptureProvider = arImageCaptureProvider;\n    _captureResource.videoPreviewGLViewManager = glViewManager;\n    [_captureResource.videoPreviewGLViewManager configureWithCaptureResource:_captureResource];\n    _captureResource.lensAPIProvider = lensAPIProvider;\n    _captureResource.lsaTrackingComponentHandler = lsaComponentTracker;\n    [_captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_captureResource];\n    _captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;\n    [SCManagedCapturePreviewLayerController sharedInstance].delegate = previewLayerControllerDelegate;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedCapturerV1_Private.h",
    "content": "//\n//  SCManagedCapturerV1_Private.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 20/12/2017.\n//\n\n#import \"SCManagedCapturerV1.h\"\n\n@interface SCManagedCapturerV1 ()\n\n- (SCCaptureResource *)captureResource;\n\n- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n              completionHandler:(dispatch_block_t)completionHandler;\n\n- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token\n                  completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                            context:(NSString *)context;\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h",
    "content": "//\n//  SCManagedDeviceCapacityAnalyzer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/1/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCManagedCaptureDevice;\n@protocol SCPerforming;\n\nextern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh;\n\n@interface SCManagedDeviceCapacityAnalyzer : NSObject <SCManagedVideoDataSourceListener>\n\n@property (nonatomic, assign) BOOL lowLightConditionEnabled;\n\n- (instancetype)initWithPerformer:(id<SCPerforming>)performer;\n\n- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n\n- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice;\n- (void)removeFocusListener;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m",
    "content": "//\n//  SCManagedDeviceCapacityAnalyzer.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/1/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedDeviceCapacityAnalyzer.h\"\n\n#import \"SCCameraSettingUtils.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h\"\n\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCPerforming.h>\n#import <SCFoundation/SCTrace.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n@import ImageIO;\n@import QuartzCore;\n\nNSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500;\n\nNSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800;\n\nNSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640;\n\nNSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800;\n\n// After this much frames we haven't changed exposure time or ISO, we will assume that the adjustingExposure is ended.\nstatic NSInteger const kExposureUnchangedHighWatermark = 5;\n// If deadline reached, and we still haven't reached high watermark yet, we will consult the low watermark and at least\n// give the system a chance to take not-so-great pictures.\nstatic NSInteger const kExposureUnchangedLowWatermark = 1;\nstatic NSTimeInterval const kExposureUnchangedDeadline = 0.2;\n\n// It seems that between ISO 500 to 640, the brightness value is always somewhere around -0.4 to -0.5.\n// Therefore, this threshold probably will work fine.\nstatic float const kBrightnessValueThreshold = -2.25;\n// Give some margins between recognized as bright enough and not enough light.\n// If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval,\n// and then we count the frame as low light 
frame. Only if the brightness is higher than\n// kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval, we think that we\n// have enough light, and reset low light frame count to 0. 0.5 is chosen because in dark\n// environment, the brightness value changes +-0.3 with minor orientation changes.\nstatic float const kBrightnessValueThresholdConfidenceInterval = 0.5;\n// If we are at good light condition for 7 frames, ready to change back\nstatic NSInteger const kLowLightBoostUnchangedLowWatermark = 7;\n// Requires we are at low light condition for ~2 seconds (assuming 20~30fps)\nstatic NSInteger const kLowLightBoostUnchangedHighWatermark = 25;\n\nstatic NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second\nstatic float const kSCLightingConditionNormalThreshold = 0;\nstatic float const kSCLightingConditionDarkThreshold = -3;\n\n@implementation SCManagedDeviceCapacityAnalyzer {\n    float _lastExposureTime;\n    int _lastISOSpeedRating;\n    NSTimeInterval _lastAdjustingExposureStartTime;\n\n    NSInteger _lowLightBoostLowLightCount;\n    NSInteger _lowLightBoostEnoughLightCount;\n    NSInteger _exposureUnchangedCount;\n    NSInteger _maxISOPresetHigh;\n\n    NSInteger _normalLightingConditionCount;\n    NSInteger _darkLightingConditionCount;\n    NSInteger _extremeDarkLightingConditionCount;\n    SCCapturerLightingConditionType _lightingCondition;\n\n    BOOL _lowLightCondition;\n    BOOL _adjustingExposure;\n\n    SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer;\n    FBKVOController *_observeController;\n    id<SCPerforming> _performer;\n\n    float\n        _lastBrightnessToLog; // Remember last logged brightness, only log again if it changes greater than a threshold\n}\n\n- (instancetype)initWithPerformer:(id<SCPerforming>)performer\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _performer = performer;\n        _maxISOPresetHigh = 
kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI;\n        if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) {\n            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8;\n        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) {\n            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7;\n        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) {\n            // iPhone 6S supports higher ISO rate for video recording, accommodating that.\n            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S;\n        }\n        _announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init];\n        _observeController = [[FBKVOController alloc] initWithObserver:self];\n    }\n    return self;\n}\n\n- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    SCTraceStart();\n    [_announcer addListener:listener];\n}\n\n- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    SCTraceStart();\n    [_announcer removeListener:listener];\n}\n\n- (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled\n{\n    SCTraceStart();\n    if (_lowLightConditionEnabled != lowLightConditionEnabled) {\n        _lowLightConditionEnabled = lowLightConditionEnabled;\n        if (!lowLightConditionEnabled) {\n            _lowLightBoostLowLightCount = 0;\n            _lowLightBoostEnoughLightCount = 0;\n            _lowLightCondition = NO;\n            [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];\n        }\n    }\n}\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceStart();\n    SampleBufferMetadata metadata = 
{\n        .isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime,\n    };\n    retrieveSampleBufferMetadata(sampleBuffer, &metadata);\n    if ((SCIsDebugBuild() || SCIsMasterBuild())\n        // Enable this on internal build only (excluding alpha)\n        && fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) {\n        // Log only when brightness change is greater than 0.5\n        _lastBrightnessToLog = metadata.brightness;\n        SCLogCoreCameraInfo(@\"ExposureTime: %f, ISO: %ld, Brightness: %f\", metadata.exposureTime,\n                            (long)metadata.isoSpeedRating, metadata.brightness);\n    }\n    [self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating];\n    _lastExposureTime = metadata.exposureTime;\n    _lastISOSpeedRating = metadata.isoSpeedRating;\n    if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh &&\n        _lowLightConditionEnabled) { // If we are not recording, we are not at ISO higher than we needed\n        [self _automaticallyDetectLowLightCondition:metadata.brightness];\n    }\n    [self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness];\n    [_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness];\n}\n\n- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice\n{\n    SCTraceStart();\n    [_observeController observe:captureDevice.device\n                        keyPath:@keypath(captureDevice.device, adjustingFocus)\n                        options:NSKeyValueObservingOptionNew\n                         action:@selector(_adjustingFocusingChanged:)];\n}\n\n- (void)removeFocusListener\n{\n    SCTraceStart();\n    [_observeController unobserveAll];\n}\n\n#pragma mark - Private methods\n\n- (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating\n{\n    SCTraceStart();\n    if 
(currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) {\n        _exposureUnchangedCount = 0;\n    } else {\n        ++_exposureUnchangedCount;\n    }\n    NSTimeInterval currentTime = CACurrentMediaTime();\n    if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark ||\n        (currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline &&\n         _exposureUnchangedCount >= kExposureUnchangedLowWatermark)) {\n        // The exposure values haven't changed for kExposureUnchangedHighWatermark times, considering the adjustment\n        // as done. Otherwise, if we waited long enough, and the exposure unchanged count at least reached low\n        // watermark, we will call it done and give it a shot.\n        if (_adjustingExposure) {\n            _adjustingExposure = NO;\n            SCLogGeneralInfo(@\"Adjusting exposure is done, unchanged count: %zd\", _exposureUnchangedCount);\n            [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];\n        }\n    } else {\n        // Otherwise signal that we have adjustments on exposure\n        if (!_adjustingExposure) {\n            _adjustingExposure = YES;\n            _lastAdjustingExposureStartTime = currentTime;\n            [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure];\n        }\n    }\n}\n\n- (void)_automaticallyDetectLowLightCondition:(float)brightness\n{\n    SCTraceStart();\n    if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) {\n        // If we are at the stage that we need to use higher ISO (because current ISO is maxed out)\n        // and the brightness is lower than the threshold\n        if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) {\n            // Either count how many frames like this continuously we encountered\n            // Or if reached the watermark, change the 
low light boost mode\n            if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) {\n                _lowLightCondition = YES;\n                [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];\n            } else {\n                ++_lowLightBoostLowLightCount;\n            }\n        } else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {\n            // If the brightness is consistently better, reset the low light boost unchanged count to 0\n            _lowLightBoostLowLightCount = 0;\n        }\n    } else if (_lowLightCondition) {\n        // Check the current ISO to see if we can disable low light boost\n        if (_lastISOSpeedRating <= _maxISOPresetHigh &&\n            brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) {\n            if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) {\n                _lowLightCondition = NO;\n                [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition];\n                _lowLightBoostEnoughLightCount = 0;\n            } else {\n                ++_lowLightBoostEnoughLightCount;\n            }\n        }\n    }\n}\n\n- (void)_adjustingFocusingChanged:(NSDictionary *)change\n{\n    SCTraceStart();\n    BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue];\n    [_performer perform:^{\n        [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus];\n    }];\n}\n\n- (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness\n{\n    if (brightness >= kSCLightingConditionNormalThreshold) {\n        if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) {\n            if (_lightingCondition != SCCapturerLightingConditionTypeNormal) {\n                _lightingCondition = SCCapturerLightingConditionTypeNormal;\n                
[_announcer managedDeviceCapacityAnalyzer:self\n                               didChangeLightingCondition:SCCapturerLightingConditionTypeNormal];\n            }\n        } else {\n            _normalLightingConditionCount++;\n        }\n        _darkLightingConditionCount = 0;\n        _extremeDarkLightingConditionCount = 0;\n    } else if (brightness >= kSCLightingConditionDarkThreshold) {\n        if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) {\n            if (_lightingCondition != SCCapturerLightingConditionTypeDark) {\n                _lightingCondition = SCCapturerLightingConditionTypeDark;\n                [_announcer managedDeviceCapacityAnalyzer:self\n                               didChangeLightingCondition:SCCapturerLightingConditionTypeDark];\n            }\n        } else {\n            _darkLightingConditionCount++;\n        }\n        _normalLightingConditionCount = 0;\n        _extremeDarkLightingConditionCount = 0;\n    } else {\n        if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) {\n            if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) {\n                _lightingCondition = SCCapturerLightingConditionTypeExtremeDark;\n                [_announcer managedDeviceCapacityAnalyzer:self\n                               didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark];\n            }\n        } else {\n            _extremeDarkLightingConditionCount++;\n        }\n        _normalLightingConditionCount = 0;\n        _darkLightingConditionCount = 0;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h",
    "content": "//\n//  SCManagedDeviceCapacityAnalyzerHandler.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n@interface SCManagedDeviceCapacityAnalyzerHandler : NSObject <SCManagedDeviceCapacityAnalyzerListener>\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m",
    "content": "//\n//  SCManagedDeviceCapacityAnalyzerHandler.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedDeviceCapacityAnalyzerHandler.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedCapturerStateBuilder.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedDeviceCapacityAnalyzerHandler () {\n    __weak SCCaptureResource *_captureResource;\n}\n@end\n\n@implementation SCManagedDeviceCapacityAnalyzerHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"\");\n        _captureResource = captureResource;\n    }\n    return self;\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLowLightCondition:(BOOL)lowLightCondition\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Change Low Light Condition %d\", lowLightCondition);\n    [_captureResource.queuePerformer perform:^{\n        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]\n            setLowLightCondition:lowLightCondition] build];\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                             didChangeLowLightCondition:state];\n        });\n    }];\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer 
*)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Capacity Analyzer Changes adjustExposure %d\", adjustingExposure);\n    [_captureResource.queuePerformer perform:^{\n        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]\n            setAdjustingExposure:adjustingExposure] build];\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                             didChangeAdjustingExposure:state];\n        });\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h",
    "content": "//#!announcer.rb\n//  SCManagedDeviceCapacityAnalyzerListener.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/4/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCCapturerDefines.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCManagedDeviceCapacityAnalyzer;\n\n@protocol SCManagedDeviceCapacityAnalyzerListener <NSObject>\n\n@optional\n\n// These callbacks happen on a internal queue\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLowLightCondition:(BOOL)lowLightCondition;\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure;\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n              didChangeAdjustingFocus:(BOOL)adjustingFocus;\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n                  didChangeBrightness:(float)adjustingBrightness;\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h",
    "content": "// Generated by the announcer.rb  DO NOT EDIT!!\n\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject <SCManagedDeviceCapacityAnalyzerListener>\n\n- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm",
    "content": "// Generated by the announcer.rb  DO NOT EDIT!!\n\n#import \"SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h\"\n\n#include <mutex>\nusing std::lock_guard;\nusing std::mutex;\n#include <vector>\nusing std::find;\nusing std::make_shared;\nusing std::shared_ptr;\nusing std::vector;\n\n@implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer {\n    mutex _mutex;\n    shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>> _listeners;\n}\n\n- (NSString *)description\n{\n    auto listeners = atomic_load(&self->_listeners);\n    NSMutableString *desc = [NSMutableString string];\n    [desc appendFormat:@\"<SCManagedDeviceCapacityAnalyzerListenerAnnouncer %p>: [\", self];\n    for (int i = 0; i < listeners->size(); ++i) {\n        [desc appendFormat:@\"%@\", (*listeners)[i]];\n        if (i != listeners->size() - 1) {\n            [desc appendString:@\", \"];\n        }\n    }\n    [desc appendString:@\"]\"];\n    return desc;\n}\n\n- (void)addListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    lock_guard<mutex> lock(_mutex);\n    auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();\n    if (_listeners != nil) {\n        // The listener we want to add already exists\n        if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) {\n            return;\n        }\n        for (auto &one : *_listeners) {\n            if (one != nil) {\n                listeners->push_back(one);\n            }\n        }\n        listeners->push_back(listener);\n        atomic_store(&self->_listeners, listeners);\n    } else {\n        listeners->push_back(listener);\n        atomic_store(&self->_listeners, listeners);\n    }\n}\n\n- (void)removeListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener\n{\n    lock_guard<mutex> lock(_mutex);\n    if (_listeners == nil) {\n        return;\n    }\n    // If the only item in the listener list is the one we want to 
remove, store it back to nil again\n    if (_listeners->size() == 1 && (*_listeners)[0] == listener) {\n        atomic_store(&self->_listeners, shared_ptr<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>());\n        return;\n    }\n    auto listeners = make_shared<vector<__weak id<SCManagedDeviceCapacityAnalyzerListener>>>();\n    for (auto &one : *_listeners) {\n        if (one != nil && one != listener) {\n            listeners->push_back(one);\n        }\n    }\n    atomic_store(&self->_listeners, listeners);\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLowLightCondition:(BOOL)lowLightCondition\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) {\n                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer\n                             didChangeLowLightCondition:lowLightCondition];\n            }\n        }\n    }\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) {\n                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer\n                             didChangeAdjustingExposure:adjustingExposure];\n            }\n        }\n    }\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n              
didChangeAdjustingFocus:(BOOL)adjustingFocus\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) {\n                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer\n                                didChangeAdjustingFocus:adjustingFocus];\n            }\n        }\n    }\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n                  didChangeBrightness:(float)adjustingBrightness\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) {\n                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer\n                                    didChangeBrightness:adjustingBrightness];\n            }\n        }\n    }\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition\n{\n    auto listeners = atomic_load(&self->_listeners);\n    if (listeners) {\n        for (id<SCManagedDeviceCapacityAnalyzerListener> listener : *listeners) {\n            if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) {\n                [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer\n                             didChangeLightingCondition:lightingCondition];\n            }\n        }\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDroppedFramesReporter.h",
    "content": "//\n//  SCManagedDroppedFramesReporter.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 3/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturerListener.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n Conforms to SCManagedVideoDataSourceListener and records frame rate statistics\n during recording.\n */\n@interface SCManagedDroppedFramesReporter : NSObject <SCManagedVideoDataSourceListener, SCManagedCapturerListener>\n\n- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied;\n\n- (void)didChangeCaptureDevicePosition;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedDroppedFramesReporter.m",
    "content": "//\n//  SCManagedDroppedFramesReporter.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 3/21/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedDroppedFramesReporter.h\"\n\n#import \"SCCameraTweaks.h\"\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCBackgroundTaskMonitor.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFrameRate/SCFrameRateEntry.h>\n#import <SCFrameRate/SCVideoFrameDropCounter.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\nCGFloat const kSCCaptureTargetFramerate = 30;\n\n@interface SCManagedDroppedFramesReporter ()\n\n@property (nonatomic) SCVideoFrameDropCounter *frameDropCounter;\n\n@end\n\n@implementation SCManagedDroppedFramesReporter {\n    SCVideoFrameDropCounter *_frameDropCounter;\n    NSUInteger _droppedFrames;\n}\n\n- (SCVideoFrameDropCounter *)frameDropCounter\n{\n    if (_frameDropCounter == nil) {\n        _frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate];\n        _droppedFrames = 0;\n    }\n    return _frameDropCounter;\n}\n\n- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied\n{\n    if (_frameDropCounter == nil) {\n        return;\n    }\n\n    NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy];\n    eventDict[@\"total_frame_drop_measured\"] = @(_droppedFrames);\n    eventDict[@\"keep_late_frames\"] = @(keepLateFrames);\n    // if user select none of the lenses when activing the lenses scroll view, we still enable keepLateFrames\n    eventDict[@\"lenses_applied\"] = @(lensesApplied);\n\n    [[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict];\n\n    // Reset\n    _frameDropCounter = nil;\n    _droppedFrames = 0;\n}\n\n- (void)didChangeCaptureDevicePosition\n{\n    [_frameDropCounter didChangeCaptureDevicePosition];\n}\n\n#pragma mark - 
SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    [self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];\n}\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n           didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    _droppedFrames += 1;\n    NSDictionary<NSString *, NSNumber *> *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport();\n    SCLogCoreCameraInfo(@\"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@\",\n                        backgroundTaskScreenshot);\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedFrameHealthChecker.h",
    "content": "//\n//  SCManagedFrameHealthChecker.h\n//  Snapchat\n//\n//  Created by Pinlin Chen on 30/08/2017.\n//\n\n#import <SCBase/SCMacros.h>\n#import <SCFeatureGating/SCExperimentManager.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@interface SCManagedFrameHealthChecker : NSObject\n\n+ (SCManagedFrameHealthChecker *)sharedInstance;\n/*! @abstract Use sharedInstance instead. */\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n/* Utility method */\n- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo;\n- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                            photoCapturerEnabled:(BOOL)photoCapturerEnabled\n                                     lensEnabled:(BOOL)lensesEnabled\n                                          lensID:(NSString *)lensID;\n- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata\n                        photoCapturerEnabled:(BOOL)photoCapturerEnabled\n                                 lensEnabled:(BOOL)lensesEnabled\n                                      lensID:(NSString *)lensID;\n- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset;\n\n/* Image snap */\n- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image\n                             captureSettings:(NSDictionary *)captureSettings\n                            captureSessionID:(NSString *)captureSessionID;\n- (void)checkImageHealthForPreTranscoding:(UIImage *)image\n                                 metadata:(NSDictionary *)metadata\n                         captureSessionID:(NSString *)captureSessionID;\n- (void)checkImageHealthForPostTranscoding:(NSData *)imageData\n                                  metadata:(NSDictionary *)metadata\n                          captureSessionID:(NSString *)captureSessionID;\n\n/* Video snap */\n- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image\n                              
      metedata:(NSDictionary *)metadata\n                            captureSessionID:(NSString *)captureSessionID;\n- (void)checkVideoHealthForOverlayImage:(UIImage *)image\n                               metedata:(NSDictionary *)metadata\n                       captureSessionID:(NSString *)captureSessionID;\n- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image\n                                           metedata:(NSDictionary *)metadata\n                                         properties:(NSDictionary *)properties\n                                   captureSessionID:(NSString *)captureSessionID;\n\n- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedFrameHealthChecker.m",
    "content": "//\n//  SCManagedFrameHealthChecker.m\n//  Snapchat\n//\n//  Created by Pinlin Chen on 30/08/2017.\n//\n\n#import \"SCManagedFrameHealthChecker.h\"\n\n#import \"SCCameraSettingUtils.h\"\n#import \"SCCameraTweaks.h\"\n\n#import <SCFoundation/AVAsset+Helpers.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCLogHelper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/UIImage+Helpers.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Stats.h>\n#import <SCWebP/UIImage+WebP.h>\n\n#import <ImageIO/CGImageProperties.h>\n@import Accelerate;\n\nstatic const char *kSCManagedFrameHealthCheckerQueueLabel = \"com.snapchat.frame_health_checker\";\nstatic const int kSCManagedFrameHealthCheckerMaxSamples = 2304;\nstatic const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0;\nstatic const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0;\nstatic const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0;\n// assume we could process at most of 2 RGBA images which are 2304*4096 RGBA image\nstatic const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0;\n\ntypedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) {\n    SCManagedFrameHealthCheck_ImageCapture = 0,\n    SCManagedFrameHealthCheck_ImagePreTranscoding,\n    SCManagedFrameHealthCheck_ImagePostTranscoding,\n    SCManagedFrameHealthCheck_VideoCapture,\n    SCManagedFrameHealthCheck_VideoOverlayImage,\n    SCManagedFrameHealthCheck_VideoPostTranscoding,\n};\n\ntypedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) {\n    SCManagedFrameHealthCheckError_None = 0,\n    SCManagedFrameHealthCheckError_Invalid_Bitmap,\n    SCManagedFrameHealthCheckError_Frame_Possibly_Black,\n    SCManagedFrameHealthCheckError_Frame_Totally_Black,\n    SCManagedFrameHealthCheckError_Execution_Error,\n};\n\ntypedef struct {\n    float R;\n    float G;\n    float B;\n    float A;\n} 
FloatRGBA;\n\n@class SCManagedFrameHealthCheckerTask;\ntypedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task);\n\nfloat vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength)\n{\n    float sum = 0;\n    float colorArray[bufferLength];\n    // Convert to float for DSP registerator\n    vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength);\n    // Calculate sum of color element\n    vDSP_sve(colorArray, 1, &sum, bufferLength);\n    return sum;\n}\n\n@interface SCManagedFrameHealthCheckerTask : NSObject\n\n@property (nonatomic, assign) SCManagedFrameHealthCheckType type;\n@property (nonatomic, strong) id targetObject;\n@property (nonatomic, assign) CGSize sourceImageSize;\n@property (nonatomic, strong) UIImage *unifiedImage;\n@property (nonatomic, strong) NSDictionary *metadata;\n@property (nonatomic, strong) NSDictionary *videoProperties;\n@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType;\n\n+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type\n                                     targetObject:(id)targetObject\n                                         metadata:(NSDictionary *)metadata\n                                  videoProperties:(NSDictionary *)videoProperties;\n\n+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type\n                                     targetObject:(id)targetObject\n                                         metadata:(NSDictionary *)metadata;\n\n@end\n\n@implementation SCManagedFrameHealthCheckerTask\n\n+ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type\n                                     targetObject:(id)targetObject\n                                         metadata:(NSDictionary *)metadata\n{\n    return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil];\n}\n\n+ (SCManagedFrameHealthCheckerTask 
*)taskWithType:(SCManagedFrameHealthCheckType)type\n                                     targetObject:(id)targetObject\n                                         metadata:(NSDictionary *)metadata\n                                  videoProperties:(NSDictionary *)videoProperties\n{\n    SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init];\n    task.type = type;\n    task.targetObject = targetObject;\n    task.metadata = metadata;\n    task.videoProperties = videoProperties;\n    return task;\n}\n\n- (NSString *)textForSnapType\n{\n    switch (self.type) {\n    case SCManagedFrameHealthCheck_ImageCapture:\n    case SCManagedFrameHealthCheck_ImagePreTranscoding:\n    case SCManagedFrameHealthCheck_ImagePostTranscoding:\n        return @\"IMAGE\";\n    case SCManagedFrameHealthCheck_VideoCapture:\n    case SCManagedFrameHealthCheck_VideoOverlayImage:\n    case SCManagedFrameHealthCheck_VideoPostTranscoding:\n        return @\"VIDEO\";\n    }\n}\n\n- (NSString *)textForSource\n{\n    switch (self.type) {\n    case SCManagedFrameHealthCheck_ImageCapture:\n        return @\"CAPTURE\";\n    case SCManagedFrameHealthCheck_ImagePreTranscoding:\n        return @\"PRE_TRANSCODING\";\n    case SCManagedFrameHealthCheck_ImagePostTranscoding:\n        return @\"POST_TRANSCODING\";\n    case SCManagedFrameHealthCheck_VideoCapture:\n        return @\"CAPTURE\";\n    case SCManagedFrameHealthCheck_VideoOverlayImage:\n        return @\"OVERLAY_IMAGE\";\n    case SCManagedFrameHealthCheck_VideoPostTranscoding:\n        return @\"POST_TRANSCODING\";\n    }\n}\n\n- (NSString *)textForErrorType\n{\n    switch (self.errorType) {\n    case SCManagedFrameHealthCheckError_None:\n        return nil;\n    case SCManagedFrameHealthCheckError_Invalid_Bitmap:\n        return @\"Invalid_Bitmap\";\n    case SCManagedFrameHealthCheckError_Frame_Possibly_Black:\n        return @\"Frame_Possibly_Black\";\n    case SCManagedFrameHealthCheckError_Frame_Totally_Black:\n  
      return @"Frame_Totally_Black";
    case SCManagedFrameHealthCheckError_Execution_Error:
        return @"Execution_Error";
    }
}

@end

@interface SCManagedFrameHealthChecker () {
    id<SCPerforming> _performer;
    // Pending check tasks, keyed by capture session.
    // Key   - NSString, captureSessionID
    // Value - NSMutableArray<SCManagedFrameHealthCheckerTask>
    // Only mutated on _performer's serial queue.
    NSMutableDictionary *_frameCheckTasks;
}

@end

@implementation SCManagedFrameHealthChecker

/// Process-wide shared checker. All work is funneled through a low-QoS serial queue.
+ (SCManagedFrameHealthChecker *)sharedInstance
{
    SCTraceODPCompatibleStart(2);
    static SCManagedFrameHealthChecker *checker;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        checker = [[SCManagedFrameHealthChecker alloc] _init];
    });
    return checker;
}

/// Private initializer used only by +sharedInstance.
- (instancetype)_init
{
    SCTraceODPCompatibleStart(2);
    if (self = [super init]) {
        // Use the lowest QoS level; frame health checking is best-effort background work.
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel
                                            qualityOfService:QOS_CLASS_UTILITY
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextCamera];
        _frameCheckTasks = [NSMutableDictionary dictionary];
    }
    return self;
}

/// Extracts exposure, ISO and brightness from the sample buffer's EXIF attachment.
/// Returns an empty (mutable) dictionary when the buffer is nil or its data is not ready.
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    SCTraceODPCompatibleStart(2);
    // add exposure, ISO, brightness
    NSMutableDictionary *metadata = [NSMutableDictionary dictionary];
    if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) {
        return metadata;
    }
    CFDictionaryRef exifAttachments =
        (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
    NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
    if (exposureTimeNum) {
        metadata[@"exposure"] = exposureTimeNum;
    }
    NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
    if (isoSpeedRatingNum) {
        metadata[@"iso"] = isoSpeedRatingNum;
    }
    NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
    if (brightnessNum) {
        float brightness = [brightnessNum floatValue];
        // Guard against NaN/Inf before boxing; the logging pipeline expects finite numbers.
        metadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
    }

    return metadata;
}

/// Same extraction as -metadataForSampleBuffer:, but reads from an already-materialized
/// EXIF dictionary instead of a live sample buffer.
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
{
    SCTraceODPCompatibleStart(2);
    // add exposure, ISO, brightness
    NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary];
    CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata;
    NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
    if (exposureTimeNum) {
        newMetadata[@"exposure"] = exposureTimeNum;
    }
    NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
    if (isoSpeedRatingNum) {
        newMetadata[@"iso"] = isoSpeedRatingNum;
    }
    NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
    if (brightnessNum) {
        float brightness = [brightnessNum floatValue];
        newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0);
    }

    return newMetadata;
}

/// Convenience wrapper: EXIF metadata merged with caller-supplied extra key/values.
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
    [metadata addEntriesFromDictionary:extraInfo];
    return metadata;
}

/// EXIF metadata plus capture-pipeline flags (photo capturer, lens state).
/// lens_id is only recorded when lenses are enabled; nil lensID is logged as "".
- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer
                            photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                     lensEnabled:(BOOL)lensesEnabled
                                          lensID:(NSString *)lensID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer];
    metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);

    metadata[@"lens_enabled"] = @(lensesEnabled);
    if (lensesEnabled) {
        metadata[@"lens_id"] = lensID ?: @"";
    }

    return metadata;
}

/// Dictionary-based variant of the method above.
- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata
                        photoCapturerEnabled:(BOOL)photoCapturerEnabled
                                 lensEnabled:(BOOL)lensesEnabled
                                      lensID:(NSString *)lensID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata];
    newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled);

    newMetadata[@"lens_enabled"] = @(lensesEnabled);
    if (lensesEnabled) {
        newMetadata[@"lens_id"] = lensID ?: @"";
    }

    return newMetadata;
}

/// Collects loggable properties (size, duration, track counts, bitrates, playability)
/// from a transcoded asset. Returns nil when asset is nil.
- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil);
    NSMutableDictionary *properties = [NSMutableDictionary dictionary];
    // file size
    properties[@"file_size"] = @([asset fileSize]);
    // duration
    properties[@"duration"] = @(CMTimeGetSeconds(asset.duration));
    // video track count
    NSArray<AVAssetTrack *> *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    properties[@"video_track_count"] = @(videoTracks.count);
    if (videoTracks.count > 0) {
        // video bitrate
        properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]);
        // frame rate
        properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]);
    }
    // audio track count
    NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    properties[@"audio_track_count"] = @(audioTracks.count);
    if (audioTracks.count > 0) {
        // audio bitrate
        properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]);
    }
    // playable
    properties[@"playable"] = @(asset.isPlayable);
    return properties;
}

#pragma mark - Image snap

/// Queues a black-frame check for a freshly captured still image.
- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image
                             captureSettings:(NSDictionary *)captureSettings
                            captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture
                                         targetObject:image
                                             metadata:captureSettings];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

/// Queues a check for the image about to be transcoded.
- (void)checkImageHealthForPreTranscoding:(UIImage *)image
                                 metadata:(NSDictionary *)metadata
                         captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

/// Queues a check for the already-transcoded image bytes (decoded lazily on the worker queue).
- (void)checkImageHealthForPostTranscoding:(NSData *)imageData
                                  metadata:(NSDictionary *)metadata
                          captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding
                                         targetObject:imageData
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

#pragma mark - Video snap

// NOTE(review): the selector keyword "metedata" below is a typo, but it is part of the
// public interface of this class; renaming it would break callers, so it is kept as-is.

/// Queues a black-frame check for a video capture frame.
- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image
                                    metedata:(NSDictionary *)metadata
                            captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

/// Queues a check for the video overlay image. A nil overlay is valid and is skipped.
- (void)checkVideoHealthForOverlayImage:(UIImage *)image
                               metedata:(NSDictionary *)metadata
                       captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty");
        return;
    }
    // Overlay image could be nil
    if (!image) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil.");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage
                                         targetObject:image
                                             metadata:metadata];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

/// Queues a check for the post-transcoding thumbnail, attaching asset properties
/// gathered via -getPropertiesFromAsset:.
- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image
                                           metedata:(NSDictionary *)metadata
                                         properties:(NSDictionary *)properties
                                   captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty");
        return;
    }
    SCManagedFrameHealthCheckerTask *task =
        [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding
                                         targetObject:image
                                             metadata:metadata
                                      videoProperties:properties];
    [self _addTask:task withCaptureSessionID:captureSessionID];
}

#pragma mark - Task management

/// Runs all queued checks for the given capture session and uploads the results.
- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (!captureSessionID) {
        SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil");
        return;
    }
    [self _asynchronouslyCheckForCaptureSessionID:captureSessionID];
}

#pragma mark - Private functions

/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength.
- (UIImage *)_unifyImage:(UIImage *)sourceImage
{
    CGFloat sourceWidth = sourceImage.size.width;
    CGFloat sourceHeight = sourceImage.size.height;

    if (sourceWidth == 0.0 || sourceHeight == 0.0) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size");
        return sourceImage;
    }

    CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength;

    CGFloat widthScalingFactor = maxEdgeLength / sourceWidth;
    CGFloat heightScalingFactor = maxEdgeLength / sourceHeight;

    CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor);

    // Never upscale; only shrink images that exceed the max edge length.
    if (scalingFactor >= 1) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image.");
        return sourceImage;
    }

    CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor);

    SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size),
                        NSStringFromCGSize(targetSize));
    return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale];
}

/// Enqueues a task on the worker queue: decodes/scales the target image up front,
/// replaces any pending task of the same type, and appends it to the session's queue.
- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        return;
    }
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);

        CFTimeInterval beforeScaling = CACurrentMediaTime();
        if (newTask.targetObject) {
            if ([newTask.targetObject isKindOfClass:[UIImage class]]) {
                UIImage *sourceImage = (UIImage *)newTask.targetObject;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else if ([newTask.targetObject isKindOfClass:[NSData class]]) {
                UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject];
                CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime();
                SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f",
                                    betweenDecodingAndScaling - beforeScaling);
                beforeScaling = betweenDecodingAndScaling;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else {
                SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@",
                                     NSStringFromClass([newTask.targetObject class]));
            }
            // Drop the original object as soon as we have the scaled copy to cut memory use.
            newTask.targetObject = nil;
        }
        SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling);

        NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID];
        if (!taskQueue) {
            taskQueue = [NSMutableArray array];
            _frameCheckTasks[captureSessionID] = taskQueue;
        }
        // Remove previous same type task, avoid meaningless task,
        // for example repeat click "Send Button" and then "Back button"
        // will produce a lot of PRE_TRANSCODING and POST_TRANSCODING.
        // FIX: search by index instead of removing inside fast enumeration —
        // mutating a collection while fast-enumerating it raises NSGenericException.
        NSUInteger duplicateIndex = [taskQueue
            indexOfObjectPassingTest:^BOOL(SCManagedFrameHealthCheckerTask *task, NSUInteger idx, BOOL *stop) {
                return task.type == newTask.type;
            }];
        if (duplicateIndex != NSNotFound) {
            [taskQueue removeObjectAtIndex:duplicateIndex];
        }

        [taskQueue addObject:newTask];
    }];
}

/// Executes every queued task for the session on the worker queue, classifies the
/// results (possibly/totally black, execution error, invalid bitmap) and logs them.
/// Tasks are dropped wholesale when free memory is below the safety threshold.
- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
        if (!tasksQueue) {
            return;
        }

        // Check the free memory, if it is too low, drop these tasks
        double memFree = [SCLogger memoryFreeMB];
        if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
            SCLogCoreCameraWarning(
                @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree,
                captureSessionID);
            [_frameCheckTasks removeObjectForKey:captureSessionID];
            return;
        }

        __block NSMutableArray *frameHealthInfoArray = [NSMutableArray array];
        // Execute all tasks and wait for complete
        [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
            SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj;
            NSMutableDictionary *frameHealthInfo;
            UIImage *image = task.unifiedImage;

            if (image) {
                // Get frame health info
                frameHealthInfo = [self _getFrameHealthInfoForImage:image
                                                             source:[task textForSource]
                                                           snapType:[task textForSnapType]
                                                           metadata:task.metadata
                                                    sourceImageSize:task.sourceImageSize
                                                   captureSessionID:captureSessionID];
                NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"];
                NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"];
                NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"];
                // Precedence: totally black > possibly black > execution error.
                if ([isTotallyBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black;
                } else if ([isPossibleBlackNum boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black;
                } else if ([hasExecutionError boolValue]) {
                    task.errorType = SCManagedFrameHealthCheckError_Execution_Error;
                }
            } else {
                frameHealthInfo = [NSMutableDictionary dictionary];
                task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap;
            }

            if (frameHealthInfo) {
                frameHealthInfo[@"frame_source"] = [task textForSource];
                frameHealthInfo[@"snap_type"] = [task textForSnapType];
                frameHealthInfo[@"error_type"] = [task textForErrorType];
                frameHealthInfo[@"capture_session_id"] = captureSessionID;
                frameHealthInfo[@"metadata"] = task.metadata;
                if (task.videoProperties.count > 0) {
                    [frameHealthInfo addEntriesFromDictionary:task.videoProperties];
                }
                [frameHealthInfoArray addObject:frameHealthInfo];
            }

            // Release the image as soon as possible to mitigate the memory pressure
            task.unifiedImage = nil;
        }];

        for (NSDictionary *frameHealthInfo in frameHealthInfoArray) {
            if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) {
                // TODO: Zi Kai Chen - add this back. Normally we use id<SCManiphestTicketCreator> for
                // this but as this is a shared instance we cannot easily inject it. The work would
                // involve making this not a shared instance.
                // SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
                //                     JSONStringSerializeObjectForLogging(frameHealthInfo));
            }

            [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
                                                           parameters:frameHealthInfo
                                                     secretParameters:nil
                                                              metrics:nil];
        }

        [_frameCheckTasks removeObjectForKey:captureSessionID];
    }];
}

/// Samples the image's pixels and returns averaged RGBA statistics plus black-frame flags.
/// Sampling is capped at kSCManagedFrameHealthCheckerMaxSamples pixels; failures to read
/// pixel data are reported via the "execution_error" key.
- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
                                              source:(NSString *)source
                                            snapType:(NSString *)snapType
                                            metadata:(NSDictionary *)metadata
                                     sourceImageSize:(CGSize)sourceImageSize
                                    captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
    size_t samplesCount = 0;
    CFTimeInterval start = CACurrentMediaTime();
    CGImageRef imageRef = image.CGImage;
    size_t imageWidth = CGImageGetWidth(imageRef);
    size_t imageHeight = CGImageGetHeight(imageRef);
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
    CFTimeInterval getImageDataTime = CACurrentMediaTime();
    if (pixelData) {
        const Byte *imageData = CFDataGetBytePtr(pixelData);
        NSInteger stripLength = 0;
        NSInteger bufferLength = 0;
        NSInteger imagePixels = imageWidth * imageHeight;
        // Limit the max sampled frames
        if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
            // 4 bytes per pixel; stride so that exactly MaxSamples pixels are visited.
            stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
            bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
        } else {
            stripLength = 4;
            bufferLength = imagePixels;
        }
        samplesCount = bufferLength;

        // Avoid dividing by zero
        if (samplesCount != 0) {
            FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
                                              stripLength:stripLength
                                             bufferLength:bufferLength
                                               bitmapInfo:CGImageGetBitmapInfo(imageRef)];
            float averageR = sumRGBA.R / samplesCount;
            float averageG = sumRGBA.G / samplesCount;
            float averageB = sumRGBA.B / samplesCount;
            float averageA = sumRGBA.A / samplesCount;
            parameters[@"average_sampled_rgba_r"] = @(averageR);
            parameters[@"average_sampled_rgba_g"] = @(averageG);
            parameters[@"average_sampled_rgba_b"] = @(averageB);
            parameters[@"average_sampled_rgba_a"] = @(averageA);
            parameters[@"origin_frame_width"] = @(sourceImageSize.width);
            parameters[@"origin_frame_height"] = @(sourceImageSize.height);
            // Also report possible black to identify the intentional black snap by covering camera.
            // Normally, the averageA very near 255, but for video overlay image, it is very small.
            // So we use averageA > 250 to avoid considering video overlay image as possible black.
            if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
                parameters[@"is_possible_black"] = @(YES);
                // Use this parameters for BigQuery conditions in Grafana
                if (averageR == 0 && averageG == 0 && averageB == 0) {
                    parameters[@"is_total_black"] = @(YES);
                }
            }
        } else {
            SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
                                 source, captureSessionID);
            parameters[@"execution_error"] = @(YES);
        }
        CFRelease(pixelData);
    } else {
        SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
                             captureSessionID);
        parameters[@"execution_error"] = @(YES);
    }
    parameters[@"sample_size"] = @(samplesCount);

    CFTimeInterval end = CACurrentMediaTime();
    SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f",
                        snapType, source, getImageDataTime - start, end - getImageDataTime, end - start);
    return parameters;
}

/// Sums each color channel over the sampled pixels, honoring the bitmap's channel order.
/// Assumes 4 bytes per pixel; only BGRA (premultiplied-first, 32-bit little endian) and
/// RGBA layouts are currently distinguished.
- (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData
                     stripLength:(NSInteger)stripLength
                    bufferLength:(NSInteger)bufferLength
                      bitmapInfo:(CGBitmapInfo)bitmapInfo
{
    SCTraceODPCompatibleStart(2);
    FloatRGBA sumRGBA;
    // FIX: CGImageAlphaInfo and the byte order are multi-bit *fields* of CGBitmapInfo,
    // not single-bit flags, so they must be masked and compared for equality. The previous
    // `bitmapInfo & kCGImageAlphaPremultipliedFirst` test also matched unrelated alpha
    // values because kCGImageAlphaPremultipliedFirst is the enum value 2, not a bitmask.
    BOOL isBGRA = ((bitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaPremultipliedFirst) &&
                  ((bitmapInfo & kCGBitmapByteOrderMask) == kCGImageByteOrder32Little);
    if (isBGRA) {
        // BGRA
        sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength);
        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
        sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
    } else {
        // TODO. support other types beside RGBA
        sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength);
        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
        sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
    }
    return sumRGBA;
}

@end
  },
  {
    "path": "ManagedCapturer/SCManagedFrontFlashController.h",
    "content": "//\n//  SCManagedFrontFlashController.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/4/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n// This object is only access on SCManagedCapturer thread\n@interface SCManagedFrontFlashController : NSObject\n\n@property (nonatomic, assign) BOOL flashActive;\n\n@property (nonatomic, assign) BOOL torchActive;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedFrontFlashController.m",
    "content": "//\n//  SCManagedFrontFlashController.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/4/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedFrontFlashController.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTrace.h>\n\n@import UIKit;\n\n@implementation SCManagedFrontFlashController {\n    BOOL _active;\n    UIView *_brightView;\n    CGFloat _brightnessWhenFlashAndTorchOff;\n}\n\n- (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive\n{\n    SCTraceStart();\n    SCAssertMainThread();\n    BOOL wasActive = _active;\n    _active = flashActive || torchActive;\n    if (!wasActive && _active) {\n        [self _activateFlash:flashActive];\n    } else if (wasActive && !_active) {\n        [self _deactivateFlash];\n    }\n}\n\n- (void)_activateFlash:(BOOL)flashActive\n{\n    UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow];\n    if (!_brightView) {\n        CGRect frame = [mainWindow bounds];\n        CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame));\n        frame.size = CGSizeMake(maxLength, maxLength);\n        // Using the max length on either side to be compatible with different orientations\n        _brightView = [[UIView alloc] initWithFrame:frame];\n        _brightView.userInteractionEnabled = NO;\n        _brightView.backgroundColor = [UIColor whiteColor];\n    }\n    _brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness;\n    SCLogGeneralInfo(@\"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0\",\n                     _brightnessWhenFlashAndTorchOff);\n    [self _brightenLoop];\n    _brightView.alpha = flashActive ? 
1.0 : 0.75;\n    [mainWindow addSubview:_brightView];\n}\n\n- (void)_deactivateFlash\n{\n    SCLogGeneralInfo(@\"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f\",\n                     [UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff);\n    [UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff;\n    if (_brightView) {\n        [_brightView removeFromSuperview];\n    }\n}\n\n- (void)_brightenLoop\n{\n    if (_active) {\n        SCLogGeneralInfo(@\"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0\",\n                         [UIScreen mainScreen].brightness);\n        [UIScreen mainScreen].brightness = 1.0;\n        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) {\n            [self _brightenLoop];\n        });\n    } else {\n        SCLogGeneralInfo(@\"[SCManagedFrontFlashController] Recording is done, brighten loop ends\");\n    }\n}\n\n- (void)setFlashActive:(BOOL)flashActive\n{\n    SCTraceStart();\n    if (_flashActive != flashActive) {\n        _flashActive = flashActive;\n        BOOL torchActive = _torchActive;\n        runOnMainThreadAsynchronously(^{\n            [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];\n        });\n    }\n}\n\n- (void)setTorchActive:(BOOL)torchActive\n{\n    SCTraceStart();\n    if (_torchActive != torchActive) {\n        _torchActive = torchActive;\n        BOOL flashActive = _flashActive;\n        runOnMainThreadAsynchronously(^{\n            [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive];\n        });\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedLegacyStillImageCapturer.h",
    "content": "//\n//  SCManagedLegacyStillImageCapturer.h\n//  Snapchat\n//\n//  Created by Chao Pang on 10/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedStillImageCapturer.h\"\n\n@interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedLegacyStillImageCapturer.m",
    "content": "//\n//  SCManagedLegacyStillImageCapturer.m\n//  Snapchat\n//\n//  Created by Chao Pang on 10/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedLegacyStillImageCapturer.h\"\n\n#import \"AVCaptureConnection+InputDevice.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedStillImageCapturer_Protected.h\"\n#import \"SCStillImageCaptureVideoInputMethod.h\"\n\n#import <SCCrashLogger/SCCrashLogger.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCPerforming.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCLenses/SCLens.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCWebP/UIImage+WebP.h>\n\n@import ImageIO;\n\nstatic NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain =\n    @\"kSCLegacyStillImageCaptureDefaultMethodErrorDomain\";\nstatic NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain =\n    @\"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain\";\n\nstatic NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000;\nstatic NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001;\n\n@implementation SCManagedLegacyStillImageCapturer {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    AVCaptureStillImageOutput *_stillImageOutput;\n#pragma clang diagnostic pop\n\n    BOOL _shouldCapture;\n    NSUInteger _retries;\n\n    SCStillImageCaptureVideoInputMethod *_videoFileMethod;\n}\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      performer:(id<SCPerforming>)performer\n             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore\n                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate\n{\n    
SCTraceStart();\n    self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];\n    if (self) {\n        [self setupWithSession:session];\n    }\n    return self;\n}\n\n- (void)setupWithSession:(AVCaptureSession *)session\n{\n    SCTraceStart();\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];\n#pragma clang diagnostic pop\n    _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};\n    [self setAsOutput:session];\n}\n\n- (void)setAsOutput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    if ([session canAddOutput:_stillImageOutput]) {\n        [session addOutput:_stillImageOutput];\n    }\n}\n\n- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled\n{\n    SCTraceStart();\n    if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) {\n        _stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled;\n    }\n}\n\n- (void)setPortraitModeCaptureEnabled:(BOOL)enabled\n{\n    // Legacy capturer only used on devices running versions under 10.2, which don't support depth data\n    // so this function is never called and does not need to be implemented\n}\n\n- (void)enableStillImageStabilization\n{\n    SCTraceStart();\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) {\n        _stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES;\n    }\n#pragma clang diagnostic pop\n}\n\n- (void)removeAsOutput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    [session removeOutput:_stillImageOutput];\n}\n\n- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio\n                            
atZoomFactor:(float)zoomFactor\n                             fieldOfView:(float)fieldOfView\n                                   state:(SCManagedCapturerState *)state\n                        captureSessionID:(NSString *)captureSessionID\n                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo\n                       completionHandler:\n                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler\n{\n    SCTraceStart();\n    SCAssert(completionHandler, @\"completionHandler shouldn't be nil\");\n    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds\n    _aspectRatio = aspectRatio;\n    _zoomFactor = zoomFactor;\n    _fieldOfView = fieldOfView;\n    _state = state;\n    _captureSessionID = captureSessionID;\n    _shouldCaptureFromVideo = shouldCaptureFromVideo;\n    SCAssert(!_completionHandler, @\"We shouldn't have a _completionHandler at this point otherwise we are destroying \"\n                                  @\"current completion handler.\");\n    _completionHandler = [completionHandler copy];\n    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];\n    if (!_adjustingExposureManualDetect) {\n        SCLogCoreCameraInfo(@\"Capturing still image now\");\n        [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];\n        _shouldCapture = NO;\n    } else {\n        SCLogCoreCameraInfo(@\"Wait adjusting exposure (or after 0.4 seconds) and then capture still image\");\n        _shouldCapture = YES;\n        [self _deadlineCaptureStillImage];\n    }\n}\n\n#pragma mark - SCManagedDeviceCapacityAnalyzerListener\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        // Since this is 
handled on a different thread, therefore, dispatch back to the queue we operated on.\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        self->_adjustingExposureManualDetect = adjustingExposure;\n        [self _didChangeAdjustingExposure:adjustingExposure\n                             withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];\n    }];\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        self->_lightingConditionType = lightingCondition;\n    }];\n}\n\n#pragma mark - SCManagedCapturerListener\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.\n        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];\n    }];\n}\n\n#pragma mark - Private methods\n\n- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy\n{\n    if (!adjustingExposure && self->_shouldCapture) {\n        SCLogCoreCameraInfo(@\"Capturing after adjusting exposure using strategy: %@\", strategy);\n        [self _captureStillImageWithExposureAdjustmentStrategy:strategy];\n        self->_shouldCapture = NO;\n    }\n}\n\n- (void)_deadlineCaptureStillImage\n{\n    SCTraceStart();\n    // Use the SCManagedCapturer's private queue.\n    [_performer perform:^{\n        if (_shouldCapture) {\n            [self 
_captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];\n            _shouldCapture = NO;\n        }\n    }\n                  after:SCCameraTweaksExposureDeadline()];\n}\n\n- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy\n{\n    SCTraceStart();\n    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];\n    if (_shouldCaptureFromVideo) {\n        [self captureStillImageFromVideoBuffer];\n        return;\n    }\n    SCAssert(_stillImageOutput, @\"stillImageOutput shouldn't be nil\");\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput;\n#pragma clang diagnostic pop\n    AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput];\n    SCManagedCapturerState *state = [_state copy];\n    dispatch_block_t legacyStillImageCaptureBlock = ^{\n        SCCAssertMainThread();\n        // If the application is not in background, and we have still image connection, do the capture. 
Otherwise fail.\n        if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {\n            [_performer performImmediatelyIfCurrentPerformer:^{\n                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =\n                    _completionHandler;\n                _completionHandler = nil;\n                completionHandler(nil, nil,\n                                  [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain\n                                                      code:kSCManagedStillImageCapturerApplicationStateBackground\n                                                  userInfo:nil]);\n            }];\n            return;\n        }\n#if !TARGET_IPHONE_SIMULATOR\n        if (!captureConnection) {\n            [_performer performImmediatelyIfCurrentPerformer:^{\n                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =\n                    _completionHandler;\n                _completionHandler = nil;\n                completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain\n                                                                code:kSCManagedStillImageCapturerNoStillImageConnection\n                                                            userInfo:nil]);\n            }];\n            return;\n        }\n#endif\n        // Select appropriate image capture method\n        if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {\n            if (!_videoFileMethod) {\n                _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];\n            }\n            [[SCLogger sharedInstance] logStillImageCaptureApi:@\"SCStillImageCapture\"];\n            [[SCCoreCameraLogger sharedInstance]\n                logCameraCreationDelaySplitPointStillImageCaptureApi:@\"SCStillImageCapture\"];\n            [_videoFileMethod 
captureStillImageWithCapturerState:state\n                successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {\n                    [self _legacyStillImageCaptureDidSucceedWithImageData:imageData\n                                                             sampleBuffer:nil\n                                                               cameraInfo:cameraInfo\n                                                                    error:error];\n                }\n                failureBlock:^(NSError *error) {\n                    [self _legacyStillImageCaptureDidFailWithError:error];\n                }];\n        } else {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n            if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) {\n                [self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput\n                                                     captureConnection:captureConnection\n                                                         capturerState:state];\n            } else {\n                [self _captureStillImageWithStillImageOutput:stillImageOutput\n                                           captureConnection:captureConnection\n                                               capturerState:state];\n            }\n#pragma clang diagnostic pop\n        }\n    };\n    // We need to call this on main thread and blocking.\n    [[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock];\n}\n\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n- (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput\n                             captureConnection:(AVCaptureConnection *)captureConnection\n                                 capturerState:(SCManagedCapturerState *)state\n{\n    [[SCLogger sharedInstance] 
logStillImageCaptureApi:@\"AVStillImageCaptureAsynchronous\"];\n    [[SCCoreCameraLogger sharedInstance]\n        logCameraCreationDelaySplitPointStillImageCaptureApi:@\"AVStillImageCaptureAsynchronous\"];\n    @try {\n        [stillImageOutput\n            captureStillImageAsynchronouslyFromConnection:captureConnection\n                                        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {\n                                            if (imageDataSampleBuffer) {\n                                                NSData *imageData = [AVCaptureStillImageOutput\n                                                    jpegStillImageNSDataRepresentation:imageDataSampleBuffer];\n                                                [self\n                                                    _legacyStillImageCaptureDidSucceedWithImageData:imageData\n                                                                                       sampleBuffer:\n                                                                                           imageDataSampleBuffer\n                                                                                         cameraInfo:\n                                                                                             cameraInfoForBuffer(\n                                                                                                 imageDataSampleBuffer)\n                                                                                              error:error];\n                                            } else {\n                                                if (error.domain == AVFoundationErrorDomain && error.code == -11800) {\n                                                    // iOS 7 \"unknown error\"; works if we retry\n                                                    [self _legacyStillImageCaptureWillRetryWithError:error];\n                                                } else {\n     
                                               [self _legacyStillImageCaptureDidFailWithError:error];\n                                                }\n                                            }\n                                        }];\n    } @catch (NSException *e) {\n        [SCCrashLogger logHandledException:e];\n        [self _legacyStillImageCaptureDidFailWithError:\n                  [NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain\n                                      code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException\n                                  userInfo:@{\n                                      @\"exception\" : e\n                                  }]];\n    }\n}\n\n- (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput\n                                       captureConnection:(AVCaptureConnection *)captureConnection\n                                           capturerState:(SCManagedCapturerState *)state\n{\n    [[SCLogger sharedInstance] logStillImageCaptureApi:@\"AVStillImageOutputCaptureBracketAsynchronously\"];\n    [[SCCoreCameraLogger sharedInstance]\n        logCameraCreationDelaySplitPointStillImageCaptureApi:@\"AVStillImageOutputCaptureBracketAsynchronously\"];\n    NSArray *bracketArray = [self _bracketSettingsArray:captureConnection];\n    @try {\n        [stillImageOutput\n            captureStillImageBracketAsynchronouslyFromConnection:captureConnection\n                                               withSettingsArray:bracketArray\n                                               completionHandler:^(CMSampleBufferRef imageDataSampleBuffer,\n                                                                   AVCaptureBracketedStillImageSettings *settings,\n                                                                   NSError *err) {\n                                                   if (!imageDataSampleBuffer) {\n                     
                                  [self _legacyStillImageCaptureDidFailWithError:err];\n                                                       return;\n                                                   }\n                                                   NSData *jpegData = [AVCaptureStillImageOutput\n                                                       jpegStillImageNSDataRepresentation:imageDataSampleBuffer];\n                                                   [self\n                                                       _legacyStillImageCaptureDidSucceedWithImageData:jpegData\n                                                                                          sampleBuffer:\n                                                                                              imageDataSampleBuffer\n                                                                                            cameraInfo:\n                                                                                                cameraInfoForBuffer(\n                                                                                                    imageDataSampleBuffer)\n                                                                                                 error:nil];\n                                               }];\n    } @catch (NSException *e) {\n        [SCCrashLogger logHandledException:e];\n        [self _legacyStillImageCaptureDidFailWithError:\n                  [NSError errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain\n                                      code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException\n                                  userInfo:@{\n                                      @\"exception\" : e\n                                  }]];\n    }\n}\n#pragma clang diagnostic pop\n\n- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection\n{\n    NSInteger const stillCount = 1;\n    
NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];\n    AVCaptureDevice *device = [stillImageConnection inputDevice];\n    AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings\n        manualExposureSettingsWithExposureDuration:device.exposureDuration\n                                               ISO:AVCaptureISOCurrent];\n    for (NSInteger i = 0; i < stillCount; i++) {\n        [bracketSettingsArray addObject:settings];\n    }\n    return [bracketSettingsArray copy];\n}\n\n- (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData\n                                           sampleBuffer:(CMSampleBufferRef)sampleBuffer\n                                             cameraInfo:(NSDictionary *)cameraInfo\n                                                  error:(NSError *)error\n{\n    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n    [[SCCoreCameraLogger sharedInstance]\n        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n    if (sampleBuffer) {\n        CFRetain(sampleBuffer);\n    }\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        UIImage *fullScreenImage = [self imageFromData:imageData\n                                     currentZoomFactor:_zoomFactor\n                                     targetAspectRatio:_aspectRatio\n                                           fieldOfView:_fieldOfView\n                                                 state:_state\n                                          sampleBuffer:sampleBuffer];\n\n        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n        _completionHandler = nil;\n        completionHandler(fullScreenImage, cameraInfo, error);\n        if (sampleBuffer) {\n            CFRelease(sampleBuffer);\n        }\n    }];\n}\n\n- 
(void)_legacyStillImageCaptureDidFailWithError:(NSError *)error\n{\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n        _completionHandler = nil;\n        completionHandler(nil, nil, error);\n    }];\n}\n\n- (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error\n{\n    if (_retries-- > 0) {\n        [_performer perform:^{\n            [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];\n        }\n                      after:kSCCameraRetryInterval];\n    } else {\n        [self _legacyStillImageCaptureDidFailWithError:error];\n    }\n}\n\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n- (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput\n#pragma clang diagnostic pop\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    NSArray *connections = [stillImageOutput.connections copy];\n    for (AVCaptureConnection *connection in connections) {\n        for (AVCaptureInputPort *port in [connection inputPorts]) {\n            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {\n                return connection;\n            }\n        }\n    }\n    return nil;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedPhotoCapturer.h",
    "content": "//\n//  SCManagedPhotoCapturer.h\n//  Snapchat\n//\n//  Created by Chao Pang on 10/5/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedStillImageCapturer.h\"\n\n@interface SCManagedPhotoCapturer : SCManagedStillImageCapturer\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedPhotoCapturer.m",
    "content": "//\n//  SCManagedPhotoCapturer.m\n//  Snapchat\n//\n//  Created by Chao Pang on 10/5/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedPhotoCapturer.h\"\n\n#import \"AVCaptureConnection+InputDevice.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedFrameHealthChecker.h\"\n#import \"SCManagedStillImageCapturer_Protected.h\"\n#import \"SCStillImageCaptureVideoInputMethod.h\"\n#import \"SCStillImageDepthBlurFilter.h\"\n\n#import <SCCrashLogger/SCCrashLogger.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCPerforming.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCLenses/SCLens.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SClogger+Performance.h>\n#import <SCWebP/UIImage+WebP.h>\n\n@import ImageIO;\n\nstatic NSString *const kSCManagedPhotoCapturerErrorDomain = @\"kSCManagedPhotoCapturerErrorDomain\";\n\nstatic NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000;\nstatic NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001;\n\ntypedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) {\n    SCManagedPhotoCapturerStatusPrepareToCapture,\n    SCManagedPhotoCapturerStatusWillCapture,\n    SCManagedPhotoCapturerStatusDidFinishProcess,\n};\n\n@interface SCManagedPhotoCapturer () <AVCapturePhotoCaptureDelegate>\n@end\n\n@implementation SCManagedPhotoCapturer {\n    AVCapturePhotoOutput *_photoOutput;\n\n    BOOL _shouldCapture;\n    BOOL _shouldEnableHRSI;\n    BOOL _portraitModeCaptureEnabled;\n    NSUInteger _retries;\n\n    CGPoint _portraitModePointOfInterest;\n    SCStillImageDepthBlurFilter *_depthBlurFilter;\n    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock;\n\n    SCStillImageCaptureVideoInputMethod *_videoFileMethod;\n\n    SCManagedPhotoCapturerStatus 
_status;\n}\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      performer:(id<SCPerforming>)performer\n             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore\n                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate\n{\n    SCTraceStart();\n    self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];\n    if (self) {\n        [self setupWithSession:session];\n        _portraitModePointOfInterest = CGPointMake(0.5, 0.5);\n    }\n    return self;\n}\n\n- (void)setupWithSession:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    _photoOutput = [[AVCapturePhotoOutput alloc] init];\n    _photoOutput.highResolutionCaptureEnabled = YES;\n    [self setAsOutput:session];\n}\n\n- (void)setAsOutput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    if ([session canAddOutput:_photoOutput]) {\n        [session addOutput:_photoOutput];\n    }\n}\n\n- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    // Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause\n    // black frame blink when enabling lenses. 
Instead, we enable HRSI in AVCapturePhotoSettings.\n    // https://ph.sc-corp.net/T96228\n    _shouldEnableHRSI = highResolutionStillImageOutputEnabled;\n}\n\n- (void)enableStillImageStabilization\n{\n    // The lens stabilization is enabled when configure AVCapturePhotoSettings\n    // instead of AVCapturePhotoOutput\n    SCTraceStart();\n}\n\n- (void)setPortraitModeCaptureEnabled:(BOOL)enabled\n{\n    _portraitModeCaptureEnabled = enabled;\n    if (@available(ios 11.0, *)) {\n        _photoOutput.depthDataDeliveryEnabled = enabled;\n    }\n    if (enabled && _depthBlurFilter == nil) {\n        _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];\n    }\n}\n\n- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest\n{\n    _portraitModePointOfInterest = pointOfInterest;\n}\n\n- (void)removeAsOutput:(AVCaptureSession *)session\n{\n    SCTraceStart();\n    [session removeOutput:_photoOutput];\n}\n\n- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio\n                            atZoomFactor:(float)zoomFactor\n                             fieldOfView:(float)fieldOfView\n                                   state:(SCManagedCapturerState *)state\n                        captureSessionID:(NSString *)captureSessionID\n                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo\n                       completionHandler:\n                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler\n{\n    SCTraceStart();\n    SCAssert(completionHandler, @\"completionHandler shouldn't be nil\");\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds\n    _aspectRatio = aspectRatio;\n    _zoomFactor = zoomFactor;\n    _fieldOfView = fieldOfView;\n    _state = state;\n    _captureSessionID = captureSessionID;\n    _shouldCaptureFromVideo = shouldCaptureFromVideo;\n    SCAssert(!_completionHandler, @\"We 
shouldn't have a _completionHandler at this point otherwise we are destroying \"\n                                  @\"current completion handler.\");\n\n    // The purpose of these lines is to attach a strong reference to self to the completion handler.\n    // This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a completion\n    // handler.\n    // If self is deallocated during the call to _photoOutput capturePhotoWithSettings:delegate:, which may happen if\n    // any AVFoundationError occurs,\n    // then its callback method, captureOutput:didFinish..., will not be called, and the completion handler will be\n    // forgotten.\n    // This comes with a risk of a memory leak. If for whatever reason the completion handler field is never used and\n    // then unset,\n    // then we have a permanent retain cycle.\n    _callbackBlock = completionHandler;\n    __typeof(self) strongSelf = self;\n    _completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {\n        strongSelf->_callbackBlock(fullScreenImage, metadata, error);\n        strongSelf->_callbackBlock = nil;\n    };\n    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];\n\n    if (!_adjustingExposureManualDetect) {\n        SCLogCoreCameraInfo(@\"Capturing still image now\");\n        [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];\n        _shouldCapture = NO;\n    } else {\n        SCLogCoreCameraInfo(@\"Wait adjusting exposure (or after 0.4 seconds) and then capture still image\");\n        _shouldCapture = YES;\n        [self _deadlineCapturePhoto];\n    }\n}\n\n#pragma mark - SCManagedDeviceCapacityAnalyzerListener\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer 
performImmediatelyIfCurrentPerformer:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.\n        self->_adjustingExposureManualDetect = adjustingExposure;\n        [self _didChangeAdjustingExposure:adjustingExposure\n                             withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];\n    }];\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        self->_lightingConditionType = lightingCondition;\n    }];\n}\n\n#pragma mark - SCManagedCapturerListener\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.\n        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];\n    }];\n}\n\n#pragma mark - AVCapturePhotoCaptureDelegate\n\n- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput\n    didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer\n                previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer\n                        resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings\n                         bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings\n                                   error:(NSError *)error\n{\n    
SCTraceStart();\n    if (photoSampleBuffer) {\n        CFRetain(photoSampleBuffer);\n    }\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]);\n        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {\n            NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer\n                                                                            previewPhotoSampleBuffer:nil];\n\n            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                                  uniqueId:@\"IMAGE\"\n                                                splitPoint:@\"DID_FINISH_PROCESSING\"];\n            [self _capturePhotoFinishedWithImageData:imageData\n                                        sampleBuffer:photoSampleBuffer\n                                          cameraInfo:cameraInfoForBuffer(photoSampleBuffer)\n                                               error:error];\n\n        } else {\n            SCLogCoreCameraInfo(@\"DidFinishProcessingPhoto with unexpected status: %@\",\n                                [self _photoCapturerStatusToString:self->_status]);\n            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain\n                                                                    code:kSCManagedPhotoCapturerInconsistentStatus\n                                                                userInfo:nil]];\n        }\n        CFRelease(photoSampleBuffer);\n    }];\n}\n\n- (void)captureOutput:(AVCapturePhotoOutput *)output\n    didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo\n                       error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0)\n{\n    SCTraceStart();\n    
@weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        NSData *imageData = [photo fileDataRepresentation];\n        SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]);\n        if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {\n            if (@available(ios 11.0, *)) {\n                if (_portraitModeCaptureEnabled) {\n                    RenderData renderData = {\n                        .depthDataMap = photo.depthData.depthDataMap,\n                        .depthBlurPointOfInterest = &_portraitModePointOfInterest,\n                    };\n                    imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData];\n                }\n            }\n\n            [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                                  uniqueId:@\"IMAGE\"\n                                                splitPoint:@\"DID_FINISH_PROCESSING\"];\n\n            [self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error];\n\n        } else {\n            SCLogCoreCameraInfo(@\"DidFinishProcessingPhoto with unexpected status: %@\",\n                                [self _photoCapturerStatusToString:self->_status]);\n            [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain\n                                                                    code:kSCManagedPhotoCapturerInconsistentStatus\n                                                                userInfo:nil]];\n        }\n    }];\n}\n\n- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput\n    willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        
SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {\n            if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) {\n                self->_status = SCManagedPhotoCapturerStatusWillCapture;\n\n                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                                      uniqueId:@\"IMAGE\"\n                                                    splitPoint:@\"WILL_BEGIN_CAPTURE\"];\n                [self->_delegate managedStillImageCapturerWillCapturePhoto:self];\n            } else {\n                SCLogCoreCameraInfo(@\"WillBeginCapture with unexpected status: %@\",\n                                    [self _photoCapturerStatusToString:self->_status]);\n            }\n        }\n    }];\n}\n\n- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput\n    didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {\n            if (self->_status == SCManagedPhotoCapturerStatusWillCapture ||\n                self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) {\n                [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                                      uniqueId:@\"IMAGE\"\n                                                    splitPoint:@\"DID_CAPTURE_PHOTO\"];\n                [self->_delegate managedStillImageCapturerDidCapturePhoto:self];\n            } else {\n                SCLogCoreCameraInfo(@\"DidCapturePhoto with unexpected status: %@\",\n                
                    [self _photoCapturerStatusToString:self->_status]);\n            }\n        }\n    }];\n}\n\n#pragma mark - Private methods\n\n- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy\n{\n    if (!adjustingExposure && self->_shouldCapture) {\n        SCLogCoreCameraInfo(@\"Capturing after adjusting exposure using strategy: %@\", strategy);\n        [self _capturePhotoWithExposureAdjustmentStrategy:strategy];\n        self->_shouldCapture = NO;\n    }\n}\n\n- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData\n                              sampleBuffer:(CMSampleBufferRef)sampleBuffer\n                                cameraInfo:(NSDictionary *)cameraInfo\n                                     error:(NSError *)error\n{\n    [self _photoCaptureDidSucceedWithImageData:imageData\n                                  sampleBuffer:sampleBuffer\n                                    cameraInfo:cameraInfoForBuffer(sampleBuffer)\n                                         error:error];\n    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;\n}\n\n- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error\n{\n    [self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error];\n    self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;\n}\n\n- (void)_deadlineCapturePhoto\n{\n    SCTraceStart();\n    // Use the SCManagedCapturer's private queue.\n    @weakify(self);\n    [_performer perform:^{\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        if (self->_shouldCapture) {\n            [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];\n            self->_shouldCapture = NO;\n        }\n    }\n                  after:SCCameraTweaksExposureDeadline()];\n}\n\n- (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy\n{\n    SCTraceStart();\n    
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];\n    if (_shouldCaptureFromVideo) {\n        [self captureStillImageFromVideoBuffer];\n        return;\n    }\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    SCAssert(_photoOutput, @\"_photoOutput shouldn't be nil\");\n    _status = SCManagedPhotoCapturerStatusPrepareToCapture;\n    AVCapturePhotoOutput *photoOutput = _photoOutput;\n    AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput];\n    SCManagedCapturerState *state = [_state copy];\n#if !TARGET_IPHONE_SIMULATOR\n    if (!captureConnection) {\n        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n        _completionHandler = nil;\n        completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain\n                                                        code:kSCManagedStillImageCapturerNoStillImageConnection\n                                                    userInfo:nil]);\n    }\n#endif\n    AVCapturePhotoSettings *photoSettings =\n        [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state];\n    // Select appropriate image capture method\n\n    if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {\n        if (!_videoFileMethod) {\n            _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];\n        }\n        [[SCLogger sharedInstance] logStillImageCaptureApi:@\"SCStillImageCaptureVideoFileInput\"];\n        [[SCCoreCameraLogger sharedInstance]\n            logCameraCreationDelaySplitPointStillImageCaptureApi:@\"SCStillImageCaptureVideoFileInput\"];\n        [_delegate managedStillImageCapturerWillCapturePhoto:self];\n        [_videoFileMethod captureStillImageWithCapturerState:state\n            successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {\n 
               [_performer performImmediatelyIfCurrentPerformer:^{\n                    [self _photoCaptureDidSucceedWithImageData:imageData\n                                                  sampleBuffer:nil\n                                                    cameraInfo:cameraInfo\n                                                         error:error];\n                }];\n            }\n            failureBlock:^(NSError *error) {\n                [_performer performImmediatelyIfCurrentPerformer:^{\n                    [self _photoCaptureDidFailWithError:error];\n                }];\n            }];\n    } else {\n        [[SCLogger sharedInstance] logStillImageCaptureApi:@\"AVCapturePhoto\"];\n        [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@\"AVCapturePhoto\"];\n        @try {\n            [photoOutput capturePhotoWithSettings:photoSettings delegate:self];\n        } @catch (NSException *e) {\n            [SCCrashLogger logHandledException:e];\n            [self\n                _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain\n                                                                  code:kSCManagedPhotoCapturerErrorEncounteredException\n                                                              userInfo:@{\n                                                                  @\"exception\" : e\n                                                              }]];\n        }\n    }\n}\n\n- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData\n                                sampleBuffer:(CMSampleBufferRef)sampleBuffer\n                                  cameraInfo:(NSDictionary *)cameraInfo\n                                       error:(NSError *)error\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n    [[SCCoreCameraLogger 
sharedInstance]\n        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n\n    UIImage *fullScreenImage = [self imageFromData:imageData\n                                 currentZoomFactor:_zoomFactor\n                                 targetAspectRatio:_aspectRatio\n                                       fieldOfView:_fieldOfView\n                                             state:_state\n                                      sampleBuffer:sampleBuffer];\n    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                          uniqueId:@\"IMAGE\"\n                                        splitPoint:@\"WILL_START_COMPLETION_HANDLER\"];\n    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n    _completionHandler = nil;\n    if (completionHandler) {\n        completionHandler(fullScreenImage, cameraInfo, error);\n    }\n}\n\n- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData\n                                    metadata:(NSDictionary *)metadata\n                                       error:(NSError *)error\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n    [[SCCoreCameraLogger sharedInstance]\n        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n\n    UIImage *fullScreenImage = [self imageFromData:imageData\n                                 currentZoomFactor:_zoomFactor\n                                 targetAspectRatio:_aspectRatio\n                                       fieldOfView:_fieldOfView\n                                             state:_state\n                                          metadata:metadata];\n    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay\n                                          
uniqueId:@\"IMAGE\"\n                                        splitPoint:@\"WILL_START_COMPLETION_HANDLER\"];\n    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n    _completionHandler = nil;\n    if (completionHandler) {\n        completionHandler(fullScreenImage, metadata, error);\n    }\n}\n\n- (void)_photoCaptureDidFailWithError:(NSError *)error\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n    _completionHandler = nil;\n    if (completionHandler) {\n        completionHandler(nil, nil, error);\n    }\n}\n\n- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput\n{\n    SCTraceStart();\n    SCAssert([_performer isCurrentPerformer], @\"\");\n    NSArray *connections = [photoOutput.connections copy];\n    for (AVCaptureConnection *connection in connections) {\n        for (AVCaptureInputPort *port in [connection inputPorts]) {\n            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {\n                return connection;\n            }\n        }\n    }\n    return nil;\n}\n\n- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput\n                                        captureConnection:(AVCaptureConnection *)captureConnection\n                                             captureState:(SCManagedCapturerState *)state\n{\n    SCTraceStart();\n    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {\n        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput\n                                        captureConnection:captureConnection\n                                             captureState:state];\n    } else {\n        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];\n    }\n}\n\n- 
(BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state\n{\n    // According to Apple documentation, AVCapturePhotoBracketSettings does not support flashMode,\n    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.\n    // Besides, we only use AVCapturePhotoBracketSettings if capture settings need to be set manually.\n    return !state.flashActive && !_portraitModeCaptureEnabled &&\n           (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||\n            [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);\n}\n\n- (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput\n                                                    captureState:(SCManagedCapturerState *)state\n{\n    SCTraceStart();\n    // Specify the output file format\n    AVCapturePhotoSettings *photoSettings =\n        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];\n\n    // Enable HRSI if necessary\n    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {\n        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;\n    }\n\n    // Turn on flash if active and supported by device\n    if (state.flashActive && state.flashSupported) {\n        photoSettings.flashMode = AVCaptureFlashModeOn;\n    }\n\n    // Turn on stabilization if available\n    // Seems that setting autoStillImageStabilizationEnabled doesn't work during video capture session,\n    // but we enable it anyway as it is harmless.\n    if (photoSettings.isAutoStillImageStabilizationEnabled) {\n        photoSettings.autoStillImageStabilizationEnabled = YES;\n    }\n\n    if (_portraitModeCaptureEnabled) {\n        if (@available(ios 11.0, *)) {\n            photoSettings.depthDataDeliveryEnabled = YES;\n        }\n    }\n\n    return photoSettings;\n}\n\n- (AVCapturePhotoSettings 
*)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput\n                                               captureConnection:(AVCaptureConnection *)captureConnection\n                                                    captureState:(SCManagedCapturerState *)state\n{\n    SCTraceStart();\n    OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];\n    NSArray<AVCaptureBracketedStillImageSettings *> *bracketedSettings =\n        [self _bracketSettingsArray:captureConnection withCaptureState:state];\n    SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,\n             @\"Bracket photo count cannot exceed maximum count\");\n    // Specify the output file format and raw pixel format\n    AVCapturePhotoBracketSettings *photoSettings =\n        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType\n                                                                  processedFormat:@{\n                                                                      AVVideoCodecKey : AVVideoCodecJPEG\n                                                                  }\n                                                                bracketedSettings:bracketedSettings];\n\n    // Enable HRSI if necessary\n    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {\n        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;\n    }\n\n    // If lens stabilization is supported, enable the stabilization when device is moving\n    if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&\n        [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {\n        photoSettings.lensStabilizationEnabled = YES;\n    }\n    return photoSettings;\n}\n\n- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection\n                  
withCaptureState:(SCManagedCapturerState *)state\n{\n    NSInteger const stillCount = 1;\n    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];\n    AVCaptureDevice *device = [stillImageConnection inputDevice];\n    CMTime exposureDuration = device.exposureDuration;\n    if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {\n        exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];\n    }\n    AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings\n        manualExposureSettingsWithExposureDuration:exposureDuration\n                                               ISO:AVCaptureISOCurrent];\n    for (NSInteger i = 0; i < stillCount; i++) {\n        [bracketSettingsArray addObject:settings];\n    }\n    return [bracketSettingsArray copy];\n}\n\n- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status\n{\n    switch (status) {\n    case SCManagedPhotoCapturerStatusPrepareToCapture:\n        return @\"PhotoCapturerStatusPrepareToCapture\";\n    case SCManagedPhotoCapturerStatusWillCapture:\n        return @\"PhotoCapturerStatusWillCapture\";\n    case SCManagedPhotoCapturerStatusDidFinishProcess:\n        return @\"PhotoCapturerStatusDidFinishProcess\";\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedRecordedVideo.h",
    "content": "// ed265cb0c346ae35dce70d3fc12a0bd8deae0802\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import <AvailabilityMacros.h>\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n@protocol SCManagedRecordedVideo <NSObject, NSCoding, NSCopying>\n\n@property (nonatomic, copy, readonly) NSURL *videoURL;\n\n@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;\n\n@property (nonatomic, copy, readonly) UIImage *placeholderImage;\n\n@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;\n\n@end\n\n@interface SCManagedRecordedVideo : NSObject <SCManagedRecordedVideo>\n\n@property (nonatomic, copy, readonly) NSURL *videoURL;\n\n@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;\n\n@property (nonatomic, copy, readonly) UIImage *placeholderImage;\n\n@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;\n\n- (instancetype)initWithVideoURL:(NSURL *)videoURL\n             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL\n                placeholderImage:(UIImage *)placeholderImage\n             isFrontFacingCamera:(BOOL)isFrontFacingCamera;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedRecordedVideo.m",
    "content": "// ed265cb0c346ae35dce70d3fc12a0bd8deae0802\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedRecordedVideo.h\"\n\n#import <FastCoding/FastCoder.h>\n\n@implementation SCManagedRecordedVideo\n\n- (instancetype)initWithVideoURL:(NSURL *)videoURL\n             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL\n                placeholderImage:(UIImage *)placeholderImage\n             isFrontFacingCamera:(BOOL)isFrontFacingCamera\n{\n    self = [super init];\n    if (self) {\n        _videoURL = [(NSObject *)videoURL copy];\n        _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy];\n        _placeholderImage = [(NSObject *)placeholderImage copy];\n        _isFrontFacingCamera = isFrontFacingCamera;\n    }\n    return self;\n}\n\n#pragma mark - NSCopying\n\n- (instancetype)copyWithZone:(NSZone *)zone\n{\n    // Immutable object, bypass copy\n    return self;\n}\n\n#pragma mark - NSCoding\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder\n{\n    self = [super init];\n    if (self) {\n        _videoURL = [aDecoder decodeObjectForKey:@\"videoURL\"];\n        _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@\"rawVideoDataFileURL\"];\n        _placeholderImage = [aDecoder decodeObjectForKey:@\"placeholderImage\"];\n        _isFrontFacingCamera = [aDecoder decodeBoolForKey:@\"isFrontFacingCamera\"];\n    }\n    return self;\n}\n\n- (void)encodeWithCoder:(NSCoder *)aCoder\n{\n    [aCoder encodeObject:_videoURL forKey:@\"videoURL\"];\n    [aCoder encodeObject:_rawVideoDataFileURL forKey:@\"rawVideoDataFileURL\"];\n    [aCoder encodeObject:_placeholderImage forKey:@\"placeholderImage\"];\n    [aCoder encodeBool:_isFrontFacingCamera forKey:@\"isFrontFacingCamera\"];\n}\n\n#pragma mark - FasterCoding\n\n- (BOOL)preferFasterCoding\n{\n    return YES;\n}\n\n- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder\n{\n    [fasterCoder encodeBool:_isFrontFacingCamera];\n    [fasterCoder encodeObject:_placeholderImage];\n   
 [fasterCoder encodeObject:_rawVideoDataFileURL];\n    [fasterCoder encodeObject:_videoURL];\n}\n\n- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder\n{\n    _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool];\n    _placeholderImage = (UIImage *)[fasterDecoder decodeObject];\n    _rawVideoDataFileURL = (NSURL *)[fasterDecoder decodeObject];\n    _videoURL = (NSURL *)[fasterDecoder decodeObject];\n}\n\n- (void)setObject:(id)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 50783861721184594ULL:\n        _placeholderImage = (UIImage *)val;\n        break;\n    case 13152167848358790ULL:\n        _rawVideoDataFileURL = (NSURL *)val;\n        break;\n    case 48945309622713334ULL:\n        _videoURL = (NSURL *)val;\n        break;\n    }\n}\n\n- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 11924284868025312ULL:\n        _isFrontFacingCamera = (BOOL)val;\n        break;\n    }\n}\n\n+ (uint64_t)fasterCodingVersion\n{\n    return 17435789727352013688ULL;\n}\n\n+ (uint64_t *)fasterCodingKeys\n{\n    static uint64_t keys[] = {\n        4 /* Total */,\n        FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool),\n        FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject),\n        FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject),\n        FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject),\n    };\n    return keys;\n}\n\n#pragma mark - isEqual\n\n- (BOOL)isEqual:(id)object\n{\n    if (self == object) {\n        return YES;\n    }\n    if (![object isMemberOfClass:[self class]]) {\n        return NO;\n    }\n    SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object;\n    if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) {\n        return NO;\n    }\n    if (other.rawVideoDataFileURL != _rawVideoDataFileURL &&\n        ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) {\n        return NO;\n    }\n    if 
(other.placeholderImage != _placeholderImage &&\n        ![(NSObject *)other.placeholderImage isEqual:_placeholderImage]) {\n        return NO;\n    }\n    if (other.isFrontFacingCamera != _isFrontFacingCamera) {\n        return NO;\n    }\n    return YES;\n}\n\n- (NSUInteger)hash\n{\n    NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash],\n                              (NSUInteger)_isFrontFacingCamera};\n    NSUInteger result = subhashes[0];\n    for (int i = 1; i < 4; i++) {\n        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);\n        base = (~base) + (base << 18);\n        base ^= (base >> 31);\n        base *= 21;\n        base ^= (base >> 11);\n        base += (base << 6);\n        base ^= (base >> 22);\n        result = (NSUInteger)base;\n    }\n    return result;\n}\n\n#pragma mark - Print description in console: lldb> po #{variable name}\n\n- (NSString *)description\n{\n    NSMutableString *desc = [NSMutableString string];\n    [desc appendString:@\"{\\n\"];\n    [desc appendFormat:@\"\\tvideoURL:%@\\n\", [_videoURL description]];\n    [desc appendFormat:@\"\\trawVideoDataFileURL:%@\\n\", [_rawVideoDataFileURL description]];\n    [desc appendFormat:@\"\\tplaceholderImage:%@\\n\", [_placeholderImage description]];\n    [desc appendFormat:@\"\\tisFrontFacingCamera:%@\\n\", [@(_isFrontFacingCamera) description]];\n    [desc appendString:@\"}\\n\"];\n\n    return [desc copy];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedRecordedVideo.value",
    "content": "interface SCManagedRecordedVideo\n\tNSURL *videoURL;\n\tNSURL *rawVideoDataFileURL;\n\tUIImage *placeholderImage;\n\tBOOL isFrontFacingCamera;\nend"
  },
  {
    "path": "ManagedCapturer/SCManagedStillImageCapturer.h",
    "content": "//\n//  SCManagedStillImageCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/30/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCCoreCameraLogger.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedCapturerListener.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n#import <SCLogger/SCCameraMetrics+ExposureAdjustment.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\nSC_EXTERN_C_BEGIN\n\nextern BOOL SCPhotoCapturerIsEnabled(void);\n\nSC_EXTERN_C_END\n\n@protocol SCPerforming;\n@protocol SCManagedStillImageCapturerDelegate;\n@class SCCaptureResource;\n\ntypedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,\n                                                                                         NSDictionary *metadata,\n                                                                                         NSError *error);\n\n@interface SCManagedStillImageCapturer\n    : NSObject <SCManagedDeviceCapacityAnalyzerListener, SCManagedCapturerListener, SCManagedVideoDataSourceListener> {\n    SCManagedCapturerState *_state;\n    BOOL _shouldCaptureFromVideo;\n    BOOL _captureImageFromVideoImmediately;\n    CGFloat _aspectRatio;\n    float _zoomFactor;\n    float _fieldOfView;\n    BOOL _adjustingExposureManualDetect;\n    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler;\n}\n\n+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource;\n\nSC_INIT_AND_NEW_UNAVAILABLE;\n\n@property (nonatomic, weak) id<SCManagedStillImageCapturerDelegate> delegate;\n\n- (void)setupWithSession:(AVCaptureSession *)session;\n\n- (void)setAsOutput:(AVCaptureSession *)session;\n\n- (void)removeAsOutput:(AVCaptureSession *)session;\n\n- 
(void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled;\n\n- (void)setPortraitModeCaptureEnabled:(BOOL)enabled;\n\n- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest;\n\n- (void)enableStillImageStabilization;\n\n- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio\n                            atZoomFactor:(float)zoomFactor\n                             fieldOfView:(float)fieldOfView\n                                   state:(SCManagedCapturerState *)state\n                        captureSessionID:(NSString *)captureSessionID\n                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo\n                       completionHandler:\n                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler;\n\n- (void)captureStillImageFromVideoBuffer;\n\n@end\n\n@protocol SCManagedStillImageCapturerDelegate <NSObject>\n\n- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer;\n\n- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer;\n\n@optional\n\n- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;\n\n- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedStillImageCapturer.mm",
    "content": "//\n//  SCManagedStillImageCapturer.m\n//  Snapchat\n//\n//  Created by Liu Liu on 4/30/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedStillImageCapturer.h\"\n\n#import \"SCCameraSettingUtils.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCaptureSession.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLensAPI.h\"\n#import \"SCManagedFrameHealthChecker.h\"\n#import \"SCManagedLegacyStillImageCapturer.h\"\n#import \"SCManagedPhotoCapturer.h\"\n#import \"SCManagedStillImageCapturerHandler.h\"\n#import \"SCManagedStillImageCapturer_Protected.h\"\n\n#import <SCFoundation/NSException+Exceptions.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCPerforming.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/UIImage+CVPixelBufferRef.h>\n#import <SCLenses/SCLens.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCWebP/UIImage+WebP.h>\n\n#import <ImageIO/ImageIO.h>\n\nNSString *const kSCManagedStillImageCapturerErrorDomain = @\"kSCManagedStillImageCapturerErrorDomain\";\n\nNSInteger const kSCCameraShutterSoundID = 1108;\n\n#if !TARGET_IPHONE_SIMULATOR\nNSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101;\n#endif\nNSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102;\n\n// We will do the image capture regardless if these is still camera adjustment in progress after 0.4 seconds.\nNSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4;\nNSTimeInterval const kSCCameraRetryInterval = 0.1;\n\nBOOL SCPhotoCapturerIsEnabled(void)\n{\n    // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2\n    return SC_AT_LEAST_IOS_10_2;\n}\n\nNSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer)\n{\n    CFDictionaryRef exifAttachments =\n        
(CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL);\n    float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue];\n    NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue];\n    return @{\n        (__bridge NSString *) kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating), (__bridge NSString *)\n        kCGImagePropertyExifBrightnessValue : @(brightness)\n    };\n}\n\n@implementation SCManagedStillImageCapturer\n\n+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    if (SCPhotoCapturerIsEnabled()) {\n        return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession\n                                                     performer:captureResource.queuePerformer\n                                            lensProcessingCore:captureResource.lensProcessingCore\n                                                      delegate:captureResource.stillImageCapturerHandler];\n    } else {\n        return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession\n                                                                performer:captureResource.queuePerformer\n                                                       lensProcessingCore:captureResource.lensProcessingCore\n                                                                 delegate:captureResource.stillImageCapturerHandler];\n    }\n}\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      performer:(id<SCPerforming>)performer\n             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensAPI\n                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate\n{\n    self = [super init];\n    if (self) {\n        _session = session;\n        _performer = performer;\n        _lensAPI = lensAPI;\n        _delegate = delegate;\n    
}\n    return self;\n}\n\n- (void)setupWithSession:(AVCaptureSession *)session\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)setAsOutput:(AVCaptureSession *)session\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)enableStillImageStabilization\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)removeAsOutput:(AVCaptureSession *)session\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)setPortraitModeCaptureEnabled:(BOOL)enabled\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio\n                            atZoomFactor:(float)zoomFactor\n                             fieldOfView:(float)fieldOfView\n                                   state:(SCManagedCapturerState *)state\n                        captureSessionID:(NSString *)captureSessionID\n                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo\n                       completionHandler:\n                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n#pragma mark - SCManagedDeviceCapacityAnalyzerListener\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeAdjustingExposure:(BOOL)adjustingExposure\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n#pragma mark - SCManagedCapturerListener\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state\n{\n    UNIMPLEMENTED_METHOD;\n}\n\n- (UIImage *)imageFromData:(NSData 
*)data\n         currentZoomFactor:(float)currentZoomFactor\n         targetAspectRatio:(CGFloat)targetAspectRatio\n               fieldOfView:(float)fieldOfView\n                     state:(SCManagedCapturerState *)state\n              sampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]\n                                currentZoomFactor:currentZoomFactor\n                                targetAspectRatio:targetAspectRatio\n                                      fieldOfView:fieldOfView\n                                            state:state];\n    // Check capture frame health before showing preview\n    NSDictionary *metadata =\n        [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer\n                                                         photoCapturerEnabled:SCPhotoCapturerIsEnabled()\n                                                                  lensEnabled:state.lensesActive\n                                                                       lensID:[_lensAPI activeLensId]];\n    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage\n                                                                       captureSettings:metadata\n                                                                      captureSessionID:_captureSessionID];\n    _captureSessionID = nil;\n    return capturedImage;\n}\n\n- (UIImage *)imageFromData:(NSData *)data\n         currentZoomFactor:(float)currentZoomFactor\n         targetAspectRatio:(CGFloat)targetAspectRatio\n               fieldOfView:(float)fieldOfView\n                     state:(SCManagedCapturerState *)state\n                  metadata:(NSDictionary *)metadata\n{\n    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]\n                                currentZoomFactor:currentZoomFactor\n                                
targetAspectRatio:targetAspectRatio\n                                      fieldOfView:fieldOfView\n                                            state:state];\n    // Check capture frame health before showing preview\n    NSDictionary *newMetadata =\n        [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata\n                                                     photoCapturerEnabled:SCPhotoCapturerIsEnabled()\n                                                              lensEnabled:state.lensesActive\n                                                                   lensID:[_lensAPI activeLensId]];\n    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage\n                                                                       captureSettings:newMetadata\n                                                                      captureSessionID:_captureSessionID];\n    _captureSessionID = nil;\n    return capturedImage;\n}\n\n- (UIImage *)imageFromImage:(UIImage *)image\n          currentZoomFactor:(float)currentZoomFactor\n          targetAspectRatio:(CGFloat)targetAspectRatio\n                fieldOfView:(float)fieldOfView\n                      state:(SCManagedCapturerState *)state\n{\n    UIImage *fullScreenImage = image;\n    if (state.lensesActive && _lensAPI.isLensApplied) {\n        fullScreenImage = [_lensAPI processImage:fullScreenImage\n                                    maxPixelSize:[_lensAPI maxPixelSize]\n                                  devicePosition:state.devicePosition\n                                     fieldOfView:fieldOfView];\n    }\n    // Resize and crop\n    return [self resizeImage:fullScreenImage currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio];\n}\n\n- (UIImage *)resizeImage:(UIImage *)image\n       currentZoomFactor:(float)currentZoomFactor\n       targetAspectRatio:(CGFloat)targetAspectRatio\n{\n    SCTraceStart();\n    if (currentZoomFactor == 
1) {\n        return SCCropImageToTargetAspectRatio(image, targetAspectRatio);\n    } else {\n        @autoreleasepool {\n            return [self resizeImageUsingCG:image\n                          currentZoomFactor:currentZoomFactor\n                          targetAspectRatio:targetAspectRatio\n                               maxPixelSize:[_lensAPI maxPixelSize]];\n        }\n    }\n}\n\n- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage\n              currentZoomFactor:(float)currentZoomFactor\n              targetAspectRatio:(CGFloat)targetAspectRatio\n                   maxPixelSize:(CGFloat)maxPixelSize\n{\n    size_t imageWidth = CGImageGetWidth(inputImage.CGImage);\n    size_t imageHeight = CGImageGetHeight(inputImage.CGImage);\n    SCLogGeneralInfo(@\"Captured still image at %dx%d\", (int)imageWidth, (int)imageHeight);\n    size_t targetWidth, targetHeight;\n    float zoomFactor = currentZoomFactor;\n    if (imageWidth > imageHeight) {\n        targetWidth = maxPixelSize;\n        targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth;\n        // Update zoom factor here\n        zoomFactor *= (float)maxPixelSize / imageWidth;\n    } else {\n        targetHeight = maxPixelSize;\n        targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight;\n        zoomFactor *= (float)maxPixelSize / imageHeight;\n    }\n    if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) {\n        SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio,\n                                     &targetWidth, &targetHeight);\n    }\n    CGContextRef context =\n        CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage),\n                              CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8,\n                              CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage));\n    
CGContextSetInterpolationQuality(context, kCGInterpolationHigh);\n    CGContextDrawImage(context, CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor,\n                                           targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor, imageWidth * zoomFactor,\n                                           imageHeight * zoomFactor),\n                       inputImage.CGImage);\n    CGImageRef thumbnail = CGBitmapContextCreateImage(context);\n    CGContextRelease(context);\n    UIImage *image =\n        [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation];\n    CGImageRelease(thumbnail);\n    return image;\n}\n\n- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration\n{\n    CMTime adjustedExposureDuration = exposureDuration;\n    if (_lightingConditionType == SCCapturerLightingConditionTypeDark) {\n        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5);\n    } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) {\n        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5);\n    }\n    return adjustedExposureDuration;\n}\n\n#pragma mark - SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceStart();\n    SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately);\n    _captureImageFromVideoImmediately = NO;\n    @weakify(self);\n    CFRetain(sampleBuffer);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        [self _didCapturePhotoFromVideoBuffer];\n        UIImageOrientation orientation = devicePosition == SCManagedCaptureDevicePositionBack\n                          
                   ? UIImageOrientationRight\n                                             : UIImageOrientationLeftMirrored;\n        UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer)\n                                                   backingType:UIImageBackingTypeCGImage\n                                                   orientation:orientation\n                                                       context:[CIContext contextWithOptions:nil]];\n        UIImage *fullScreenImage = [self imageFromImage:videoImage\n                                      currentZoomFactor:_zoomFactor\n                                      targetAspectRatio:_aspectRatio\n                                            fieldOfView:_fieldOfView\n                                                  state:_state];\n        NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy];\n        cameraInfo[@\"capture_image_from_video_buffer\"] = @\"enabled\";\n        [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo];\n        CFRelease(sampleBuffer);\n    }];\n}\n\n- (void)_willBeginCapturePhotoFromVideoBuffer\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {\n            [self->_delegate managedStillImageCapturerWillCapturePhoto:self];\n        }\n    }];\n}\n\n- (void)_didCapturePhotoFromVideoBuffer\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {\n            [self->_delegate 
managedStillImageCapturerDidCapturePhoto:self];\n        }\n    }];\n}\n\n- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n        [[SCCoreCameraLogger sharedInstance]\n            logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];\n        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;\n        _completionHandler = nil;\n        if (completionHandler) {\n            completionHandler(image, cameraInfo, nil);\n        }\n    }];\n}\n\n- (void)captureStillImageFromVideoBuffer\n{\n    SCTraceStart();\n    @weakify(self);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        @strongify(self);\n        SC_GUARD_ELSE_RETURN(self);\n        AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil);\n        [self _willBeginCapturePhotoFromVideoBuffer];\n        self->_captureImageFromVideoImmediately = YES;\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedStillImageCapturerHandler.h",
    "content": "//\n//  SCManagedStillImageCapturerHandler.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedStillImageCapturer.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n@protocol SCDeviceMotionProvider\n, SCFileInputDecider;\n\n@interface SCManagedStillImageCapturerHandler : NSObject <SCManagedStillImageCapturerDelegate>\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedStillImageCapturerHandler.m",
    "content": "//\n//  SCManagedStillImageCapturerHandler.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedStillImageCapturerHandler.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerSampleMetadata.h\"\n#import \"SCManagedCapturerState.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedStillImageCapturerHandler () {\n    __weak SCCaptureResource *_captureResource;\n}\n\n@end\n\n@implementation SCManagedStillImageCapturerHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"\");\n        _captureResource = captureResource;\n    }\n    return self;\n}\n\n- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Will capture photo. 
stillImageCapturer:%@\", _captureResource.stillImageCapturer);\n    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        if (_captureResource.stillImageCapturer) {\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc]\n                initWithPresentationTimestamp:kCMTimeZero\n                                  fieldOfView:_captureResource.device.fieldOfView];\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                           willCapturePhoto:state\n                                             sampleMetadata:sampleMetadata];\n            });\n        }\n    }];\n}\n\n- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did capture photo. stillImageCapturer:%@\", _captureResource.stillImageCapturer);\n    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{\n        SCTraceStart();\n        if (_captureResource.stillImageCapturer) {\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCapturePhoto:state];\n            });\n        }\n    }];\n}\n\n- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer\n{\n    return _captureResource.deviceMotionProvider.isUnderDeviceMotion;\n}\n\n- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer\n{\n    return _captureResource.fileInputDecider.shouldProcessFileInput;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedStillImageCapturer_Protected.h",
    "content": "//\n//  SCManagedStillImageCapturer_Protected.h\n//  Snapchat\n//\n//  Created by Chao Pang on 10/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\nSC_EXTERN_C_BEGIN\nextern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer);\nSC_EXTERN_C_END\n\nextern NSString *const kSCManagedStillImageCapturerErrorDomain;\n\n#if !TARGET_IPHONE_SIMULATOR\nextern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection;\n#endif\nextern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground;\n\n// We will do the image capture regardless if these is still camera adjustment in progress after 0.4 seconds.\nextern NSTimeInterval const kSCManagedStillImageCapturerDeadline;\nextern NSTimeInterval const kSCCameraRetryInterval;\n\n@protocol SCManagedCapturerLensAPI;\n\n@interface SCManagedStillImageCapturer () {\n  @protected\n    id<SCManagedCapturerLensAPI> _lensAPI;\n    id<SCPerforming> _performer;\n    AVCaptureSession *_session;\n    id<SCManagedStillImageCapturerDelegate> __weak _delegate;\n    NSString *_captureSessionID;\n    SCCapturerLightingConditionType _lightingConditionType;\n}\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      performer:(id<SCPerforming>)performer\n             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore\n                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate;\n\n- (UIImage *)imageFromData:(NSData *)data\n         currentZoomFactor:(float)currentZoomFactor\n         targetAspectRatio:(CGFloat)targetAspectRatio\n               fieldOfView:(float)fieldOfView\n                     state:(SCManagedCapturerState *)state\n              sampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n- (UIImage *)imageFromData:(NSData *)data\n         currentZoomFactor:(float)currentZoomFactor\n         targetAspectRatio:(CGFloat)targetAspectRatio\n               fieldOfView:(float)fieldOfView\n                
     state:(SCManagedCapturerState *)state\n                  metadata:(NSDictionary *)metadata;\n\n- (UIImage *)imageFromImage:(UIImage *)image\n          currentZoomFactor:(float)currentZoomFactor\n          targetAspectRatio:(CGFloat)targetAspectRatio\n                fieldOfView:(float)fieldOfView\n                      state:(SCManagedCapturerState *)state;\n\n- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoARDataSource.h",
    "content": "//\n//  SCManagedVideoARDataSource.h\n//  Snapchat\n//\n//  Created by Eyal Segal on 20/10/2017.\n//\n\n#import \"SCCapturerDefines.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSource.h>\n\n#import <ARKit/ARKit.h>\n\n@protocol SCManagedVideoARDataSource <SCManagedVideoDataSource>\n\n@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0);\n\n#ifdef SC_USE_ARKIT_FACE\n@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0);\n#endif\n\n@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0);\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturer.h",
    "content": "//\n//  SCManagedVideoCapturer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/1/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedRecordedVideo.h\"\n#import \"SCManagedVideoCapturerOutputSettings.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCCameraFoundation/SCManagedAudioDataSource.h>\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n#import <SCFoundation/SCFuture.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\ntypedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error);\n\n@class SCManagedVideoCapturer, SCTimedTask;\n\n@protocol SCManagedVideoCapturerDelegate <NSObject>\n\n// All these calbacks are invoked on a private queue for video recording channels\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)videoProviderFuture\n                          videoSize:(CGSize)videoSize\n                   placeholderImage:(UIImage *)placeholderImage\n                            session:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo\n                     session:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n            didFailWithError:(NSError *)error\n                     session:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n     
didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n                 didGetError:(NSError *)error\n                     forType:(SCManagedVideoCapturerInfoType)type\n                     session:(SCVideoCaptureSessionInfo)sessionInfo;\n\n- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer;\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n  didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n       presentationTimestamp:(CMTime)presentationTimestamp;\n\n@end\n\n/**\n * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer\n * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output\n * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings.\n */\n@interface SCManagedVideoCapturer : NSObject <SCManagedVideoDataSourceListener, SCManagedAudioDataSource>\n\n/**\n * Return the output URL that passed into beginRecordingToURL method\n */\n@property (nonatomic, copy, readonly) NSURL *outputURL;\n\n@property (nonatomic, weak) id<SCManagedVideoCapturerDelegate> delegate;\n@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession;\n@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay;\n@property (nonatomic, assign, readonly) BOOL audioQueueStarted;\n\n- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer;\n\n- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration;\n- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:\n                                 (SCManagedVideoCapturerOutputSettings *)outputSettings\n                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration\n                                                                
maxDuration:(NSTimeInterval)maxDuration\n                                                                      toURL:(NSURL *)URL\n                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat\n                                                                orientation:(AVCaptureVideoOrientation)videoOrientation\n                                                           captureSessionID:(NSString *)captureSessionID;\n\n- (void)stopRecordingAsynchronously;\n- (void)cancelRecordingAsynchronously;\n\n// Schedule a task to run, it is thread safe.\n- (void)addTimedTask:(SCTimedTask *)task;\n\n// Clear all tasks, it is thread safe.\n- (void)clearTimedTasks;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturer.m",
    "content": "//\n//  SCManagedVideoCapturer.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/1/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedVideoCapturer.h\"\n\n#import \"NSURL+Asset.h\"\n#import \"SCAudioCaptureSession.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCapturerBufferedVideoWriter.h\"\n#import \"SCCoreCameraLogger.h\"\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedFrameHealthChecker.h\"\n#import \"SCManagedVideoCapturerLogger.h\"\n#import \"SCManagedVideoCapturerTimeObserver.h\"\n\n#import <SCAudio/SCAudioSession.h>\n#import <SCAudio/SCMutableAudioSession.h>\n#import <SCBase/SCMacros.h>\n#import <SCCameraFoundation/SCManagedAudioDataSourceListenerAnnouncer.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCCoreGraphicsUtils.h>\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCFuture.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/UIImage+CVPixelBufferRef.h>\n#import <SCImageProcess/SCSnapVideoFrameRawData.h>\n#import <SCImageProcess/SCVideoFrameRawDataCollector.h>\n#import <SCImageProcess/SnapVideoMetadata.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n#import <SCLogger/SCLogger.h>\n\n#import <SCAudioScope/SCAudioSessionExperimentAdapter.h>\n\n@import CoreMedia;\n@import ImageIO;\n\nstatic NSString *const kSCAudioCaptureAudioSessionLabel = @\"CAMERA\";\n\n// wild card audio queue error code\nstatic NSInteger const kSCAudioQueueErrorWildCard = -50;\n// kAudioHardwareIllegalOperationError, it means hardware failure\nstatic NSInteger const kSCAudioQueueErrorHardware = 1852797029;\n\ntypedef NS_ENUM(NSUInteger, SCManagedVideoCapturerStatus) {\n    SCManagedVideoCapturerStatusUnknown,\n    SCManagedVideoCapturerStatusIdle,\n    SCManagedVideoCapturerStatusPrepareToRecord,\n    SCManagedVideoCapturerStatusReadyForRecording,\n 
   SCManagedVideoCapturerStatusRecording,\n    SCManagedVideoCapturerStatusError,\n};\n\n#define SCLogVideoCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@\"[SCManagedVideoCapturer] \" fmt, ##__VA_ARGS__)\n#define SCLogVideoCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@\"[SCManagedVideoCapturer] \" fmt, ##__VA_ARGS__)\n#define SCLogVideoCapturerError(fmt, ...) SCLogCoreCameraError(@\"[SCManagedVideoCapturer] \" fmt, ##__VA_ARGS__)\n\n@interface SCManagedVideoCapturer () <SCAudioCaptureSessionDelegate>\n// This value has to be atomic because it is read on a different thread (write\n// on output queue, as always)\n@property (atomic, assign, readwrite) SCManagedVideoCapturerStatus status;\n\n@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;\n\n@end\n\nstatic char *const kSCManagedVideoCapturerQueueLabel = \"com.snapchat.managed-video-capturer-queue\";\nstatic char *const kSCManagedVideoCapturerPromiseQueueLabel = \"com.snapchat.video-capture-promise\";\n\nstatic NSString *const kSCManagedVideoCapturerErrorDomain = @\"kSCManagedVideoCapturerErrorDomain\";\n\nstatic NSInteger const kSCManagedVideoCapturerCannotAddAudioVideoInput = 1001;\nstatic NSInteger const kSCManagedVideoCapturerEmptyFrame = 1002;\nstatic NSInteger const kSCManagedVideoCapturerStopBeforeStart = 1003;\nstatic NSInteger const kSCManagedVideoCapturerStopWithoutStart = 1004;\nstatic NSInteger const kSCManagedVideoCapturerZeroVideoSize = -111;\n\nstatic NSUInteger const kSCVideoContentComplexitySamplingRate = 90;\n\n// This is the maximum time we will wait for the Recording Capturer pipeline to drain\n// When video stabilization is turned on the extra frame delay is around 20 frames.\n// @30 fps this is 0.66 seconds\nstatic NSTimeInterval const kSCManagedVideoCapturerStopRecordingDeadline = 1.0;\n\nstatic const char *SCPlaceholderImageGenerationQueueLabel = \"com.snapchat.video-capturer-placeholder-queue\";\n\nstatic const char *SCVideoRecordingPreparationQueueLabel = 
\"com.snapchat.video-recording-preparation-queue\";\n\nstatic dispatch_queue_t SCPlaceholderImageGenerationQueue(void)\n{\n    static dispatch_queue_t queue;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        queue = dispatch_queue_create(SCPlaceholderImageGenerationQueueLabel, DISPATCH_QUEUE_SERIAL);\n    });\n    return queue;\n}\n\n@interface SCManagedVideoCapturer () <SCCapturerBufferedVideoWriterDelegate>\n\n@end\n\n@implementation SCManagedVideoCapturer {\n    NSTimeInterval _maxDuration;\n    NSTimeInterval _recordStartTime;\n\n    SCCapturerBufferedVideoWriter *_videoWriter;\n\n    BOOL _hasWritten;\n    SCQueuePerformer *_performer;\n    SCQueuePerformer *_videoPreparationPerformer;\n    SCAudioCaptureSession *_audioCaptureSession;\n    NSError *_lastError;\n    UIImage *_placeholderImage;\n\n    // For logging purpose\n    BOOL _isVideoSnap;\n    NSDictionary *_videoOutputSettings;\n\n    // The following value is used to control the encoder shutdown following a stop recording message.\n    // When a shutdown is requested this value will be the timestamp of the last captured frame.\n    CFTimeInterval _stopTime;\n    NSInteger _stopSession;\n    SCAudioConfigurationToken *_preparedAudioConfiguration;\n    SCAudioConfigurationToken *_audioConfiguration;\n\n    dispatch_semaphore_t _startRecordingSemaphore;\n\n    // For store the raw frame datas\n    NSInteger _rawDataFrameNum;\n    NSURL *_rawDataURL;\n    SCVideoFrameRawDataCollector *_videoFrameRawDataCollector;\n\n    CMTime _startSessionTime;\n    // Indicates how actual processing time of first frame. 
Also used for camera timer animation start offset.\n    NSTimeInterval _startSessionRealTime;\n    CMTime _endSessionTime;\n    sc_managed_capturer_recording_session_t _sessionId;\n\n    SCManagedVideoCapturerTimeObserver *_timeObserver;\n    SCManagedVideoCapturerLogger *_capturerLogger;\n\n    CGSize _outputSize;\n    BOOL _isFrontFacingCamera;\n    SCPromise<id<SCManagedRecordedVideo>> *_recordedVideoPromise;\n    SCManagedAudioDataSourceListenerAnnouncer *_announcer;\n\n    NSString *_captureSessionID;\n    CIContext *_ciContext;\n}\n\n@synthesize performer = _performer;\n\n- (instancetype)init\n{\n    SCTraceStart();\n    return [self initWithQueuePerformer:[[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerQueueLabel\n                                                               qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                                      queueType:DISPATCH_QUEUE_SERIAL\n                                                                        context:SCQueuePerformerContextCamera]];\n}\n\n- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _performer = queuePerformer;\n        _audioCaptureSession = [[SCAudioCaptureSession alloc] init];\n        _audioCaptureSession.delegate = self;\n        _announcer = [SCManagedAudioDataSourceListenerAnnouncer new];\n        self.status = SCManagedVideoCapturerStatusIdle;\n        _capturerLogger = [[SCManagedVideoCapturerLogger alloc] init];\n        _startRecordingSemaphore = dispatch_semaphore_create(0);\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    SCLogVideoCapturerInfo(@\"SCVideoCaptureSessionInfo before dealloc: %@\",\n                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));\n}\n\n- (SCVideoCaptureSessionInfo)activeSession\n{\n    return SCVideoCaptureSessionInfoMake(_startSessionTime, _endSessionTime, 
_sessionId);
}

// Returns the default capture output size for `format`: the format's dimensions,
// scaled down (and aligned to even values) when they exceed the default active
// format constants, with an extra crop ratio applied on iPhone X.
// Falls back to hard-coded 720p constants when no format is available.
- (CGSize)defaultSizeForDeviceFormat:(AVCaptureDeviceFormat *)format
{
    SCTraceStart();
    // if there is no device, and no format
    if (format == nil) {
        // hard code 720p
        return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
                          kSCManagedCapturerDefaultVideoActiveFormatHeight);
    }
    CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    CGSize size = CGSizeMake(videoDimensions.width, videoDimensions.height);
    if (videoDimensions.width > kSCManagedCapturerDefaultVideoActiveFormatWidth &&
        videoDimensions.height > kSCManagedCapturerDefaultVideoActiveFormatHeight) {
        // MAX picks the smaller shrink, so both dimensions stay at or above their
        // default values after scaling; align to even pixels for the encoder.
        CGFloat scaleFactor = MAX((kSCManagedCapturerDefaultVideoActiveFormatWidth / videoDimensions.width),
                                  (kSCManagedCapturerDefaultVideoActiveFormatHeight / videoDimensions.height));
        size = SCSizeMakeAlignTo(SCSizeApplyScale(size, scaleFactor), 2);
    }
    if ([SCDeviceName isIphoneX]) {
        size = SCSizeApplyScale(size, kSCIPhoneXCapturedImageVideoCropRatio);
    }
    return size;
}

// Crops `size` to `aspectRatio`, shrinking only one dimension; results are
// rounded to even values. kSCManagedCapturerAspectRatioUnspecified is a no-op.
- (CGSize)cropSize:(CGSize)size toAspectRatio:(CGFloat)aspectRatio
{
    if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {
        return size;
    }
    // video input is always in landscape mode, so invert the ratio
    aspectRatio = 1.0 / aspectRatio;
    if (size.width > size.height * aspectRatio) {
        size.width = size.height * aspectRatio;
    } else {
        size.height = size.width / aspectRatio;
    }
    return CGSizeMake(roundf(size.width / 2) * 2, roundf(size.height / 2) * 2);
}

// Builds the default output settings (size, bitrates, key-frame interval) used
// when the caller does not supply explicit settings; output type is video snap.
- (SCManagedVideoCapturerOutputSettings *)defaultRecordingOutputSettingsWithDeviceFormat:
    (AVCaptureDeviceFormat *)deviceFormat
{
    SCTraceStart();
    CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio();
    CGSize outputSize = [self defaultSizeForDeviceFormat:deviceFormat];
    outputSize = [self cropSize:outputSize toAspectRatio:aspectRatio];

    // [TODO](Chao): remove the dependency of SCManagedVideoCapturer on SnapVideoMetaData
    NSInteger videoBitRate = [SnapVideoMetadata averageTranscodingBitRate:outputSize
                                                              isRecording:YES
                                                              highQuality:YES
                                                                 duration:0
                                                               iFrameOnly:NO
                                                     originalVideoBitRate:0
                                                 overlayImageFileSizeBits:0
                                                        videoPlaybackRate:1
                                                            isLagunaVideo:NO
                                                        hasOverlayToBlend:NO
                                                               sourceType:SCSnapVideoFilterSourceTypeUndefined];
    SCTraceSignal(@"Setup transcoding video bitrate");
    [_capturerLogger logStartingStep:kSCCapturerStartingStepTranscodeingVideoBitrate];

    SCManagedVideoCapturerOutputSettings *outputSettings =
        [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:outputSize.width
                                                             height:outputSize.height
                                                       videoBitRate:videoBitRate
                                                       audioBitRate:64000.0
                                                   keyFrameInterval:15
                                                         outputType:SCManagedVideoCapturerOutputTypeVideoSnap];

    return outputSettings;
}

// Lazily creates the serial queue used while preparing the audio session for
// recording. Must be called on _performer.
- (SCQueuePerformer *)_getVideoPreparationPerformer
{
    SCAssert([_performer isCurrentPerformer], @"must run on _performer");
    if (!_videoPreparationPerformer) {
        _videoPreparationPerformer = [[SCQueuePerformer alloc] initWithLabel:SCVideoRecordingPreparationQueueLabel
                                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                                     context:SCQueuePerformerContextCamera];
    }
    return _videoPreparationPerformer;
}

// Pre-configures the audio session so a subsequent start-recording call is fast.
// Blocks _performer until the audio configuration completes, then reports the
// resulting error (possibly nil) to the delegate.
- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        self.status = SCManagedVideoCapturerStatusPrepareToRecord;
        // Give back any configuration left over from a previous cycle first.
        if (_audioConfiguration) {
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
        }
        __block NSError *audioSessionError = nil;
        _preparedAudioConfiguration = _audioConfiguration =
            [SCAudioSessionExperimentAdapter configureWith:configuration
                                                 performer:[self _getVideoPreparationPerformer]
                                                completion:^(NSError *error) {
                                                    audioSessionError = error;
                                                    // Only unblock the waiter if we are still in
                                                    // the prepare state.
                                                    if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
                                                        dispatch_semaphore_signal(_startRecordingSemaphore);
                                                    }
                                                }];

        // Wait until preparation for recording is done
        dispatch_semaphore_wait(_startRecordingSemaphore, DISPATCH_TIME_FOREVER);
        // NOTE(review): invoked even when audioSessionError is nil — the delegate is
        // expected to treat a nil error as success.
        [_delegate managedVideoCapturer:self
                            didGetError:audioSessionError
                                forType:SCManagedVideoCapturerInfoAudioSessionError
                                session:self.activeSession];
    }];
}

// Kicks off a recording session and immediately returns its session info.
// The actual setup (audio session, asset writer, raw-data collector) runs
// asynchronously on _performer in startRecordingBlock below.
- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                maxDuration:(NSTimeInterval)maxDuration
                                                                      toURL:(NSURL *)URL
                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                orientation:(AVCaptureVideoOrientation)videoOrientation
                                                           captureSessionID:(NSString *)captureSessionID
{
    SCTraceStart();
    _captureSessionID = [captureSessionID copy];
    [_capturerLogger prepareForStartingLog];

    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                    isUniqueEvent:NO];

    NSTimeInterval startTime = CACurrentMediaTime();
    [[SCLogger sharedInstance] logPreCaptureOperationRequestedAt:startTime];
    [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationRequested];
    _sessionId = arc4random();

    // Set an invalid time so that we don't process videos when no frame is available.
    _startSessionTime = kCMTimeInvalid;
    _endSessionTime = kCMTimeInvalid;
    _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    _audioQueueStarted = NO;

    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo at start of recording: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));

    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    [_performer performImmediatelyIfCurrentPerformer:^{
        _maxDuration = maxDuration;
        dispatch_block_t startRecordingBlock = ^{
            _rawDataFrameNum = 0;
            // Begin audio recording asynchronously; first we need to have the correct audio session.
            SCTraceStart();
            SCLogVideoCapturerInfo(@"Dequeue begin recording with audio session change delay: %lf seconds",
                                   CACurrentMediaTime() - startTime);
            if (self.status != SCManagedVideoCapturerStatusReadyForRecording) {
                SCLogVideoCapturerInfo(@"SCManagedVideoCapturer status: %lu", (unsigned long)self.status);
                // We may already be released, but this should be OK.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }
            if (_preparedAudioConfiguration != _audioConfiguration) {
                SCLogVideoCapturerInfo(
                    @"SCManagedVideoCapturer has mismatched audio session token, prepared: %@, have: %@",
                    _preparedAudioConfiguration.token, _audioConfiguration.token);
                // We are on a different audio session token already.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }

            // Divide the start-recording workflow into steps to log the delay of each;
            // a checkpoint marks the end of a step.
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession];
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_session_start_end"];

            SCLogVideoCapturerInfo(@"Prepare to begin recording");
            _lastError = nil;

            // Initialize stopTime to a number much larger than CACurrentMediaTime(),
            // which is the time from Jan 1, 2001.
            _stopTime = kCFAbsoluteTimeIntervalSince1970;

            // Restart everything
            _hasWritten = NO;

            SCManagedVideoCapturerOutputSettings *finalOutputSettings =
                outputSettings ? outputSettings : [self defaultRecordingOutputSettingsWithDeviceFormat:deviceFormat];
            _isVideoSnap = finalOutputSettings.outputType == SCManagedVideoCapturerOutputTypeVideoSnap;
            // NOTE(review): height/width appear swapped here — presumably because the
            // capture buffers are landscape while output is portrait; confirm before changing.
            _outputSize = CGSizeMake(finalOutputSettings.height, finalOutputSettings.width);
            [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoRecordingStart
                                     parameters:@{
                                         @"video_width" : @(finalOutputSettings.width),
                                         @"video_height" : @(finalOutputSettings.height),
                                         @"bit_rate" : @(finalOutputSettings.videoBitRate),
                                         @"is_video_snap" : @(_isVideoSnap),
                                     }];

            _outputURL = [URL copy];
            _rawDataURL = [_outputURL URLByAppendingPathExtension:@"dat"];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings];

            // Make sure the raw frame data file is gone
            SCTraceSignal(@"Setup video frame raw data");
            [[NSFileManager defaultManager] removeItemAtURL:_rawDataURL
                                                      error:NULL];
            if ([SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding]) {
                // Lazily create the collector that gathers per-frame raw data alongside
                // the recording for content-adaptive encoding.
                if (!_videoFrameRawDataCollector) {
                    _videoFrameRawDataCollector = [[SCVideoFrameRawDataCollector alloc] initWithPerformer:_performer];
                }
                [_videoFrameRawDataCollector prepareForCollectingVideoFrameRawDataWithRawDataURL:_rawDataURL];
            }
            [_capturerLogger logStartingStep:kSCCapturerStartingStepVideoFrameRawData];

            SCLogVideoCapturerInfo(@"Prepare to begin audio recording");

            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_queue_start_begin"];
            [self _beginAudioQueueRecordingWithCompleteHandler:^(NSError *error) {
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                                 uniqueId:_captureSessionID
                                                 stepName:@"audio_queue_start_end"];
                if (error) {
                    [_delegate managedVideoCapturer:self
                                        didGetError:error
                                            forType:SCManagedVideoCapturerInfoAudioQueueError
                                            session:sessionInfo];
                } else {
                    _audioQueueStarted = YES;
                }
                if (self.status == SCManagedVideoCapturerStatusRecording) {
                    [_delegate managedVideoCapturer:self didBeginAudioRecording:sessionInfo];
                }
            }];

            // Call this delegate first so that we have a proper state transition from
            // begin recording to finish / error.
            [_delegate managedVideoCapturer:self didBeginVideoRecording:sessionInfo];

            // We need to start with a fresh recording file, make sure it's gone
            [[NSFileManager defaultManager] removeItemAtURL:_outputURL error:NULL];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioRecording];

            SCTraceSignal(@"Setup asset writer");

            NSError *error = nil;
            _videoWriter = [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:_performer
                                                                          outputURL:self.outputURL
                                                                           delegate:self
                                                                              error:&error];
            // NOTE(review): failure is detected via `error` rather than a nil return value —
            // this relies on the writer only populating `error` on failure.
            if (error) {
                self.status = SCManagedVideoCapturerStatusError;
                _lastError = error;
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self
                                    didGetError:error
                                        forType:SCManagedVideoCapturerInfoAssetWriterError
                                        session:sessionInfo];
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }

            [_capturerLogger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration];
            if (![_videoWriter prepareWritingWithOutputSettings:finalOutputSettings]) {
                _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                 code:kSCManagedVideoCapturerCannotAddAudioVideoInput
                                             userInfo:nil];
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }
            SCTraceSignal(@"Observe asset writer status change");
            SCCAssert(_placeholderImage == nil, @"placeholderImage should be nil");
            self.status = SCManagedVideoCapturerStatusRecording;
            // Only log the recording delay event from the camera view (excluding video
            // note recording).
            if (_isVideoSnap) {
                [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsRecordingDelay
                                                   uniqueId:@"VIDEO"
                                                 parameters:@{
                                                     @"type" : @"video"
                                                 }];
            }
            _recordStartTime = CACurrentMediaTime();
        };

        [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                         stepName:@"audio_session_start_begin"];

        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
            // Audio session was already configured by the prepare call; start now.
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            startRecordingBlock();
        } else {
            // No prepared audio session: configure one first, then start recording
            // from its completion block.
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            if (_audioConfiguration) {
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration
                                                               performer:nil
                                                              completion:nil];
            }
            _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
                configureWith:audioConfiguration
                    performer:_performer
                   completion:^(NSError *error) {
                       if (error) {
                           [_delegate managedVideoCapturer:self
                                               didGetError:error
                                                   forType:SCManagedVideoCapturerInfoAudioSessionError
                                                   session:sessionInfo];
                       }
                       startRecordingBlock();
                   }];
        }
    }];
    return sessionInfo;
}

// Maps the outcome of an audio-queue retry (after a mic switch) to a delegate
// info type on success; on failure, appends the mic-switch result string to the
// error's userInfo and returns the augmented error.
- (NSError *)_handleRetryBeginAudioRecordingErrorCode:(NSInteger)errorCode
                                                error:(NSError *)error
                                            micResult:(NSDictionary *)resultInfo
{
    SCTraceStart();
    NSString *resultStr = SC_CAST_TO_CLASS_OR_NIL(resultInfo[SCAudioSessionRetryDataSourceInfoKey], NSString);
    BOOL changeMicSuccess = [resultInfo[SCAudioSessionRetryDataSourceResultKey] boolValue];
    if (!error) {
        SCManagedVideoCapturerInfoType type = SCManagedVideoCapturerInfoAudioQueueRetrySuccess;
        if (changeMicSuccess) {
            if (errorCode == kSCAudioQueueErrorWildCard) {
                type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue;
            } else if (errorCode == kSCAudioQueueErrorHardware) {
                type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware;
            }
        }
        [_delegate managedVideoCapturer:self didGetError:nil forType:type session:self.activeSession];
    } else {
        error = [self _appendInfo:resultStr forInfoKey:@"retry_datasource_result" toError:error];
        SCLogVideoCapturerError(@"Retry setting audio session failed with error:%@", error);
    }
    return error;
}

// YES if `errorCode` indicates a broken (bottom) microphone.
- (BOOL)_isBottomMicBrokenCode:(NSInteger)errorCode
{
    // We consider both -50 and 1852797029 as broken-microphone cases.
    return (errorCode == kSCAudioQueueErrorWildCard || errorCode == kSCAudioQueueErrorHardware);
}

// Starts the audio queue; on a broken-bottom-mic error while a recording is
// pending or in progress, switches to the front mic and retries once before
// reporting the final result through `block`.
- (void)_beginAudioQueueRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    SCAssert(block, @"block can not be nil");
    @weakify(self);
    void
    (^beginAudioBlock)(NSError *error) = ^(NSError *error) {
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [_performer performImmediatelyIfCurrentPerformer:^{

            SCTraceStart();
            NSInteger errorCode = error.code;
            // Retry only for broken-mic error codes, and only while a recording is
            // still pending or in progress.
            if ([self _isBottomMicBrokenCode:errorCode] &&
                (self.status == SCManagedVideoCapturerStatusReadyForRecording ||
                 self.status == SCManagedVideoCapturerStatusRecording)) {

                SCLogVideoCapturerError(@"Start to retry begin audio queue (error code: %@)", @(errorCode));

                // Use the front microphone to retry.
                NSDictionary *resultInfo = [[SCAudioSession sharedInstance] tryUseFrontMicWithErrorCode:errorCode];
                [self _retryRequestRecordingWithCompleteHandler:^(NSError *error) {
                    // Then retry the audio queue again.
                    [_audioCaptureSession
                        beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                      completionHandler:^(NSError *innerError) {
                                                          NSError *modifyError = [self
                                                              _handleRetryBeginAudioRecordingErrorCode:errorCode
                                                                                                 error:innerError
                                                                                             micResult:resultInfo];
                                                          block(modifyError);
                                                      }];
                }];

            } else {
                block(error);
            }
        }];
    };
    [_audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                        completionHandler:^(NSError *error) {
                                                            beginAudioBlock(error);
                                                        }];
}

// This method must not change the nullability of `error`; it should only either
// append info into userInfo, or return the NSError as it is.
- (NSError *)_appendInfo:(NSString *)infoStr forInfoKey:(NSString *)infoKey toError:(NSError *)error
{
    if (!error || infoStr.length == 0 || infoKey.length == 0 || error.domain.length == 0) {
        return error;
    }
    NSMutableDictionary *errorInfo = [[error userInfo] mutableCopy];
    errorInfo[infoKey] = infoStr.length > 0 ? infoStr : @"(null)";

    return [NSError errorWithDomain:error.domain code:error.code userInfo:errorInfo];
}

// Relinquishes the current audio configuration token and requests a fresh one
// built from the same SCAudioConfiguration; reports any configuration error to
// the delegate before invoking `block`.
- (void)_retryRequestRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    if (_audioConfiguration) {
        [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
    }
    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
        configureWith:_audioConfiguration.configuration
            performer:_performer
           completion:^(NSError *error) {
               if (error) {
                   [_delegate managedVideoCapturer:self
                                       didGetError:error
                                           forType:SCManagedVideoCapturerInfoAudioSessionError
                                           session:sessionInfo];
               }
               if (block) {
                   block(error);
               }
           }];
}

#pragma SCCapturerBufferedVideoWriterDelegate
// NOTE(review): "#pragma mark -" was probably intended above; as written the
// pragma is ignored by Xcode's jump bar.

// If writing failed, we call the delegate methods and release everything else we
// have — on the output queue, obviously.
- (void)videoWriterDidFailWritingWithError:(NSError *)error
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
        [_outputURL reloadAssetKeys];
        [self _cleanup];
        [self _disposeAudioRecording];
        self.status = SCManagedVideoCapturerStatusError;
        _lastError = error;
        _placeholderImage = nil;
        [_delegate managedVideoCapturer:self
                            didGetError:error
                                forType:SCManagedVideoCapturerInfoAssetWriterError
                                session:sessionInfo];
        [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
    }];
}

// Creates the recorded-video promise and tells the delegate recording is about
// to stop, so the UI can continue preview processing against the future.
- (void)_willStopRecording
{
    if (self.status == SCManagedVideoCapturerStatusRecording) {
        // To notify the UI to continue the preview processing.
        SCQueuePerformer *promisePerformer =
            [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerPromiseQueueLabel
                                   qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                          queueType:DISPATCH_QUEUE_SERIAL
                                            context:SCQueuePerformerContextCamera];
        _recordedVideoPromise = [[SCPromise alloc] initWithPerformer:promisePerformer];
        [_delegate managedVideoCapturer:self
            willStopWithRecordedVideoFuture:_recordedVideoPromise.future
                                  videoSize:_outputSize
                           placeholderImage:_placeholderImage
                                    session:self.activeSession];
    }
}

- (void)_stopRecording
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue");
    // Reset the stop session as well as the stop time.
    ++_stopSession;
    _stopTime = kCFAbsoluteTimeIntervalSince1970;
    SCPromise<id<SCManagedRecordedVideo>> *recordedVideoPromise = _recordedVideoPromise;
    _recordedVideoPromise = nil;
    // Capture the session id so asynchronous completion blocks can detect that a
    // newer session superseded this one.
    sc_managed_capturer_recording_session_t sessionId = _sessionId;
    if (self.status == SCManagedVideoCapturerStatusRecording) {
        self.status = SCManagedVideoCapturerStatusIdle;
        if (CMTIME_IS_VALID(_endSessionTime)) {
            // ("Hanlder" sic — matches the selector declared by SCCapturerBufferedVideoWriter.)
            [_videoWriter
                finishWritingAtSourceTime:_endSessionTime
                    withCompletionHanlder:^{
                        // actually, make sure everything happens on outputQueue
                        [_performer performImmediatelyIfCurrentPerformer:^{
                            if (sessionId != _sessionId) {
                                SCLogVideoCapturerError(@"SessionId mismatch: before: %@, after: %@", @(sessionId),
                                                        @(_sessionId));
                                return;
                            }
                            [self _disposeAudioRecording];
                            // Log the video snap recording success event w/ parameters, not including video
                            // note
                            if (_isVideoSnap) {
                                [SnapVideoMetadata logVideoEvent:kSCCameraMetricsVideoRecordingSuccess
                                                   videoSettings:_videoOutputSettings
                                                          isSave:NO];
                            }
                            void (^stopRecordingCompletionBlock)(NSURL *) = ^(NSURL *rawDataURL) {
                                SCAssert([_performer isCurrentPerformer], @"Needs to be on the performing queue");
                                SCVideoCaptureSessionInfo sessionInfo = self.activeSession;

                                [self _cleanup];

                                [[SCLogger sharedInstance] logTimedEventStart:@"SNAP_VIDEO_SIZE_LOADING"
                                                                     uniqueId:@""
                                                                isUniqueEvent:NO];
                                CGSize videoSize =
                                    [SnapVideoMetadata videoSizeForURL:_outputURL waitWhileLoadingTracksIfNeeded:YES];
                                [[SCLogger sharedInstance] logTimedEventEnd:@"SNAP_VIDEO_SIZE_LOADING"
                                                                   uniqueId:@""
                                                                 parameters:nil];
                                // Log an error if the video file is not really ready.
                                if (videoSize.width == 0.0 || videoSize.height == 0.0) {
                                    _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                                     code:kSCManagedVideoCapturerZeroVideoSize
                                                                 userInfo:nil];
                                    [recordedVideoPromise completeWithError:_lastError];
                                    [_delegate managedVideoCapturer:self
                                                   didFailWithError:_lastError
                                                            session:sessionInfo];
                                    _placeholderImage = nil;
                                    return;
                                }
                                // If the video duration is too short, the future object will complete
                                // with error as well
                                SCManagedRecordedVideo *recordedVideo =
                                    [[SCManagedRecordedVideo alloc] initWithVideoURL:_outputURL
                                                                 rawVideoDataFileURL:_rawDataURL
                                                                    placeholderImage:_placeholderImage
                                                                 isFrontFacingCamera:_isFrontFacingCamera];
                                [recordedVideoPromise completeWithValue:recordedVideo];
                                [_delegate managedVideoCapturer:self
                                    didSucceedWithRecordedVideo:recordedVideo
                                                        session:sessionInfo];
                                _placeholderImage = nil;
                            };

                            // Drain raw-frame collection first if it was running, then finish up.
                            if (_videoFrameRawDataCollector) {
                                [_videoFrameRawDataCollector
                                    drainFrameDataCollectionWithCompletionHandler:^(NSURL *rawDataURL) {
                                        stopRecordingCompletionBlock(rawDataURL);
                                    }];
                            } else {
                                stopRecordingCompletionBlock(nil);
                            }
                        }];
                    }];

        } else {
            // The end-session time was never set — fail with an empty-frame error.
            [self _disposeAudioRecording];
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            self.status = SCManagedVideoCapturerStatusError;
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerEmptyFrame
                                         userInfo:nil];
            _placeholderImage = nil;
            [recordedVideoPromise completeWithError:_lastError];
            [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
        }
    } else {
        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord ||
            self.status == SCManagedVideoCapturerStatusReadyForRecording) {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopBeforeStart
                                         userInfo:nil];
        } else {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopWithoutStart
                                         userInfo:nil];
        }
        SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
        [self _cleanup];
        _placeholderImage = nil;
        if (_audioConfiguration) {
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
            _audioConfiguration = nil;
        }
        [recordedVideoPromise completeWithError:_lastError];
        [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
        self.status = SCManagedVideoCapturerStatusIdle;
        [_capturerLogger logEventIfStartingTooSlow];
    }
}

// Public stop entry point: records the stop timestamp, notifies the delegate via
// -_willStopRecording, then schedules -_stopRecording after the stop deadline
// (skipped if a newer stop session has started in the meantime).
- (void)stopRecordingAsynchronously
{
    SCTraceStart();
    NSTimeInterval stopTime = CACurrentMediaTime();
    [_performer performImmediatelyIfCurrentPerformer:^{
        _stopTime = stopTime;
        NSInteger stopSession = _stopSession;
        [self _willStopRecording];
        [_performer perform:^{
            // If we haven't stopped yet, call the stop now nevertheless.
            if (stopSession == _stopSession) {
                [self _stopRecording];
            }
        }
                      after:kSCManagedVideoCapturerStopRecordingDeadline];
    }];
}

// Cancels an in-flight or pending recording: tears down the writer and audio
// session, cleans up state, and notifies the delegate of the cancellation.
- (void)cancelRecordingAsynchronously
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        SCLogVideoCapturerInfo(@"Cancel recording. status: %lu", (unsigned long)self.status);
        if (self.status == SCManagedVideoCapturerStatusRecording) {
            self.status = SCManagedVideoCapturerStatusIdle;
            [self _disposeAudioRecording];
            [_videoWriter cancelWriting];
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            _placeholderImage = nil;
            [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo];
        } else if ((self.status == SCManagedVideoCapturerStatusPrepareToRecord) ||
                   (self.status == SCManagedVideoCapturerStatusReadyForRecording)) {
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            self.status = SCManagedVideoCapturerStatusIdle;
            _placeholderImage = nil;
            if (_audioConfiguration) {
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration
                                                               performer:nil
                                                              completion:nil];
                _audioConfiguration = nil;
            }
            [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo];
        }
        [_capturerLogger logEventIfStartingTooSlow];
    }];
}

- (void)addTimedTask:(SCTimedTask *)task
{
    [_performer performImmediatelyIfCurrentPerformer:^{
        // Only allow adding observers when we are not recording.
        if (!self->_timeObserver) {
            self->_timeObserver = [SCManagedVideoCapturerTimeObserver new];
        }
        [self->_timeObserver addTimedTask:task];
        SCLogVideoCapturerInfo(@"Added timetask: %@", task);
    }];
}

- (void)clearTimedTasks
{
    // _timeObserver will be initialized lazily when adding timed tasks.
    SCLogVideoCapturerInfo(@"Clearing time observer");
    [_performer performImmediatelyIfCurrentPerformer:^{
        if (self->_timeObserver) {
            self->_timeObserver = nil;
        }
    }];
}

// Resets per-recording state: tears down the writer, drops the time observer,
// and invalidates the session timestamps and identifiers.
- (void)_cleanup
{
    [_videoWriter cleanUp];
    _timeObserver = nil;

    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before cleanup: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));

    _startSessionTime = kCMTimeInvalid;
    _endSessionTime = kCMTimeInvalid;
    _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    _sessionId = 0;
    _captureSessionID = nil;
    _audioQueueStarted = NO;
}

// Disposes the audio queue, then relinquishes the audio session configuration
// from its completion handler. Must be called on _performer.
- (void)_disposeAudioRecording
{
    SCLogVideoCapturerInfo(@"Disposing audio recording");
    SCAssert([_performer isCurrentPerformer], @"");
    // Capture the token locally so the completion block relinquishes exactly the
    // configuration that was active at dispose time.
    SCAudioConfigurationToken *audioConfiguration = _audioConfiguration;
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                     uniqueId:_captureSessionID
                                     stepName:@"audio_queue_stop_begin"];
    NSString *captureSessionID = _captureSessionID;
    [_audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:^{
        [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                         uniqueId:captureSessionID
                                         stepName:@"audio_queue_stop_end"];
        SCLogVideoCapturerInfo(@"Did dispose audio recording");
        if (audioConfiguration) {
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:captureSessionID
                                             stepName:@"audio_session_stop_begin"];
            [SCAudioSessionExperimentAdapter
                relinquishConfiguration:audioConfiguration
                              performer:_performer
completion:^(NSError *_Nullable error) {\n                                 [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay\n                                                                  uniqueId:captureSessionID\n                                                                  stepName:@\"audio_session_stop_end\"];\n                                 [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsAudioDelay\n                                                                    uniqueId:captureSessionID\n                                                                  parameters:nil];\n                             }];\n        }\n    }];\n    _audioConfiguration = nil;\n}\n\n- (CIContext *)ciContext\n{\n    if (!_ciContext) {\n        _ciContext = [CIContext contextWithOptions:nil];\n    }\n    return _ciContext;\n}\n\n#pragma mark - SCAudioCaptureSessionDelegate\n\n- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession\n      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SCTraceStart();\n    if (self.status != SCManagedVideoCapturerStatusRecording) {\n        return;\n    }\n    CFRetain(sampleBuffer);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        if (self.status == SCManagedVideoCapturerStatusRecording) {\n            // Audio always follows video, there is no other way around this :)\n            if (_hasWritten && CACurrentMediaTime() - _recordStartTime <= _maxDuration) {\n                [self _processAudioSampleBuffer:sampleBuffer];\n                [_videoWriter appendAudioSampleBuffer:sampleBuffer];\n            }\n        }\n        CFRelease(sampleBuffer);\n    }];\n}\n\n#pragma mark - SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    
SCTraceStart();\n    if (self.status != SCManagedVideoCapturerStatusRecording) {\n        return;\n    }\n    CFRetain(sampleBuffer);\n    [_performer performImmediatelyIfCurrentPerformer:^{\n        // the following check will allow the capture pipeline to drain\n        if (CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) > _stopTime) {\n            [self _stopRecording];\n        } else {\n            if (self.status == SCManagedVideoCapturerStatusRecording) {\n                _isFrontFacingCamera = (devicePosition == SCManagedCaptureDevicePositionFront);\n                CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n                if (CMTIME_IS_VALID(presentationTime)) {\n                    SCLogVideoCapturerInfo(@\"Obtained video data source at time %lld\", presentationTime.value);\n                } else {\n                    SCLogVideoCapturerInfo(@\"Obtained video data source with an invalid time\");\n                }\n                if (!_hasWritten) {\n                    // Start writing!\n                    [_videoWriter startWritingAtSourceTime:presentationTime];\n                    [_capturerLogger endLoggingForStarting];\n                    _startSessionTime = presentationTime;\n                    _startSessionRealTime = CACurrentMediaTime();\n                    SCLogVideoCapturerInfo(@\"First frame processed %f seconds after presentation Time\",\n                                           _startSessionRealTime - CMTimeGetSeconds(presentationTime));\n                    _hasWritten = YES;\n                    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CMTimeGetSeconds(presentationTime)];\n                    [[SCCoreCameraLogger sharedInstance]\n                        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CMTimeGetSeconds(\n                                                                                          presentationTime)];\n           
         SCLogVideoCapturerInfo(@\"SCVideoCaptureSessionInfo after first frame: %@\",\n                                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));\n                }\n                // Only respect video end session time, audio can be cut off, not video,\n                // not video\n                if (CMTIME_IS_INVALID(_endSessionTime)) {\n                    _endSessionTime = presentationTime;\n                } else {\n                    _endSessionTime = CMTimeMaximum(_endSessionTime, presentationTime);\n                }\n                if (CACurrentMediaTime() - _recordStartTime <= _maxDuration) {\n                    [_videoWriter appendVideoSampleBuffer:sampleBuffer];\n                    [self _processVideoSampleBuffer:sampleBuffer];\n                }\n                if (_timeObserver) {\n                    [_timeObserver processTime:CMTimeSubtract(presentationTime, _startSessionTime)\n                        sessionStartTimeDelayInSecond:_startSessionRealTime - CMTimeGetSeconds(_startSessionTime)];\n                }\n            }\n        }\n        CFRelease(sampleBuffer);\n    }];\n}\n\n- (void)_generatePlaceholderImageWithPixelBuffer:(CVImageBufferRef)pixelBuffer metaData:(NSDictionary *)metadata\n{\n    SCTraceStart();\n    CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer);\n    if (imageBuffer) {\n        dispatch_async(SCPlaceholderImageGenerationQueue(), ^{\n            UIImage *placeholderImage = [UIImage imageWithPixelBufferRef:imageBuffer\n                                                             backingType:UIImageBackingTypeCGImage\n                                                             orientation:UIImageOrientationRight\n                                                                 context:[self ciContext]];\n            placeholderImage =\n                SCCropImageToTargetAspectRatio(placeholderImage, SCManagedCapturedImageAndVideoAspectRatio());\n       
     [_performer performImmediatelyIfCurrentPerformer:^{\n                // After processing, assign it back.\n                if (self.status == SCManagedVideoCapturerStatusRecording) {\n                    _placeholderImage = placeholderImage;\n                    // Check video frame health by placeholder image\n                    [[SCManagedFrameHealthChecker sharedInstance]\n                        checkVideoHealthForCaptureFrameImage:placeholderImage\n                                                    metedata:metadata\n                                            captureSessionID:_captureSessionID];\n                }\n                CVPixelBufferRelease(imageBuffer);\n            }];\n        });\n    }\n}\n\n#pragma mark - Pixel Buffer methods\n\n- (void)_processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    SC_GUARD_ELSE_RETURN(sampleBuffer);\n    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n    BOOL shouldGeneratePlaceholderImage = CMTimeCompare(presentationTime, _startSessionTime) == 0;\n\n    CVImageBufferRef outputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n    if (outputPixelBuffer) {\n        [self _addVideoRawDataWithPixelBuffer:outputPixelBuffer];\n        if (shouldGeneratePlaceholderImage) {\n            NSDictionary *extraInfo = [_delegate managedVideoCapturerGetExtraFrameHealthInfo:self];\n            NSDictionary *metadata =\n                [[[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer extraInfo:extraInfo]\n                    copy];\n            [self _generatePlaceholderImageWithPixelBuffer:outputPixelBuffer metaData:metadata];\n        }\n    }\n\n    [_delegate managedVideoCapturer:self\n         didAppendVideoSampleBuffer:sampleBuffer\n              presentationTimestamp:CMTimeSubtract(presentationTime, _startSessionTime)];\n}\n\n- (void)_processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    [_announcer 
managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];\n    if (!CMTIME_IS_VALID(self.firstWrittenAudioBufferDelay)) {\n        self.firstWrittenAudioBufferDelay =\n            CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), _startSessionTime);\n    }\n}\n\n- (void)_addVideoRawDataWithPixelBuffer:(CVImageBufferRef)pixelBuffer\n{\n    if (_videoFrameRawDataCollector && [SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding] &&\n        ((_rawDataFrameNum % kSCVideoContentComplexitySamplingRate) == 0) && (_rawDataFrameNum > 0)) {\n        if (_videoFrameRawDataCollector) {\n            CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer);\n            [_videoFrameRawDataCollector collectVideoFrameRawDataWithImageBuffer:imageBuffer\n                                                                        frameNum:_rawDataFrameNum\n                                                                      completion:^{\n                                                                          CVPixelBufferRelease(imageBuffer);\n                                                                      }];\n        }\n    }\n    _rawDataFrameNum++;\n}\n\n#pragma mark - SCManagedAudioDataSource\n\n- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener\n{\n    [_announcer addListener:listener];\n}\n\n- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener\n{\n    [_announcer removeListener:listener];\n}\n\n- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration\n{\n    SCAssertFail(@\"Controlled by recorder\");\n}\n\n- (void)stopStreaming\n{\n    SCAssertFail(@\"Controlled by recorder\");\n}\n\n- (BOOL)isStreaming\n{\n    return self.status == SCManagedVideoCapturerStatusRecording;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerHandler.h",
    "content": "//\n//  SCManagedVideoCapturerHandler.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedVideoCapturer.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n@interface SCManagedVideoCapturerHandler : NSObject <SCManagedVideoCapturerDelegate>\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerHandler.m",
    "content": "//\n//  SCManagedVideoCapturerHandler.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 11/12/2017.\n//\n\n#import \"SCManagedVideoCapturerHandler.h\"\n\n#import \"SCCaptureResource.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerLensAPI.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerSampleMetadata.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedDeviceCapacityAnalyzer.h\"\n#import \"SCManagedFrontFlashController.h\"\n#import \"SCManagedVideoFileStreamer.h\"\n#import \"SCManagedVideoFrameSampler.h\"\n#import \"SCManagedVideoStreamer.h\"\n\n#import <SCCameraFoundation/SCManagedDataSource.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCManagedVideoCapturerHandler () {\n    __weak SCCaptureResource *_captureResource;\n}\n@end\n\n@implementation SCManagedVideoCapturerHandler\n\n- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(captureResource, @\"\");\n        _captureResource = captureResource;\n    }\n    return self;\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did begin video recording. 
sessionId:%u\", sessionInfo.sessionId);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                 didBeginVideoRecording:state\n                                                session:sessionInfo];\n        });\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did begin audio recording. sessionId:%u\", sessionInfo.sessionId);\n    [_captureResource.queuePerformer perform:^{\n        if ([_captureResource.fileInputDecider shouldProcessFileInput]) {\n            [_captureResource.videoDataSource startStreaming];\n        }\n        SCTraceStart();\n        SCManagedCapturerState *state = [_captureResource.state copy];\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                 didBeginAudioRecording:state\n                                                session:sessionInfo];\n        });\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture\n                          videoSize:(CGSize)videoSize\n                   placeholderImage:(UIImage *)placeholderImage\n                            session:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Will stop recording. 
sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)\",\n                      sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (_captureResource.videoRecording) {\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            // Then, sync back to main thread to notify will finish recording\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                        willFinishRecording:state\n                                                    session:sessionInfo\n                                        recordedVideoFuture:recordedVideoFuture\n                                                  videoSize:videoSize\n                                           placeholderImage:placeholderImage];\n            });\n        }\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo\n                     session:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did succeed recording. 
sessionId:%u recordedVideo:%@\", sessionInfo.sessionId, recordedVideo);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (_captureResource.videoRecording) {\n            [self _videoRecordingCleanup];\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            // Then, sync back to main thread to notify the finish recording\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                         didFinishRecording:state\n                                                    session:sessionInfo\n                                              recordedVideo:recordedVideo];\n            });\n        }\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n            didFailWithError:(NSError *)error\n                     session:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did fail recording. sessionId:%u\", sessionInfo.sessionId);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (_captureResource.videoRecording) {\n            [self _videoRecordingCleanup];\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                           didFailRecording:state\n                                                    session:sessionInfo\n                                                      error:error];\n            });\n        }\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did cancel recording. 
sessionId:%u\", sessionInfo.sessionId);\n    [_captureResource.queuePerformer perform:^{\n        SCTraceStart();\n        if (_captureResource.videoRecording) {\n            [self _videoRecordingCleanup];\n            SCManagedCapturerState *state = [_captureResource.state copy];\n            runOnMainThreadAsynchronously(^{\n                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                         didCancelRecording:state\n                                                    session:sessionInfo];\n            });\n        }\n    }];\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n                 didGetError:(NSError *)error\n                     forType:(SCManagedVideoCapturerInfoType)type\n                     session:(SCVideoCaptureSessionInfo)sessionInfo\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Did get error. sessionId:%u errorType:%lu, error:%@\", sessionInfo.sessionId, (long)type, error);\n    [_captureResource.queuePerformer perform:^{\n        runOnMainThreadAsynchronously(^{\n            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                                            didGetError:error\n                                                forType:type\n                                                session:sessionInfo];\n        });\n    }];\n}\n\n- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer\n{\n    SCTraceODPCompatibleStart(2);\n    if (_captureResource.state.lensesActive) {\n        return @{\n            @\"lens_active\" : @(YES),\n            @\"lens_id\" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null])\n        };\n    }\n    return nil;\n}\n\n- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer\n  didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n       
presentationTimestamp:(CMTime)presentationTimestamp\n{\n    CFRetain(sampleBuffer);\n    [_captureResource.queuePerformer perform:^{\n        SCManagedCapturerSampleMetadata *sampleMetadata =\n            [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp\n                                                                       fieldOfView:_captureResource.device.fieldOfView];\n        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                         didAppendVideoSampleBuffer:sampleBuffer\n                                     sampleMetadata:sampleMetadata];\n        CFRelease(sampleBuffer);\n    }];\n}\n\n- (void)_videoRecordingCleanup\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert(_captureResource.videoRecording, @\"clean up function only can be called if the \"\n                                              @\"video recording is still in progress.\");\n    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @\"\");\n    SCLogCapturerInfo(@\"Video recording cleanup. 
previous state:%@\", _captureResource.state);\n    [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];\n    if (_captureResource.videoFrameSampler) {\n        SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler;\n        _captureResource.videoFrameSampler = nil;\n        [_captureResource.announcer removeListener:sampler];\n    }\n    // Add back other listeners to video streamer\n    [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];\n    if (!_captureResource.state.torchActive) {\n        // We should turn off torch for the device that we specifically turned on\n        // for recording\n        [_captureResource.device setTorchActive:NO];\n        if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {\n            _captureResource.frontFlashController.torchActive = NO;\n        }\n    }\n\n    // Unlock focus on both front and back camera if they were locked.\n    // Even if ARKit was being used during recording, it'll be shut down by the time we get here\n    // So DON'T match the ARKit check we use around [_ setRecording:YES]\n    SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];\n    SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];\n    [front setRecording:NO];\n    [back setRecording:NO];\n    _captureResource.videoRecording = NO;\n    if (_captureResource.state.lensesActive) {\n        BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming;\n        [_captureResource.lensProcessingCore setModifySource:modifySource];\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerLogger.h",
    "content": "//\n//  SCCaptureLogger.h\n//  Snapchat\n//\n//  Created by Pinlin on 12/04/2017.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\nstatic NSString *const kSCCapturerStartingStepAudioSession = @\"audio_session\";\nstatic NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @\"transcoding_video_bitrate\";\nstatic NSString *const kSCCapturerStartingStepOutputSettings = @\"output_settings\";\nstatic NSString *const kSCCapturerStartingStepVideoFrameRawData = @\"video_frame_raw_data\";\nstatic NSString *const kSCCapturerStartingStepAudioRecording = @\"audio_recording\";\nstatic NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @\"asset_writer_config\";\nstatic NSString *const kSCCapturerStartingStepStartingWriting = @\"start_writing\";\nstatic NSString *const kCapturerStartingTotalDelay = @\"total_delay\";\n\n@interface SCManagedVideoCapturerLogger : NSObject\n\n- (void)prepareForStartingLog;\n- (void)logStartingStep:(NSString *)stepName;\n- (void)endLoggingForStarting;\n- (void)logEventIfStartingTooSlow;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerLogger.m",
    "content": "//\n//  SCManagedVideoCapturerLogger.m\n//  Snapchat\n//\n//  Created by Pinlin on 12/04/2017.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoCapturerLogger.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\n@import QuartzCore;\n\n@interface SCManagedVideoCapturerLogger () {\n    // For time profiles metric during start recording\n    NSMutableDictionary *_startingStepsDelayTime;\n    NSTimeInterval _beginStartTime;\n    NSTimeInterval _lastCheckpointTime;\n    NSTimeInterval _startedTime;\n}\n\n@end\n\n@implementation SCManagedVideoCapturerLogger\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        _startingStepsDelayTime = [NSMutableDictionary dictionary];\n    }\n    return self;\n}\n\n- (void)prepareForStartingLog\n{\n    _beginStartTime = CACurrentMediaTime();\n    _lastCheckpointTime = _beginStartTime;\n    [_startingStepsDelayTime removeAllObjects];\n}\n\n- (void)logStartingStep:(NSString *)stepname\n{\n    SCAssert(_beginStartTime > 0, @\"logger is not ready yet, please call prepareForStartingLog at first\");\n    NSTimeInterval currentCheckpointTime = CACurrentMediaTime();\n    _startingStepsDelayTime[stepname] = @(currentCheckpointTime - _lastCheckpointTime);\n    _lastCheckpointTime = currentCheckpointTime;\n}\n\n- (void)endLoggingForStarting\n{\n    SCAssert(_beginStartTime > 0, @\"logger is not ready yet, please call prepareForStartingLog at first\");\n    _startedTime = CACurrentMediaTime();\n    [self logStartingStep:kSCCapturerStartingStepStartingWriting];\n    _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime);\n}\n\n- (void)logEventIfStartingTooSlow\n{\n    if (_beginStartTime > 0) {\n        if (_startingStepsDelayTime.count == 0) {\n            // It should not be here. 
We only need to log once.\n            return;\n        }\n        SCLogGeneralWarning(@\"Capturer starting delay(in second):%f\", _startedTime - _beginStartTime);\n        [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime];\n        // Clean all delay times after logging\n        [_startingStepsDelayTime removeAllObjects];\n        _beginStartTime = 0;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerOutputSettings.h",
    "content": "// 42f6113daff3eebf06d809a073c99651867c42ea\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedVideoCapturerOutputType.h\"\n\n#import <AvailabilityMacros.h>\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCManagedVideoCapturerOutputSettings <NSObject, NSCoding, NSCopying>\n\n@property (nonatomic, assign, readonly) CGFloat width;\n\n@property (nonatomic, assign, readonly) CGFloat height;\n\n@property (nonatomic, assign, readonly) CGFloat videoBitRate;\n\n@property (nonatomic, assign, readonly) CGFloat audioBitRate;\n\n@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;\n\n@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;\n\n@end\n\n@interface SCManagedVideoCapturerOutputSettings : NSObject <SCManagedVideoCapturerOutputSettings>\n\n@property (nonatomic, assign, readonly) CGFloat width;\n\n@property (nonatomic, assign, readonly) CGFloat height;\n\n@property (nonatomic, assign, readonly) CGFloat videoBitRate;\n\n@property (nonatomic, assign, readonly) CGFloat audioBitRate;\n\n@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;\n\n@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;\n\n- (instancetype)initWithWidth:(CGFloat)width\n                       height:(CGFloat)height\n                 videoBitRate:(CGFloat)videoBitRate\n                 audioBitRate:(CGFloat)audioBitRate\n             keyFrameInterval:(NSUInteger)keyFrameInterval\n                   outputType:(SCManagedVideoCapturerOutputType)outputType;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerOutputSettings.m",
    "content": "// 42f6113daff3eebf06d809a073c99651867c42ea\n// Generated by the value-object.rb  DO NOT EDIT!!\n\n#import \"SCManagedVideoCapturerOutputSettings.h\"\n\n#import <SCFoundation/SCValueObjectHelpers.h>\n\n#import <FastCoding/FastCoder.h>\n\n@implementation SCManagedVideoCapturerOutputSettings\n\nstatic ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0];\nstatic BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets;\n\n- (instancetype)initWithWidth:(CGFloat)width\n                       height:(CGFloat)height\n                 videoBitRate:(CGFloat)videoBitRate\n                 audioBitRate:(CGFloat)audioBitRate\n             keyFrameInterval:(NSUInteger)keyFrameInterval\n                   outputType:(SCManagedVideoCapturerOutputType)outputType\n{\n    self = [super init];\n    if (self) {\n        _width = width;\n        _height = height;\n        _videoBitRate = videoBitRate;\n        _audioBitRate = audioBitRate;\n        _keyFrameInterval = keyFrameInterval;\n        _outputType = outputType;\n    }\n    return self;\n}\n\n#pragma mark - NSCopying\n\n- (instancetype)copyWithZone:(NSZone *)zone\n{\n    // Immutable object, bypass copy\n    return self;\n}\n\n#pragma mark - NSCoding\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder\n{\n    self = [super init];\n    if (self) {\n        _width = [aDecoder decodeFloatForKey:@\"width\"];\n        _height = [aDecoder decodeFloatForKey:@\"height\"];\n        _videoBitRate = [aDecoder decodeFloatForKey:@\"videoBitRate\"];\n        _audioBitRate = [aDecoder decodeFloatForKey:@\"audioBitRate\"];\n        _keyFrameInterval = [[aDecoder decodeObjectForKey:@\"keyFrameInterval\"] unsignedIntegerValue];\n        _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@\"outputType\"];\n    }\n    return self;\n}\n\n- (void)encodeWithCoder:(NSCoder *)aCoder\n{\n    [aCoder encodeFloat:_width forKey:@\"width\"];\n    [aCoder encodeFloat:_height forKey:@\"height\"];\n    [aCoder 
encodeFloat:_videoBitRate forKey:@\"videoBitRate\"];\n    [aCoder encodeFloat:_audioBitRate forKey:@\"audioBitRate\"];\n    [aCoder encodeObject:@(_keyFrameInterval) forKey:@\"keyFrameInterval\"];\n    [aCoder encodeInteger:(NSInteger)_outputType forKey:@\"outputType\"];\n}\n\n#pragma mark - FasterCoding\n\n- (BOOL)preferFasterCoding\n{\n    return YES;\n}\n\n- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder\n{\n    [fasterCoder encodeFloat64:_audioBitRate];\n    [fasterCoder encodeFloat64:_height];\n    [fasterCoder encodeUInt64:_keyFrameInterval];\n    [fasterCoder encodeSInt32:_outputType];\n    [fasterCoder encodeFloat64:_videoBitRate];\n    [fasterCoder encodeFloat64:_width];\n}\n\n- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder\n{\n    _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64];\n    _height = (CGFloat)[fasterDecoder decodeFloat64];\n    _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64];\n    _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32];\n    _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64];\n    _width = (CGFloat)[fasterDecoder decodeFloat64];\n}\n\n- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 54425104364133881ULL:\n        _outputType = (SCManagedVideoCapturerOutputType)val;\n        break;\n    }\n}\n\n- (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 47327990652274883ULL:\n        _keyFrameInterval = (NSUInteger)val;\n        break;\n    }\n}\n\n- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key\n{\n    switch (key) {\n    case 50995534680662654ULL:\n        _audioBitRate = (CGFloat)val;\n        break;\n    case 11656660716170763ULL:\n        _height = (CGFloat)val;\n        break;\n    case 29034524155663716ULL:\n        _videoBitRate = (CGFloat)val;\n        break;\n    case 30689178641753681ULL:\n        _width = (CGFloat)val;\n        break;\n    }\n}\n\n+ 
(uint64_t)fasterCodingVersion\n{\n    return 14709152111692666517ULL;\n}\n\n+ (uint64_t *)fasterCodingKeys\n{\n    static uint64_t keys[] = {\n        6 /* Total */,\n        FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64),\n        FC_ENCODE_KEY_TYPE(11656660716170763, FCEncodeTypeFloat64),\n        FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64),\n        FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32),\n        FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64),\n        FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64),\n    };\n    return keys;\n}\n\n#pragma mark - isEqual\n\n- (BOOL)isEqual:(id)object\n{\n    if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets,\n                          sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) {\n        return NO;\n    }\n    SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object;\n    if (other->_width != _width) {\n        return NO;\n    }\n\n    if (other->_height != _height) {\n        return NO;\n    }\n\n    if (other->_videoBitRate != _videoBitRate) {\n        return NO;\n    }\n\n    if (other->_audioBitRate != _audioBitRate) {\n        return NO;\n    }\n\n    if (other->_keyFrameInterval != _keyFrameInterval) {\n        return NO;\n    }\n\n    if (other->_outputType != _outputType) {\n        return NO;\n    }\n\n    return YES;\n}\n\n- (NSUInteger)hash\n{\n    NSUInteger subhashes[] = {(NSUInteger)_width,        (NSUInteger)_height,           (NSUInteger)_videoBitRate,\n                              (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType};\n    NSUInteger result = subhashes[0];\n    for (int i = 1; i < 6; i++) {\n        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);\n        base = (~base) + (base << 18);\n        base ^= (base >> 31);\n        base *= 21;\n        base ^= (base >> 11);\n        base += 
(base << 6);\n        base ^= (base >> 22);\n        result = (NSUInteger)base;\n    }\n    return result;\n}\n\n#pragma mark - Print description in console: lldb> po #{variable name}\n\n- (NSString *)description\n{\n    NSMutableString *desc = [NSMutableString string];\n    [desc appendString:@\"{\\n\"];\n    [desc appendFormat:@\"\\twidth:%@\\n\", [@(_width) description]];\n    [desc appendFormat:@\"\\theight:%@\\n\", [@(_height) description]];\n    [desc appendFormat:@\"\\tvideoBitRate:%@\\n\", [@(_videoBitRate) description]];\n    [desc appendFormat:@\"\\taudioBitRate:%@\\n\", [@(_audioBitRate) description]];\n    [desc appendFormat:@\"\\tkeyFrameInterval:%@\\n\", [@(_keyFrameInterval) description]];\n    [desc appendFormat:@\"\\toutputType:%@\\n\", [@(_outputType) description]];\n    [desc appendString:@\"}\\n\"];\n\n    return [desc copy];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerOutputSettings.value",
    "content": "#import \"SCManagedVideoCapturerOutputType.h\"\n\ninterface SCManagedVideoCapturerOutputSettings\n    CGFloat width\n    CGFloat height\n    CGFloat videoBitRate\n    CGFloat audioBitRate\n    NSUInteger keyFrameInterval\n    enum SCManagedVideoCapturerOutputType outputType\nend"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerOutputType.h",
    "content": "//\n//  SCManagedVideoCapturerOutputType.h\n//  Snapchat\n//\n//  Created by Chao Pang on 8/8/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\ntypedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) {\n    SCManagedVideoCapturerOutputTypeVideoSnap = 0,\n    SCManagedVideoCapturerOutputTypeVideoNote,\n};\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerTimeObserver.h",
    "content": "//\n//  SCManagedVideoCapturerTimeObserver.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/3/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\n@class SCTimedTask;\n\n/*\n Class keeping track of SCTimedTasks and firing them on the main thread\n when needed.\n */\n@interface SCManagedVideoCapturerTimeObserver : NSObject\n\n- (void)addTimedTask:(SCTimedTask *_Nonnull)task;\n\n- (void)processTime:(CMTime)relativePresentationTime\n    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoCapturerTimeObserver.m",
    "content": "//\n//  SCManagedVideoCapturerTimeObserver.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/3/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoCapturerTimeObserver.h\"\n\n#import \"SCTimedTask.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCThreadHelpers.h>\n\n@implementation SCManagedVideoCapturerTimeObserver {\n    NSMutableArray<SCTimedTask *> *_tasks;\n    BOOL _isProcessing;\n}\n\n- (instancetype)init\n{\n    if (self = [super init]) {\n        _tasks = [NSMutableArray new];\n        _isProcessing = NO;\n    }\n    return self;\n}\n\n- (void)addTimedTask:(SCTimedTask *_Nonnull)task\n{\n    SCAssert(!_isProcessing,\n             @\"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started.\");\n    SCAssert(CMTIME_IS_VALID(task.targetTime),\n             @\"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time.\");\n    [_tasks addObject:task];\n    [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) {\n        return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime);\n    }];\n    SCLogGeneralInfo(@\"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu\", task,\n                     (unsigned long)_tasks.count);\n}\n\n- (void)processTime:(CMTime)relativePresentationTime\n    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond\n{\n    _isProcessing = YES;\n    SCTimedTask *last = _tasks.lastObject;\n    while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) {\n        [_tasks removeLastObject];\n        void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task;\n        last.task = nil;\n        runOnMainThreadAsynchronously(^{\n            task(relativePresentationTime, sessionStartTimeDelayInSecond);\n      
  });\n        last = _tasks.lastObject;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoFileStreamer.h",
    "content": "//\n//  SCManagedVideoFileStreamer.h\n//  Snapchat\n//\n//  Created by Alexander Grytsiuk on 3/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCCameraFoundation/SCManagedVideoDataSource.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\ntypedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer);\n\n/**\n * SCManagedVideoFileStreamer reads a video file from provided NSURL to create\n * and publish video output frames. SCManagedVideoFileStreamer also conforms\n * to SCManagedVideoDataSource allowing chained consumption of video frames.\n */\n@interface SCManagedVideoFileStreamer : NSObject <SCManagedVideoDataSource>\n\n- (instancetype)initWithPlaybackForURL:(NSURL *)URL;\n- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoFileStreamer.m",
    "content": "//\n//  SCManagedVideoFileStreamer.m\n//  Snapchat\n//\n//  Created by Alexander Grytsiuk on 3/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoFileStreamer.h\"\n\n#import \"SCManagedCapturePreviewLayerController.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCPlayer.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n\n@import AVFoundation;\n@import CoreMedia;\n\nstatic char *const kSCManagedVideoFileStreamerQueueLabel = \"com.snapchat.managed-video-file-streamer\";\n\n@interface SCManagedVideoFileStreamer () <AVPlayerItemOutputPullDelegate>\n@end\n\n@implementation SCManagedVideoFileStreamer {\n    SCManagedVideoDataSourceListenerAnnouncer *_announcer;\n    SCManagedCaptureDevicePosition _devicePosition;\n    sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler;\n\n    id _notificationToken;\n    id<SCPerforming> _performer;\n    dispatch_semaphore_t _semaphore;\n\n    CADisplayLink *_displayLink;\n    AVPlayerItemVideoOutput *_videoOutput;\n    AVPlayer *_player;\n\n    BOOL _sampleBufferDisplayEnabled;\n    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;\n}\n\n@synthesize isStreaming = _isStreaming;\n@synthesize performer = _performer;\n@synthesize videoOrientation = _videoOrientation;\n\n- (instancetype)initWithPlaybackForURL:(NSURL *)URL\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;\n        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];\n        _semaphore = dispatch_semaphore_create(1);\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel\n                                            qualityOfService:QOS_CLASS_UNSPECIFIED\n                                       
            queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextStories];\n\n        // Setup CADisplayLink which will callback displayPixelBuffer: at every vsync.\n        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];\n        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];\n        [_displayLink setPaused:YES];\n\n        // Prepare player\n        _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL];\n#if TARGET_IPHONE_SIMULATOR\n        _player.volume = 0.0;\n#endif\n        // Configure output\n        [self configureOutput];\n    }\n    return self;\n}\n\n- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController\n{\n    _sampleBufferDisplayController = sampleBufferDisplayController;\n}\n\n- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled\n{\n    _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;\n    SCLogGeneralInfo(@\"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d\", _sampleBufferDisplayEnabled);\n}\n\n- (void)setKeepLateFrames:(BOOL)keepLateFrames\n{\n    // Do nothing\n}\n\n- (BOOL)getKeepLateFrames\n{\n    // return default NO value\n    return NO;\n}\n\n- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler\n{\n    SCAssert(queue, @\"callback queue must be provided\");\n    SCAssert(completionHandler, @\"completion handler must be provided\");\n    dispatch_async(queue, completionHandler);\n}\n\n- (void)startStreaming\n{\n    SCTraceStart();\n    if (!_isStreaming) {\n        _isStreaming = YES;\n        [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem];\n        [_player play];\n    }\n}\n\n- (void)stopStreaming\n{\n    SCTraceStart();\n    if (_isStreaming) {\n        
_isStreaming = NO;\n        [_player pause];\n        [self removePlayerObservers];\n    }\n}\n\n- (void)pauseStreaming\n{\n    [self stopStreaming];\n}\n\n- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener\n{\n    SCTraceStart();\n    [_announcer addListener:listener];\n}\n\n- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener\n{\n    SCTraceStart();\n    [_announcer removeListener:listener];\n}\n\n- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    _devicePosition = devicePosition;\n}\n\n- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    _devicePosition = devicePosition;\n}\n\n- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation\n{\n    _videoOrientation = videoOrientation;\n}\n\n- (void)removeAsOutput:(AVCaptureSession *)session\n{\n    // Ignored\n}\n\n- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported\n{\n    // Ignored\n}\n\n- (void)beginConfiguration\n{\n    // Ignored\n}\n\n- (void)commitConfiguration\n{\n    // Ignored\n}\n\n- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest\n{\n    // Ignored\n}\n\n#pragma mark - AVPlayerItemOutputPullDelegate\n\n- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender\n{\n    if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) {\n        [self configureOutput];\n    }\n    [_displayLink setPaused:NO];\n}\n\n#pragma mark - Internal\n\n- (void)displayLinkCallback:(CADisplayLink *)sender\n{\n    CFTimeInterval nextVSync = [sender timestamp] + [sender duration];\n\n    CMTime time = [_videoOutput itemTimeForHostTime:nextVSync];\n    if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) {\n        [_performer perform:^{\n            if ([_videoOutput hasNewPixelBufferForItemTime:time]) {\n                CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time 
itemTimeForDisplay:NULL];\n                if (pixelBuffer != NULL) {\n                    if (_nextPixelBufferHandler) {\n                        _nextPixelBufferHandler(pixelBuffer);\n                        _nextPixelBufferHandler = nil;\n                    } else {\n                        CMSampleBufferRef sampleBuffer =\n                            [self createSampleBufferFromPixelBuffer:pixelBuffer\n                                                   presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)];\n                        if (sampleBuffer) {\n                            if (_sampleBufferDisplayEnabled) {\n                                [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];\n                            }\n                            [_announcer managedVideoDataSource:self\n                                         didOutputSampleBuffer:sampleBuffer\n                                                devicePosition:_devicePosition];\n                            CFRelease(sampleBuffer);\n                        }\n                    }\n                    CVBufferRelease(pixelBuffer);\n                }\n            }\n            dispatch_semaphore_signal(_semaphore);\n        }];\n    }\n}\n\n- (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time\n{\n    CMSampleBufferRef sampleBuffer = NULL;\n    CMVideoFormatDescriptionRef formatDesc = NULL;\n\n    OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);\n    if (err != noErr) {\n        return NULL;\n    }\n\n    CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid};\n    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc,\n                                       &sampleTimingInfo, &sampleBuffer);\n\n    CFRelease(formatDesc);\n\n    return sampleBuffer;\n}\n\n- 
(void)configureOutput\n{\n    // Remove old output\n    if (_videoOutput) {\n        [[_player currentItem] removeOutput:_videoOutput];\n    }\n\n    // Setup AVPlayerItemVideoOutput with the required pixelbuffer attributes.\n    _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{\n        (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)\n    }];\n    _videoOutput.suppressesPlayerRendering = YES;\n    [_videoOutput setDelegate:self queue:_performer.queue];\n\n    // Add new output\n    [[_player currentItem] addOutput:_videoOutput];\n    [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0];\n}\n\n- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion\n{\n    _nextPixelBufferHandler = completion;\n}\n\n- (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item\n{\n    if (_notificationToken) {\n        // Unregister the previous block-based observer before dropping its token;\n        // NSNotificationCenter retains the observer until removeObserver: is called,\n        // so nil-ing the token alone leaks the old registration.\n        [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken];\n        _notificationToken = nil;\n    }\n\n    _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;\n    _notificationToken =\n        [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification\n                                                          object:item\n                                                           queue:[NSOperationQueue mainQueue]\n                                                      usingBlock:^(NSNotification *note) {\n                                                          [[_player currentItem] seekToTime:kCMTimeZero];\n                                                      }];\n}\n\n- (void)removePlayerObservers\n{\n    if (_notificationToken) {\n        [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken\n                                                        name:AVPlayerItemDidPlayToEndTimeNotification\n                                                      object:_player.currentItem];\n        
_notificationToken = nil;\n    }\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoFrameSampler.h",
    "content": "//\n//  SCManagedVideoFrameSampler.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 3/10/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturerListener.h\"\n\n#import <Foundation/Foundation.h>\n\n/**\n Allows consumer to register a block to sample the next CMSampleBufferRef and\n automatically leverages Core image to convert the pixel buffer to a UIImage.\n Returned image will be a copy.\n */\n@interface SCManagedVideoFrameSampler : NSObject <SCManagedCapturerListener>\n\n- (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoFrameSampler.m",
    "content": "//\n//  SCManagedVideoFrameSampler.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 3/10/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoFrameSampler.h\"\n\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/UIImage+CVPixelBufferRef.h>\n\n@import CoreImage;\n@import ImageIO;\n\n@interface SCManagedVideoFrameSampler ()\n\n@property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime);\n@property (nonatomic, strong) CIContext *ciContext;\n\n@end\n\n@implementation SCManagedVideoFrameSampler\n\n- (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock\n{\n    _frameSampleBlock = completeBlock;\n}\n\n#pragma mark - SCManagedCapturerListener\n\n- (void)managedCapturer:(id<SCCapturer>)managedCapturer\n    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata\n{\n    void (^block)(UIImage *, CMTime) = _frameSampleBlock;\n    _frameSampleBlock = nil;\n\n    if (!block) {\n        return;\n    }\n\n    CVImageBufferRef cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n    UIImage *image;\n    if (cvImageBuffer) {\n        CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(cvImageBuffer);\n        image = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];\n        CGImageRelease(cgImage);\n    }\n    runOnMainThreadAsynchronously(^{\n        block(image, presentationTime);\n    });\n}\n\n- (CIContext *)ciContext\n{\n    if (!_ciContext) {\n        _ciContext = [CIContext context];\n    }\n    return _ciContext;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoNoSoundLogger.h",
    "content": "//\n//  SCManagedVideoNoSoundLogger.h\n//  Snapchat\n//\n//  Created by Pinlin Chen on 15/07/2017.\n//\n//\n\n#import <SCBase/SCMacros.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@protocol SCManiphestTicketCreator;\n\n@interface SCManagedVideoNoSoundLogger : NSObject\n\n@property (nonatomic, strong) NSError *audioSessionError;\n@property (nonatomic, strong) NSError *audioQueueError;\n@property (nonatomic, strong) NSError *assetWriterError;\n@property (nonatomic, assign) BOOL retryAudioQueueSuccess;\n@property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource;\n@property (nonatomic, strong) NSString *brokenMicCodeType;\n@property (nonatomic, assign) BOOL lenseActiveWhileRecording;\n@property (nonatomic, strong) NSString *activeLensId;\n@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;\n@property (nonatomic, assign) BOOL audioQueueStarted;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;\n\n/* Use to counting how many no sound issue we have fixed */\n// Call at the place where we have fixed the AVPlayer leak before\n+ (void)startCountingVideoNoSoundHaveBeenFixed;\n\n/* Use to report the detail of new no sound issue */\n// Reset all the properties of recording error\n- (void)resetAll;\n// Log if the audio track is empty\n- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL;\n// called by AVCameraViewController when lense resume audio\n- (void)managedLensesProcessorDidCallResumeAllSounds;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoNoSoundLogger.m",
    "content": "//\n//  SCManagedVideoNoSoundLogger.m\n//  Snapchat\n//\n//  Created by Pinlin Chen on 15/07/2017.\n//\n//\n\n#import \"SCManagedVideoNoSoundLogger.h\"\n\n#import \"SCManagedCapturer.h\"\n#import \"SCManiphestTicketCreator.h\"\n\n#import <SCAudio/SCAudioSession+Debug.h>\n#import <SCAudio/SCAudioSession.h>\n#import <SCFoundation/NSString+Helpers.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCLogHelper.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCUUID.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger.h>\n\n@import AVFoundation;\n\nstatic BOOL s_startCountingVideoNoSoundFixed;\n// Count the number of no sound errors for an App session\nstatic NSUInteger s_noSoundCaseCount = 0;\n\n@interface SCManagedVideoNoSoundLogger () {\n    BOOL _isAudioSessionDeactivated;\n    int _lenseResumeCount;\n}\n\n@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;\n\n@end\n\n@implementation SCManagedVideoNoSoundLogger\n\n- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator\n{\n    if (self = [super init]) {\n        _ticketCreator = ticketCreator;\n    }\n    return self;\n}\n\n+ (NSUInteger)noSoundCount\n{\n    return s_noSoundCaseCount;\n}\n\n+ (void)increaseNoSoundCount\n{\n    s_noSoundCaseCount += 1;\n}\n\n+ (void)startCountingVideoNoSoundHaveBeenFixed\n{\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        s_startCountingVideoNoSoundFixed = YES;\n        SCLogGeneralInfo(@\"start counting video no sound have been fixed\");\n    });\n}\n\n+ (NSString *)appSessionIdForNoSound\n{\n    static dispatch_once_t onceToken;\n    static NSString *s_AppSessionIdForNoSound = @\"SCDefaultSession\";\n    dispatch_once(&onceToken, ^{\n        s_AppSessionIdForNoSound = SCUUID();\n    });\n    return s_AppSessionIdForNoSound;\n}\n\n+ (void)logVideoNoSoundHaveBeenFixedIfNeeded\n{\n    if (s_startCountingVideoNoSoundFixed) {\n        [[SCLogger 
sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n                                          parameters:@{\n                                              @\"have_been_fixed\" : @\"true\",\n                                              @\"fixed_type\" : @\"player_leak\",\n                                              @\"asset_writer_success\" : @\"true\",\n                                              @\"audio_session_success\" : @\"true\",\n                                              @\"audio_queue_success\" : @\"true\",\n                                          }\n                                    secretParameters:nil\n                                             metrics:nil];\n    }\n}\n\n+ (void)logAudioSessionCategoryHaveBeenFixed\n{\n    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n                                      parameters:@{\n                                          @\"have_been_fixed\" : @\"true\",\n                                          @\"fixed_type\" : @\"audio_session_category_mismatch\",\n                                          @\"asset_writer_success\" : @\"true\",\n                                          @\"audio_session_success\" : @\"true\",\n                                          @\"audio_queue_success\" : @\"true\",\n                                      }\n                                secretParameters:nil\n                                         metrics:nil];\n}\n\n+ (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type\n{\n    [[SCLogger sharedInstance]\n        logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n               parameters:@{\n                   @\"have_been_fixed\" : @\"true\",\n                   @\"fixed_type\" : @\"broken_microphone\",\n                   @\"asset_writer_success\" : @\"true\",\n                   @\"audio_session_success\" : @\"true\",\n                   @\"audio_queue_success\" : @\"true\",\n                   
@\"mic_broken_type\" : SC_NULL_STRING_IF_NIL(type),\n                   @\"audio_session_debug_info\" :\n                       [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @\"(null)\",\n               }\n         secretParameters:nil\n                  metrics:nil];\n}\n\n- (instancetype)init\n{\n    if (self = [super init]) {\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_audioSessionWillDeactivate)\n                                                     name:SCAudioSessionWillDeactivateNotification\n                                                   object:nil];\n        [[NSNotificationCenter defaultCenter] addObserver:self\n                                                 selector:@selector(_audioSessionDidActivate)\n                                                     name:SCAudioSessionActivatedNotification\n                                                   object:nil];\n        _firstWrittenAudioBufferDelay = kCMTimeInvalid;\n    }\n    return self;\n}\n\n- (void)resetAll\n{\n    _audioQueueError = nil;\n    _audioSessionError = nil;\n    _assetWriterError = nil;\n    _retryAudioQueueSuccess = NO;\n    _retryAudioQueueSuccessSetDataSource = NO;\n    _brokenMicCodeType = nil;\n    _lenseActiveWhileRecording = NO;\n    _lenseResumeCount = 0;\n    _activeLensId = nil;\n    self.firstWrittenAudioBufferDelay = kCMTimeInvalid;\n}\n\n- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL\n{\n    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];\n\n    __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);\n\n    dispatch_block_t block = ^{\n\n        // Log no audio issues have been fixed\n        if (hasAudioTrack) {\n            if (_retryAudioQueueSuccess) {\n                [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed];\n            } else if (_retryAudioQueueSuccessSetDataSource) {\n               
 [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType];\n            } else {\n                [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded];\n            }\n        } else {\n            // Log no audio issues caused by no permission into \"wont_fix_type\", won't show in Grafana\n            BOOL isPermissionGranted =\n                [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;\n            if (!isPermissionGranted) {\n                [SCManagedVideoNoSoundLogger increaseNoSoundCount];\n                [[SCLogger sharedInstance]\n                    logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n                           parameters:@{\n                               @\"wont_fix_type\" : @\"no_permission\",\n                               @\"no_sound_count\" :\n                                   [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @\"(null)\",\n                               @\"session_id\" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @\"(null)\"\n                           }\n                     secretParameters:nil\n                              metrics:nil];\n\n            }\n            // Log no audio issues caused by microphone occupied into \"wont_fix_type\", for example Phone Call,\n            // It won't show in Grafana\n            // TODO: maybe we should prompt the user of these errors in the future\n            else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority ||\n                     _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) {\n                NSDictionary *parameters = @{\n                    @\"wont_fix_type\" : @\"microphone_in_use\",\n                    @\"asset_writer_error\" : _assetWriterError ? 
[_assetWriterError description] : @\"(null)\",\n                    @\"audio_session_error\" : _audioSessionError.userInfo ?: @\"(null)\",\n                    @\"audio_queue_error\" : _audioQueueError.userInfo ?: @\"(null)\",\n                    @\"audio_session_deactivated\" : _isAudioSessionDeactivated ? @\"true\" : @\"false\",\n                    @\"audio_session_debug_info\" :\n                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @\"(null)\",\n                    @\"no_sound_count\" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @\"(null)\",\n                    @\"session_id\" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @\"(null)\"\n                };\n\n                [SCManagedVideoNoSoundLogger increaseNoSoundCount];\n                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n                                                  parameters:parameters\n                                            secretParameters:nil\n                                                     metrics:nil];\n                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];\n            } else {\n                // Log other new no audio issues, use \"have_been_fixed=false\" to show in Grafana\n                NSDictionary *parameters = @{\n                    @\"have_been_fixed\" : @\"false\",\n                    @\"asset_writer_error\" : _assetWriterError ? 
[_assetWriterError description] : @\"(null)\",\n                    @\"audio_session_error\" : _audioSessionError.userInfo ?: @\"(null)\",\n                    @\"audio_queue_error\" : _audioQueueError.userInfo ?: @\"(null)\",\n                    @\"asset_writer_success\" : [NSString stringWithBool:_assetWriterError == nil],\n                    @\"audio_session_success\" : [NSString stringWithBool:_audioSessionError == nil],\n                    @\"audio_queue_success\" : [NSString stringWithBool:_audioQueueError == nil],\n                    @\"audio_session_deactivated\" : _isAudioSessionDeactivated ? @\"true\" : @\"false\",\n                    @\"video_duration\" : [NSString sc_stringWithFormat:@\"%f\", CMTimeGetSeconds(asset.duration)],\n                    @\"is_audio_session_nil\" :\n                        [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? @\"true\" : @\"false\",\n                    @\"lenses_active\" : [NSString stringWithBool:self.lenseActiveWhileRecording],\n                    @\"active_lense_id\" : self.activeLensId ?: @\"(null)\",\n                    @\"lense_audio_resume_count\" : @(_lenseResumeCount),\n                    @\"first_audio_buffer_delay\" :\n                        [NSString sc_stringWithFormat:@\"%f\", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)],\n                    @\"audio_session_debug_info\" :\n                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @\"(null)\",\n                    @\"audio_queue_started\" : [NSString stringWithBool:_audioQueueStarted],\n                    @\"no_sound_count\" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @\"(null)\",\n                    @\"session_id\" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @\"(null)\"\n                };\n                [SCManagedVideoNoSoundLogger increaseNoSoundCount];\n                [[SCLogger sharedInstance] 
logUnsampledEvent:kSCCameraMetricsVideoNoSoundError\n                                                  parameters:parameters\n                                            secretParameters:nil\n                                                     metrics:nil];\n                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];\n            }\n        }\n    };\n    if (hasAudioTrack) {\n        block();\n    } else {\n        // Wait for all tracks to be loaded, in case of error counting the metric\n        [asset loadValuesAsynchronouslyForKeys:@[ @\"tracks\" ]\n                             completionHandler:^{\n                                 // Return when the tracks couldn't be loaded\n                                 NSError *error = nil;\n                                 if ([asset statusOfValueForKey:@\"tracks\" error:&error] != AVKeyValueStatusLoaded ||\n                                     error != nil) {\n                                     return;\n                                 }\n\n                                 // check audio track again\n                                 hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);\n                                 runOnMainThreadAsynchronously(block);\n                             }];\n    }\n}\n\n- (void)_audioSessionWillDeactivate\n{\n    _isAudioSessionDeactivated = YES;\n}\n\n- (void)_audioSessionDidActivate\n{\n    _isAudioSessionDeactivated = NO;\n}\n\n- (void)managedLensesProcessorDidCallResumeAllSounds\n{\n    _lenseResumeCount += 1;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoScanner.h",
    "content": "//\n//  SCManagedVideoScanner.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedDeviceCapacityAnalyzerListener.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@class SCScanConfiguration;\n\n@interface SCManagedVideoScanner : NSObject <SCManagedVideoDataSourceListener, SCManagedDeviceCapacityAnalyzerListener>\n\n/**\n * Calling this method to start scan, scan will automatically stop when a snapcode detected\n */\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration;\n\n/**\n * Calling this method to stop scan immediately (it is still possible that a successful scan can happen after this is\n * called)\n */\n- (void)stopScanAsynchronously;\n\n- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration\n                        maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration\n                                      restCycle:(float)restCycle;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoScanner.m",
    "content": "//\n//  SCManagedVideoScanner.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/5/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoScanner.h\"\n\n#import \"SCScanConfiguration.h\"\n\n#import <SCFeatureSettings/SCFeatureSettingsManager+Property.h>\n#import <SCFoundation/NSData+Base64.h>\n#import <SCFoundation/NSString+SCFormat.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCFoundation/UIDevice+Filter.h>\n#import <SCLogger/SCLogger.h>\n#import <SCScanTweaks/SCScanTweaks.h>\n#import <SCScanner/SCMachineReadableCodeResult.h>\n#import <SCScanner/SCSnapScanner.h>\n#import <SCVisualProductSearchTweaks/SCVisualProductSearchTweaks.h>\n\n// In seconds\nstatic NSTimeInterval const kDefaultScanTimeout = 60;\n\nstatic const char *kSCManagedVideoScannerQueueLabel = \"com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan\";\n\n@interface SCManagedVideoScanner ()\n\n@end\n\n@implementation SCManagedVideoScanner {\n    SCSnapScanner *_snapScanner;\n    dispatch_semaphore_t _activeSemaphore;\n    NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes\n    NSTimeInterval _maxFrameDefaultDuration;\n    NSTimeInterval _maxFramePassiveDuration;\n    float _restCycleOfBusyCycle;\n    NSTimeInterval _scanStartTime;\n    BOOL _active;\n    BOOL _shouldEmitEvent;\n    dispatch_block_t _completionHandler;\n    NSTimeInterval _scanTimeout;\n    SCManagedCaptureDevicePosition _devicePosition;\n    SCQueuePerformer *_performer;\n    BOOL _adjustingFocus;\n    NSArray *_codeTypes;\n    NSArray *_codeTypesOld;\n    sc_managed_capturer_scan_results_handler_t _scanResultsHandler;\n\n    SCUserSession *_userSession;\n}\n\n- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration\n                        
maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration\n                                      restCycle:(float)restCycle\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _snapScanner = [SCSnapScanner sharedInstance];\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel\n                                            qualityOfService:QOS_CLASS_UNSPECIFIED\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCamera];\n        _activeSemaphore = dispatch_semaphore_create(0);\n        SCAssert(restCycle >= 0 && restCycle < 1, @\"rest cycle should be between 0 to 1\");\n        _maxFrameDefaultDuration = maxFrameDefaultDuration;\n        _maxFramePassiveDuration = maxFramePassiveDuration;\n        _restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest\n    }\n    return self;\n}\n#pragma mark - Public methods\n\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration\n{\n    SCTraceStart();\n    [_performer perform:^{\n        _shouldEmitEvent = YES;\n        _completionHandler = nil;\n        _scanResultsHandler = configuration.scanResultsHandler;\n        _userSession = configuration.userSession;\n        _scanTimeout = kDefaultScanTimeout;\n        _maxFrameDuration = _maxFrameDefaultDuration;\n        _codeTypes = [self _scanCodeTypes];\n        _codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ];\n\n        SCTraceStart();\n        // Set the scan start time properly, if we call startScan multiple times while it is active,\n        // This makes sure we can scan long enough.\n        _scanStartTime = CACurrentMediaTime();\n        // we are not active, need to send the semaphore to start the scan\n        if (!_active) {\n            _active = YES;\n\n            // Signal the semaphore that we can 
start scan!\n            dispatch_semaphore_signal(_activeSemaphore);\n        }\n    }];\n}\n\n- (void)stopScanAsynchronously\n{\n    SCTraceStart();\n    [_performer perform:^{\n        SCTraceStart();\n        if (_active) {\n            SCLogScanDebug(@\"VideoScanner:stopScanAsynchronously turn off from active\");\n            _active = NO;\n            _scanStartTime = 0;\n            _scanResultsHandler = nil;\n            _userSession = nil;\n        } else {\n            SCLogScanDebug(@\"VideoScanner:stopScanAsynchronously off already\");\n        }\n    }];\n}\n\n#pragma mark - Private Methods\n\n- (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData\n{\n    if (scannedData.hasScannedData) {\n        if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji ||\n            scannedData.codeType == SCCodeTypeSnapcode18x18Old) {\n            NSString *data = [scannedData.rawData base64EncodedString];\n            NSString *version = [NSString sc_stringWithFormat:@\"%i\", scannedData.codeTypeMeta];\n            [[SCLogger sharedInstance] logEvent:@\"SNAPCODE_18x18_SCANNED_FROM_CAMERA\"\n                                     parameters:@{\n                                         @\"version\" : version\n                                     }\n                               secretParameters:@{\n                                   @\"data\" : data\n                               }];\n\n            if (_completionHandler != nil) {\n                runOnMainThreadAsynchronously(_completionHandler);\n                _completionHandler = nil;\n            }\n        } else if (scannedData.codeType == SCCodeTypeBarcode) {\n            if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) {\n                return;\n            }\n            NSString *data = scannedData.data;\n            NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta];\n            
[[SCLogger sharedInstance] logEvent:@\"BARCODE_SCANNED_FROM_CAMERA\"\n                                     parameters:@{\n                                         @\"type\" : type\n                                     }\n                               secretParameters:@{\n                                   @\"data\" : data\n                               }];\n        } else if (scannedData.codeType == SCCodeTypeQRCode) {\n            if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) {\n                return;\n            }\n            NSURL *url = [NSURL URLWithString:scannedData.data];\n            [[SCLogger sharedInstance] logEvent:@\"QR_CODE_SCANNED_FROM_CAMERA\"\n                                     parameters:@{\n                                         @\"type\" : (url) ? @\"url\" : @\"other\"\n                                     }\n                               secretParameters:@{}];\n        }\n\n        if (_shouldEmitEvent) {\n            sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler;\n            runOnMainThreadAsynchronously(^{\n                if (scanResultsHandler != nil && scannedData) {\n                    SCMachineReadableCodeResult *result =\n                        [SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData];\n                    scanResultsHandler(result);\n                }\n            });\n        }\n    }\n}\n\n- (NSArray *)_scanCodeTypes\n{\n    // Scan types are defined by codetypes. 
SnapScan will scan the frame based on codetype.\n    NSMutableArray *codeTypes = [[NSMutableArray alloc]\n        initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil];\n    if (SCSearchEnableBarcodeProductSearch()) {\n        [codeTypes addObject:@(SCCodeTypeBarcode)];\n    }\n    return [codeTypes copy];\n}\n\n#pragma mark - SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceStart();\n    _devicePosition = devicePosition;\n\n    if (!_active) {\n        SCLogScanDebug(@\"VideoScanner: Scanner is not active\");\n        return;\n    }\n    SCLogScanDebug(@\"VideoScanner: Scanner is active\");\n\n    // If we have the semaphore now, enqueue a new buffer, otherwise drop the buffer\n    if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) {\n        CFRetain(sampleBuffer);\n        NSTimeInterval startTime = CACurrentMediaTime();\n        [_performer perform:^{\n            SCTraceStart();\n            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n            SCLogScanInfo(@\"VideoScanner: Scanner will scan a frame\");\n            SCSnapScannedData *scannedData;\n\n            SCLogScanInfo(@\"VideoScanner:Use new scanner without false alarm check\");\n            scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes];\n\n            if ([UIDevice shouldLogPerfEvents]) {\n                NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000;\n                // Since there are too many unsuccessful scans, we will only log 1/10 of them for now.\n                if (scannedData.hasScannedData || (!scannedData.hasScannedData && arc4random() % 10 == 0)) {\n                    [[SCLogger sharedInstance] 
logEvent:@\"SCAN_SINGLE_FRAME\"\n                                             parameters:@{\n                                                 @\"time_span\" : @(loadingMs),\n                                                 @\"has_scanned_data\" : @(scannedData.hasScannedData),\n                                             }];\n                }\n            }\n\n            [self _handleSnapScanResult:scannedData];\n            // If it is not turned off, we will continue to scan if result is not present\n            if (_active) {\n                _active = !scannedData.hasScannedData;\n            }\n\n            // Clean up if result is reported for scan\n            if (!_active) {\n                _scanResultsHandler = nil;\n                _completionHandler = nil;\n            }\n\n            CFRelease(sampleBuffer);\n\n            NSTimeInterval currentTime = CACurrentMediaTime();\n            SCLogScanInfo(@\"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f\", currentTime - startTime,\n                          _maxFrameDuration, _scanTimeout);\n            // Haven't found the scanned data yet, haven't reached maximum scan timeout yet, haven't turned this off\n            // yet, ready for the next frame\n            if (_active && currentTime < _scanStartTime + _scanTimeout) {\n                // We've finished processing current sample buffer, ready for next one, but before that, we need to rest\n                // a bit (if possible)\n                if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) {\n                    // If we already reached deadline (used too much time) and don't want to rest CPU, give the signal\n                    // now to grab the next frame\n                    SCLogScanInfo(@\"VideoScanner:Signal to get next frame for snapcode scanner\");\n                    dispatch_semaphore_signal(_activeSemaphore);\n                } else {\n                    NSTimeInterval afterTime = 
MAX((currentTime - startTime) * _restCycleOfBusyCycle,\n                                                   _maxFrameDuration - (currentTime - startTime));\n                    // If we need to wait more than 0 second, then do that, otherwise grab the next frame immediately\n                    if (afterTime > 0) {\n                        [_performer perform:^{\n                            SCLogScanInfo(\n                                @\"VideoScanner:Waited and now signaling to get next frame for snapcode scanner\");\n                            dispatch_semaphore_signal(_activeSemaphore);\n                        }\n                                      after:afterTime];\n                    } else {\n                        SCLogScanInfo(@\"VideoScanner:Now signaling to get next frame for snapcode scanner\");\n                        dispatch_semaphore_signal(_activeSemaphore);\n                    }\n                }\n            } else {\n                // We are not active, and not going to be active any more.\n                SCLogScanInfo(@\"VideoScanner:not active anymore\");\n                _active = NO;\n                _scanResultsHandler = nil;\n                _completionHandler = nil;\n            }\n        }];\n    }\n}\n\n#pragma mark - SCManagedDeviceCapacityAnalyzerListener\n\n- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer\n              didChangeAdjustingFocus:(BOOL)adjustingFocus\n{\n    [_performer perform:^{\n        _adjustingFocus = adjustingFocus;\n    }];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoStreamReporter.h",
    "content": "//\n//  SCManagedVideoStreamReporter.h\n//  Snapchat\n//\n//  Created by Liu Liu on 5/16/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCManagedVideoStreamReporter : NSObject <SCManagedVideoDataSourceListener>\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoStreamReporter.m",
    "content": "//\n//  SCManagedVideoStreamReporter.m\n//  Snapchat\n//\n//  Created by Liu Liu on 5/16/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedVideoStreamReporter.h\"\n\n#import <SCFoundation/SCLog.h>\n#import <SCLogger/SCLogger.h>\n\nstatic NSTimeInterval const SCManagedVideoStreamReporterInterval = 10;\n\n@implementation SCManagedVideoStreamReporter {\n    NSUInteger _droppedSampleBuffers;\n    NSUInteger _outputSampleBuffers;\n    NSTimeInterval _lastReportTime;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        _lastReportTime = CACurrentMediaTime();\n    }\n    return self;\n}\n\n- (void)_reportIfNeeded\n{\n    NSTimeInterval currentTime = CACurrentMediaTime();\n    if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) {\n        SCLogGeneralInfo(@\"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu\", _lastReportTime, currentTime,\n                         _droppedSampleBuffers, _outputSampleBuffers);\n        _droppedSampleBuffers = _outputSampleBuffers = 0;\n        _lastReportTime = currentTime;\n    }\n}\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    ++_outputSampleBuffers;\n    [self _reportIfNeeded];\n}\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n           didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    ++_droppedSampleBuffers;\n    [self _reportIfNeeded];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoStreamer.h",
    "content": "//\n//  SCManagedVideoStreamer.h\n//  Snapchat\n//\n//  Created by Liu Liu on 4/30/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedVideoARDataSource.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSource.h>\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n@class ARSession;\n\n/**\n * SCManagedVideoStreamer uses the current AVCaptureSession to create\n * and publish video output frames. SCManagedVideoStreamer also conforms\n * to SCManagedVideoDataSource allowing chained consumption of video frames.\n */\n@interface SCManagedVideoStreamer : NSObject <SCManagedVideoDataSource, SCManagedVideoARDataSource>\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      arSession:(ARSession *)arSession\n                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0);\n\n- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n\n- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0);\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCManagedVideoStreamer.m",
    "content": "//\n//  SCManagedVideoStreamer.m\n//  Snapchat\n//\n//  Created by Liu Liu on 4/30/15.\n//  Copyright (c) 2015 Liu Liu. All rights reserved.\n//\n\n#import \"SCManagedVideoStreamer.h\"\n\n#import \"ARConfiguration+SCConfiguration.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCapturerDefines.h\"\n#import \"SCLogger+Camera.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCMetalUtils.h\"\n#import \"SCProcessingPipeline.h\"\n#import \"SCProcessingPipelineBuilder.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>\n#import <SCFoundation/NSString+SCFormat.h>\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCLogger/SCCameraMetrics.h>\n\n#import <Looksery/Looksery.h>\n\n#import <libkern/OSAtomic.h>\n#import <stdatomic.h>\n\n@import ARKit;\n@import AVFoundation;\n\n#define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@\"[SCManagedVideoStreamer] \" fmt, ##__VA_ARGS__)\n#define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@\"[SCManagedVideoStreamer] \" fmt, ##__VA_ARGS__)\n#define SCLogVideoStreamerError(fmt, ...) 
SCLogCoreCameraError(@\"[SCManagedVideoStreamer] \" fmt, ##__VA_ARGS__)\n\nstatic NSInteger const kSCCaptureFrameRate = 30;\nstatic CGFloat const kSCLogInterval = 3.0;\nstatic char *const kSCManagedVideoStreamerQueueLabel = \"com.snapchat.managed-video-streamer\";\nstatic char *const kSCManagedVideoStreamerCallbackQueueLabel = \"com.snapchat.managed-video-streamer.dequeue\";\nstatic NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late.\n\nstatic NSTimeInterval const kSCManagedVideoStreamerStalledDisplay =\n    5; // If the frame is not updated for 5 seconds, it is considered to be stalled.\n\nstatic NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap =\n    1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps\nstatic int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15;\n\n@interface SCManagedVideoStreamer () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate,\n                                      AVCaptureDataOutputSynchronizerDelegate, ARSessionDelegate>\n\n@property (nonatomic, strong) AVCaptureSession *captureSession;\n\n@end\n\n@implementation SCManagedVideoStreamer {\n    AVCaptureVideoDataOutput *_videoDataOutput;\n    AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0);\n    AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0);\n    BOOL _performingConfigurations;\n    SCManagedCaptureDevicePosition _devicePosition;\n    BOOL _videoStabilizationEnabledIfSupported;\n    SCManagedVideoDataSourceListenerAnnouncer *_announcer;\n\n    BOOL _sampleBufferDisplayEnabled;\n    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;\n    dispatch_block_t _flushOutdatedPreviewBlock;\n    NSMutableArray<NSArray *> *_waitUntilSampleBufferDisplayedBlocks;\n    SCProcessingPipeline *_processingPipeline;\n\n    NSTimeInterval _lastDisplayedFrameTimestamp;\n#ifdef SC_USE_ARKIT_FACE\n    
NSTimeInterval _lastDisplayedDepthFrameTimestamp;\n#endif\n\n    BOOL _depthCaptureEnabled;\n    CGPoint _portraitModePointOfInterest;\n\n    // For sticky video tweaks\n    BOOL _keepLateFrames;\n    SCQueuePerformer *_callbackPerformer;\n    atomic_int _processingBuffersCount;\n}\n\n@synthesize isStreaming = _isStreaming;\n@synthesize performer = _performer;\n@synthesize currentFrame = _currentFrame;\n@synthesize fieldOfView = _fieldOfView;\n#ifdef SC_USE_ARKIT_FACE\n@synthesize lastDepthData = _lastDepthData;\n#endif\n@synthesize videoOrientation = _videoOrientation;\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceStart();\n    self = [super init];\n    if (self) {\n        _sampleBufferDisplayEnabled = YES;\n        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];\n        // We discard frames to support lenses in real time\n        _keepLateFrames = NO;\n        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel\n                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                     context:SCQueuePerformerContextCamera];\n\n        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;\n\n        [self setupWithSession:session devicePosition:devicePosition];\n        SCLogVideoStreamerInfo(@\"init with position:%lu\", (unsigned long)devicePosition);\n    }\n    return self;\n}\n\n- (instancetype)initWithSession:(AVCaptureSession *)session\n                      arSession:(ARSession *)arSession\n                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0)\n{\n    self = [self initWithSession:session devicePosition:devicePosition];\n    if (self) {\n        [self setupWithARSession:arSession];\n    
    self.currentFrame = nil;\n#ifdef SC_USE_ARKIT_FACE\n        self.lastDepthData = nil;\n#endif\n    }\n    return self;\n}\n\n- (AVCaptureVideoDataOutput *)_newVideoDataOutput\n{\n    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];\n    // All inbound frames are going to be the native format of the camera avoid\n    // any need for transcoding.\n    output.videoSettings =\n        @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };\n    return output;\n}\n\n- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    [self stopStreaming];\n    self.captureSession = session;\n    _devicePosition = devicePosition;\n\n    _videoDataOutput = [self _newVideoDataOutput];\n    if (SCDeviceSupportsMetal()) {\n        // We default to start the streaming if the Metal is supported at startup time.\n        _isStreaming = YES;\n        // Set the sample buffer delegate before starting it.\n        [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];\n    }\n\n    if ([session canAddOutput:_videoDataOutput]) {\n        [session addOutput:_videoDataOutput];\n        [self _enableVideoMirrorForDevicePosition:devicePosition];\n    }\n\n    if (SCCameraTweaksEnablePortraitModeButton()) {\n        if (@available(iOS 11.0, *)) {\n            _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init];\n            [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];\n            if ([session canAddOutput:_depthDataOutput]) {\n                [session addOutput:_depthDataOutput];\n                [_depthDataOutput setDelegate:self callbackQueue:_performer.queue];\n            }\n            _depthCaptureEnabled = NO;\n        }\n        _portraitModePointOfInterest = CGPointMake(0.5, 0.5);\n    }\n\n    [self setVideoStabilizationEnabledIfSupported:YES];\n}\n\n- 
/// Routes ARSession callbacks onto this streamer's serial performer queue.
- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0)
{
    arSession.delegateQueue = _performer.queue;
    arSession.delegate = self;
}

/// Stores the display controller; assignment happens on the performer queue to
/// serialize access with frame delivery.
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
{
    [_performer perform:^{
        _sampleBufferDisplayController = sampleBufferDisplayController;
        SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController);
    }];
}

- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
{
    [_performer perform:^{
        _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
        SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
    }];
}

/// Registers a (queue, completion) pair that fires after the next frame is
/// displayed; fires immediately when not streaming.
- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
{
    SCAssert(queue, @"callback queue must be provided");
    SCAssert(completionHandler, @"completion handler must be provided");
    SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue,
                           completionHandler, _isStreaming);
    if (_isStreaming) {
        [_performer perform:^{
            if (!_waitUntilSampleBufferDisplayedBlocks) {
                _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array];
            }
            [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]];
            SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler);
        }];
    } else {
        dispatch_async(queue, completionHandler);
    }
}

/// Re-attaches the sample buffer delegate and (if enabled) the depth connection.
- (void)startStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"start streaming. _isStreaming:%d", _isStreaming);
    if (!_isStreaming) {
        _isStreaming = YES;
        // A pending flush from pauseStreaming would clear the preview we are about to feed.
        [self _cancelFlushOutdatedPreview];
        if (@available(iOS 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES];
            }
        }
        [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
    }
}

/// Adds the video data output to the given session (used when re-targeting sessions).
- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    if ([session canAddOutput:_videoDataOutput]) {
        // BUG FIX: the success path was previously logged with SCLogVideoStreamerError,
        // which misfiles a routine event as an error; use Info to match removeAsOutput:.
        SCLogVideoStreamerInfo(@"add videoDataOutput:%@", _videoDataOutput);
        [session addOutput:_videoDataOutput];
        [self _enableVideoMirrorForDevicePosition:devicePosition];
    } else {
        SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session);
    }
    [self _enableVideoStabilizationIfSupported];
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session);
    [session removeOutput:_videoDataOutput];
}

/// Cancels (and clears) the delayed preview-flush scheduled by pauseStreaming.
- (void)_cancelFlushOutdatedPreview
{
    SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock);
    if (_flushOutdatedPreviewBlock) {
        dispatch_block_cancel(_flushOutdatedPreviewBlock);
        _flushOutdatedPreviewBlock = nil;
    }
}

/// Queue receiving AVFoundation callbacks: a dedicated queue when sticky video
/// is on (so processing never blocks delivery), the main performer otherwise.
- (SCQueuePerformer *)callbackPerformer
{
    if (_keepLateFrames) {
        if (!_callbackPerformer) {
            _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel
                                                        qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                               queueType:DISPATCH_QUEUE_SERIAL
                                                                 context:SCQueuePerformerContextCamera];
        }
        return _callbackPerformer;
    }
    return _performer;
}

/// Detaches the delegate and schedules a (cancellable) flush of the stale preview.
- (void)pauseStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming);
    if (_isStreaming) {
        _isStreaming = NO;
        [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
        if (@available(iOS 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
            }
        }
        @weakify(self);
        _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{
            SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock");
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            [self->_sampleBufferDisplayController flushOutdatedPreview];
        });
        [_performer perform:_flushOutdatedPreviewBlock
                      after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? kSCManagedVideoStreamerStalledDisplay : 0];
        [_performer perform:^{
            [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
        }];
    }
}
/// Fully stops streaming: detaches the delegate, disables depth, and flushes
/// the preview right away (unlike pauseStreaming, which defers the flush).
- (void)stopStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming);
    if (_isStreaming) {
        _isStreaming = NO;
        [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
        if (@available(ios 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
            }
        }
    }
    [self _cancelFlushOutdatedPreview];
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"stopStreaming in perfome queue");
        [_sampleBufferDisplayController flushOutdatedPreview];
        [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
    }];
}

/// Marks the start of a configuration window; frames arriving while this flag
/// is set are dropped by didOutputSampleBuffer:.
- (void)beginConfiguration
{
    SCLogVideoStreamerInfo(@"enter beginConfiguration");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"performingConfigurations set to YES");
        _performingConfigurations = YES;
    }];
}

- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition);
    [self _enableVideoMirrorForDevicePosition:devicePosition];
    [self _enableVideoStabilizationIfSupported];
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu",
                               (unsigned long)_devicePosition, (unsigned long)devicePosition);
        if (_devicePosition != devicePosition) {
            _devicePosition = devicePosition;
        }
    }];
}

- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
{
    SCTraceStart();
    // Deliberately NOT dispatched to the private queue: this only reconfigures the
    // data output, and it must run on the managed-capturer queue so the capture
    // session is never locked from two queues at once (deadlock risk).
    SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu",
                           (unsigned long)_videoOrientation, (unsigned long)videoOrientation);
    _videoOrientation = videoOrientation;
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = _videoOrientation;
}

/// Toggles sticky-video mode and re-targets the AVFoundation callback queue.
- (void)setKeepLateFrames:(BOOL)keepLateFrames
{
    SCTraceStart();
    [_performer perform:^{
        SCTraceStart();
        if (keepLateFrames != _keepLateFrames) {
            _keepLateFrames = keepLateFrames;
            // Rather than AVCaptureVideoDataOutput.alwaysDiscardsLateVideo (which can
            // regress lenses), a separate callback queue lets all 15 sample buffers
            // be used when keeping late frames.
            [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
            SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames);
        }
    }];
}

/// Enables/disables depth capture; when enabled, video+depth are delivered
/// through an AVCaptureDataOutputSynchronizer instead of separate callbacks.
- (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0)
{
    _depthCaptureEnabled = enabled;
    [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled];
    if (enabled) {
        _dataOutputSynchronizer =
            [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]];
        [_dataOutputSynchronizer setDelegate:self queue:_performer.queue];
    } else {
        _dataOutputSynchronizer = nil;
    }
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    _portraitModePointOfInterest = pointOfInterest;
}

- (BOOL)getKeepLateFrames
{
    return _keepLateFrames;
}

/// Ends the configuration window opened by beginConfiguration.
- (void)commitConfiguration
{
    SCLogVideoStreamerInfo(@"enter commitConfiguration");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"performingConfigurations set to NO");
        _performingConfigurations = NO;
    }];
}

- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"add listener:%@", listener);
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"remove listener:%@", listener);
    [_announcer removeListener:listener];
}

- (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline
{
    SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline);
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline);
        _processingPipeline = processingPipeline;
    }];
}

- (void)removeProcessingPipeline
{
    SCLogVideoStreamerInfo(@"enter removeProcessingPipeline");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"processingPipeline set to nil");
        _processingPipeline = nil;
    }];
}

- (BOOL)isVideoMirrored
{
    SCTraceStart();
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    return connection.isVideoMirrored;
}

#pragma mark - Common Sample Buffer Handling

/// Convenience overload for frames that carry no depth data.
- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    return [self didOutputSampleBuffer:sampleBuffer depthData:nil];
}
/// Core frame path: optionally renders through the processing pipeline, enqueues
/// the result for Metal display, and announces it to listeners. Must run on
/// _performer (guarded below).
- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap
{
    // Don't send the sample buffer if we are perform configurations
    if (_performingConfigurations) {
        SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES");
        return;
    }
    SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);

    // We can't set alwaysDiscardsLateVideoFrames to YES when lens is activated because it will cause camera freezing.
    // When alwaysDiscardsLateVideoFrames is set to NO, the late frames will not be dropped until it reach 15 frames,
    // so we should simulate the dropping behaviour as AVFoundation do.
    NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    _lastDisplayedFrameTimestamp = presentationTime;
    NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime;
    // Rate-limit logging to roughly once per kSCLogInterval (3.0s) worth of frames.
    BOOL shouldLog =
        (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) %
            ((long)(kSCCaptureFrameRate * kSCLogInterval)) ==
        0;
    if (shouldLog) {
        SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer);
    }
    if (_processingPipeline) {
        RenderData renderData = {
            .sampleBuffer = sampleBuffer,
            .depthDataMap = depthDataMap,
            .depthBlurPointOfInterest =
                SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus()
                    ? &_portraitModePointOfInterest
                    : nil,
        };
        // Ensure we are doing all render operations (i.e. accessing textures) on performer to prevent race condition
        SCAssertPerformer(_performer);
        sampleBuffer = [_processingPipeline render:renderData];

        if (shouldLog) {
            SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer,
                                   _processingPipeline);
        }
    }

    if (sampleBuffer && _sampleBufferDisplayEnabled) {
        // Send the buffer only if it is valid, set it to be displayed immediately (See the enqueueSampleBuffer method
        // header, need to get attachments array and set the dictionary).
        CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
        if (!attachmentsArray) {
            SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer");
        } else if (CFArrayGetCount(attachmentsArray) > 0) {
            CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0);
            CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        }
        // Warn if frame that went through is not most recent enough.
        if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) {
            SCLogVideoStreamerWarning(
                @"The sample buffer we received is too late, why? presentationTime:%lf frameLatency:%f",
                presentationTime, frameLatency);
        }
        [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
        if (shouldLog) {
            SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer);
        }

        [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
    }

    if (shouldLog) {
        SCLogVideoStreamerInfo(@"begin annoucing sampleBuffer:%p of devicePosition:%lu", sampleBuffer,
                               (unsigned long)_devicePosition);
    }
    [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition];
    if (shouldLog) {
        SCLogVideoStreamerInfo(@"end annoucing sampleBuffer:%p", sampleBuffer);
    }
}

/// Logs (sticky-video mode only) and announces a dropped frame.
- (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (_performingConfigurations) {
        return;
    }
    SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);
    NSTimeInterval currentProcessingTime = CACurrentMediaTime();
    NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    // Only logging it when sticky tweak is on, which means sticky time is too long, and AVFoundation have to drop the
    // sampleBuffer
    if (_keepLateFrames) {
        // BUG FIX: the old log passed currentProcessingTime as "timestamp" and the raw
        // sample timestamp as "latency". Log the sample timestamp and the derived
        // latency (processing time minus sample time) so the labels match the values.
        SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentSampleTime,
                               currentProcessingTime - currentSampleTime);
    }
    [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition];
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// AVFoundation frame callback. With sticky video off, the callback queue IS the
/// processing queue and AVFoundation handles dropping. With sticky video on,
/// frames arrive on a separate callback queue and are bounced (retained, counted)
/// onto the processing queue, with a manual cap on in-flight buffers.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0)
{
    // Sticky video tweak is off, i.e. lenses is on,
    // we use same queue for callback and processing, and let AVFoundation decide which frame should be dropped
    if (!_keepLateFrames) {
        [self didOutputSampleBuffer:sampleBuffer];
    }
    // Sticky video tweak is on
    else {
        if ([_performer isCurrentPerformer]) {
            // Note: there might be one frame callbacked in processing queue when switching callback queue,
            // it should be fine. But if following log appears too much, it is not our design.
            SCLogVideoStreamerWarning(@"The callback queue should be a separated queue when sticky tweak is on");
        }
        // TODO: In sticky video v2, we should consider check free memory
        if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) {
            SCLogVideoStreamerWarning(@"processingBuffersCount reached to the max. current count:%d",
                                      _processingBuffersCount);
            [self didDropSampleBuffer:sampleBuffer];
            return;
        }
        atomic_fetch_add(&_processingBuffersCount, 1);
        // Retain across the async hop; released after processing below.
        CFRetain(sampleBuffer);
        // _performer should always be the processing queue
        [_performer perform:^{
            [self didOutputSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
            atomic_fetch_sub(&_processingBuffersCount, 1);
        }];
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    [self didDropSampleBuffer:sampleBuffer];
}

#pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth)

/// Synchronized video+depth delivery; forwards the video frame with its matching
/// (non-dropped) depth map, if any.
- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
    didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
    NS_AVAILABLE_IOS(11_0)
{
    AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection
        synchronizedDataForCaptureOutput:_depthDataOutput];
    AVDepthData *depthData = nil;
    if (syncedDepthData && !syncedDepthData.depthDataWasDropped) {
        depthData = syncedDepthData.depthData;
    }

    AVCaptureSynchronizedSampleBufferData *syncedVideoData =
        (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection
            synchronizedDataForCaptureOutput:_videoDataOutput];
    if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) {
        CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer;
        [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil];
    }
}

#pragma mark - ARSessionDelegate

- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0)
{
    NSString *state = nil;
    NSString *reason = nil;
    switch (camera.trackingState) {
    case ARTrackingStateNormal:
        state = @"Normal";
        break;
    case ARTrackingStateLimited:
        state = @"Limited";
        break;
    case ARTrackingStateNotAvailable:
        state = @"Not Available";
        break;
    }
    switch (camera.trackingStateReason) {
    case ARTrackingStateReasonNone:
        reason = @"None";
        break;
    case ARTrackingStateReasonInitializing:
        reason = @"Initializing";
        break;
    case ARTrackingStateReasonExcessiveMotion:
        reason = @"Excessive Motion";
        break;
    case ARTrackingStateReasonInsufficientFeatures:
        reason = @"Insufficient Features";
        break;
#if SC_AT_LEAST_SDK_11_3
    case ARTrackingStateReasonRelocalizing:
        reason = @"Relocalizing";
        break;
#endif
    }
    SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason);
}

/// Converts each ARFrame's pixel buffer into a CMSampleBuffer and feeds it
/// through the normal frame path, capped at ~30fps.
- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
{
#ifdef SC_USE_ARKIT_FACE
    // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific
    // devices there are ARFrame instances that don't respond to `capturedDepthData`.
    // (note: this was discovered to be due to some people staying on iOS 11 betas).
    AVDepthData *depth = nil;
    if ([frame respondsToSelector:@selector(capturedDepthData)]) {
        depth = frame.capturedDepthData;
    }
#endif

    CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp;
    // Don't deliver more than 30 frames per sec
    BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap;

#ifdef SC_USE_ARKIT_FACE
    if (depth) {
        CGFloat depthTimeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp;
        framerateMinimumElapsed |= depthTimeSince >= kSCManagedVideoStreamerARSessionFramerateCap;
    }
#endif

    SC_GUARD_ELSE_RETURN(framerateMinimumElapsed);

#ifdef SC_USE_ARKIT_FACE
    if (depth) {
        self.lastDepthData = depth;
        _lastDisplayedDepthFrameTimestamp = frame.timestamp;
    }
#endif

    // Make sure that current frame is no longer being used, otherwise drop current frame.
    SC_GUARD_ELSE_RETURN(self.currentFrame == nil);

    CVPixelBufferRef pixelBuffer = frame.capturedImage;
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000);
    CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid};

    // BUG FIX: both CoreMedia creation calls below were previously unchecked; on
    // failure videoInfo/buffer are NULL (or uninitialized) and the subsequent
    // CFRelease / enqueue would crash. Bail out cleanly instead.
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    if (status != noErr || videoInfo == NULL) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        SCLogVideoStreamerError(@"cannot create video format description for ARFrame, status:%d", (int)status);
        return;
    }

    CMSampleBufferRef buffer = NULL;
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing,
                                                &buffer);
    CFRelease(videoInfo);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    if (status != noErr || buffer == NULL) {
        SCLogVideoStreamerError(@"cannot create sample buffer for ARFrame, status:%d", (int)status);
        return;
    }

    self.currentFrame = frame;
    [self didOutputSampleBuffer:buffer];
    [self _updateFieldOfViewWithARFrame:frame];

    CFRelease(buffer);
}
- (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors NS_AVAILABLE_IOS(11_0)
{
    // Log only the first plane anchor in the batch, then stop scanning.
    for (ARAnchor *anchor in anchors) {
        if ([anchor isKindOfClass:[ARPlaneAnchor class]]) {
            SCLogVideoStreamerInfo(@"ARKit added plane anchor");
            return;
        }
    }
}

- (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error);
    [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]];
}

- (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerWarning(@"ARKit session interrupted");
}

- (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerInfo(@"ARKit interruption ended");
}

#pragma mark - Private methods

/// Fires every queued (queue, completion) pair registered via
/// waitUntilSampleBufferDisplayed:completionHandler:, then clears the list.
- (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed
{
    for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) {
        // completion is @[ callbackQueue, handlerBlock ].
        dispatch_async(completion[0], completion[1]);
    }
    [_waitUntilSampleBufferDisplayedBlocks removeAllObjects];
}

// This is the magic that ensures the VideoDataOutput will have the correct
// orientation.
- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition);
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = _videoOrientation;
    if (devicePosition == SCManagedCaptureDevicePositionFront) {
        connection.videoMirrored = YES;
    }
}

/// Applies the cached stabilization preference to the current video connection,
/// gated on the tweak and on hardware support.
- (void)_enableVideoStabilizationIfSupported
{
    SCTraceStart();
    if (!SCCameraTweaksEnableVideoStabilization()) {
        SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization");
        return;
    }

    AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection);
        return;
    }
    // Set the video stabilization mode to auto. Default is off.
    if ([videoConnection isVideoStabilizationSupported]) {
        videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported
                                                              ? AVCaptureVideoStabilizationModeStandard
                                                              : AVCaptureVideoStabilizationModeOff;
        NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) };
        [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params];
        SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@",
                               (long)videoConnection.preferredVideoStabilizationMode, videoConnection);
    } else {
        SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection);
    }
}

- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
{
    SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported);
    _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported;
    [self _enableVideoStabilizationIfSupported];
}

/// Derives the horizontal field of view (degrees) from the camera intrinsics
/// and publishes it when it changes.
- (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
{
    SC_GUARD_ELSE_RETURN(frame.camera);
    CGSize imageResolution = frame.camera.imageResolution;
    matrix_float3x3 intrinsics = frame.camera.intrinsics;
    // fov_x = 2 * atan(w / (2 * fx)), converted to degrees.
    float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI;
    if (_fieldOfView != xFovDegrees) {
        self.fieldOfView = xFovDegrees;
    }
}

- (NSString *)description
{
    return [self debugDescription];
}

- (NSString *)debugDescription
{
    NSDictionary *debugDict = @{
        @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No",
        @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No",
        @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No",
        @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No"
    };
    return [NSString sc_stringWithFormat:@"%@", debugDict];
}

@end
  },
  {
    "path": "ManagedCapturer/SCMetalUtils.h",
    "content": "//\n//  SCMetalUtils.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 7/11/17.\n//\n//  Utility class for metal related helpers.\n\n#import <Foundation/Foundation.h>\n#if !TARGET_IPHONE_SIMULATOR\n#import <Metal/Metal.h>\n#endif\n#import <AVFoundation/AVFoundation.h>\n\n#import <SCBase/SCMacros.h>\n\nSC_EXTERN_C_BEGIN\n\n#if !TARGET_IPHONE_SIMULATOR\nextern id<MTLDevice> SCGetManagedCaptureMetalDevice(void);\n#endif\n\nstatic SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void)\n{\n#if TARGET_CPU_ARM64\n    return YES; // All 64 bit system supports Metal.\n#else\n    return NO;\n#endif\n}\n\n#if !TARGET_IPHONE_SIMULATOR\nstatic inline id<MTLTexture> SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex,\n                                                           MTLPixelFormat pixelFormat,\n                                                           CVMetalTextureCacheRef textureCache)\n{\n    size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);\n    size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);\n    CVMetalTextureRef textureRef;\n    if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer,\n                                                                      nil, pixelFormat, width, height, planeIndex,\n                                                                      &textureRef)) {\n        return nil;\n    }\n    id<MTLTexture> texture = CVMetalTextureGetTexture(textureRef);\n    CVBufferRelease(textureRef);\n    return texture;\n}\n\nstatic inline void SCMetalCopyTexture(id<MTLTexture> texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex)\n{\n    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);\n    void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex);\n    NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex);\n    
MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex),\n                                       CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex));\n\n    [texture getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0];\n    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);\n}\n#endif\n\nSC_EXTERN_C_END\n"
  },
  {
    "path": "ManagedCapturer/SCMetalUtils.m",
    "content": "//\n//  SCMetalUtils.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 8/16/17.\n//\n//\n\n#import \"SCMetalUtils.h\"\n\n#import <SCFoundation/SCTrace.h>\n\nid<MTLDevice> SCGetManagedCaptureMetalDevice(void)\n{\n#if !TARGET_IPHONE_SIMULATOR\n    SCTraceStart();\n    static dispatch_once_t onceToken;\n    static id<MTLDevice> device;\n    dispatch_once(&onceToken, ^{\n        device = MTLCreateSystemDefaultDevice();\n    });\n    return device;\n#endif\n    return nil;\n}\n"
  },
  {
    "path": "ManagedCapturer/SCScanConfiguration.h",
    "content": "//\n//  SCScanConfiguration.h\n//  Snapchat\n//\n//  Created by Yang Dai on 3/7/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturer.h\"\n\n#import <SCSession/SCUserSession.h>\n\n@interface SCScanConfiguration : NSObject\n\n@property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler;\n@property (nonatomic, strong) SCUserSession *userSession;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCScanConfiguration.m",
    "content": "//\n//  SCScanConfiguration.m\n//  Snapchat\n//\n//  Created by Yang Dai on 3/7/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCScanConfiguration.h\"\n\n@implementation SCScanConfiguration\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCSingleFrameStreamCapturer.h",
    "content": "//\n//  SCSingleFrameStreamCapturer.h\n//  Snapchat\n//\n//  Created by Benjamin Hollis on 5/3/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCaptureCommon.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCSingleFrameStreamCapturer : NSObject <SCManagedVideoDataSourceListener>\n- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler;\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCSingleFrameStreamCapturer.m",
    "content": "//\n//  SCSingleFrameStreamCapturer.m\n//  Snapchat\n//\n//  Created by Benjamin Hollis on 5/3/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCSingleFrameStreamCapturer.h\"\n\n#import \"SCManagedCapturer.h\"\n\n@implementation SCSingleFrameStreamCapturer {\n    sc_managed_capturer_capture_video_frame_completion_handler_t _callback;\n}\n\n- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler\n{\n    self = [super init];\n    if (self) {\n        _callback = completionHandler;\n    }\n    return self;\n}\n\n#pragma mark - SCManagedVideoDataSourceListener\n\n- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource\n         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer\n                devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    if (_callback) {\n        UIImage *image = [self imageFromSampleBuffer:sampleBuffer];\n        _callback(image);\n    }\n    _callback = nil;\n}\n\n/**\n * Decode a CMSampleBufferRef to our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,\n * as set in SCManagedVideoStreamer) to a UIImage.\n *\n * Code from http://stackoverflow.com/a/31553521/11284\n */\n#define clamp(a) (a > 255 ? 255 : (a < 0 ? 
0 : a))\n// TODO: Use the transform code from SCImageProcessIdentityYUVCommand\n- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer\n{\n    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);\n    CVPixelBufferLockBaseAddress(imageBuffer, 0);\n\n    size_t width = CVPixelBufferGetWidth(imageBuffer);\n    size_t height = CVPixelBufferGetHeight(imageBuffer);\n    uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);\n    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);\n    uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);\n    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);\n\n    int bytesPerPixel = 4;\n    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);\n\n    for (int y = 0; y < height; y++) {\n        uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];\n        uint8_t *yBufferLine = &yBuffer[y * yPitch];\n        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];\n\n        for (int x = 0; x < width; x++) {\n            int16_t y = yBufferLine[x];\n            int16_t cb = cbCrBufferLine[x & ~1] - 128;\n            int16_t cr = cbCrBufferLine[x | 1] - 128;\n\n            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];\n\n            int16_t r = (int16_t)roundf(y + cr * 1.4);\n            int16_t g = (int16_t)roundf(y + cb * -0.343 + cr * -0.711);\n            int16_t b = (int16_t)roundf(y + cb * 1.765);\n\n            rgbOutput[0] = 0xff;\n            rgbOutput[1] = clamp(b);\n            rgbOutput[2] = clamp(g);\n            rgbOutput[3] = clamp(r);\n        }\n    }\n\n    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();\n    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace,\n                                                 kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);\n    CGImageRef quartzImage = 
CGBitmapContextCreateImage(context);\n\n    // TODO: Hardcoding UIImageOrientationRight seems cheesy\n    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];\n\n    CGContextRelease(context);\n    CGColorSpaceRelease(colorSpace);\n    CGImageRelease(quartzImage);\n    free(rgbBuffer);\n\n    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);\n\n    return image;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCStillImageCaptureVideoInputMethod.h",
    "content": "//\n//  SCStillImageCaptureVideoInputMethod.h\n//  Snapchat\n//\n//  Created by Alexander Grytsiuk on 3/16/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCManagedCapturerState.h\"\n\n#import <AVFoundation/AVFoundation.h>\n\n@interface SCStillImageCaptureVideoInputMethod : NSObject\n\n- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state\n                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,\n                                                     NSError *error))successBlock\n                              failureBlock:(void (^)(NSError *error))failureBlock;\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCStillImageCaptureVideoInputMethod.m",
    "content": "//\n//  SCStillImageCaptureVideoInputMethod.m\n//  Snapchat\n//\n//  Created by Alexander Grytsiuk on 3/16/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCStillImageCaptureVideoInputMethod.h\"\n\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedVideoFileStreamer.h\"\n\ntypedef unsigned char uchar_t;\nint clamp(int val, int low, int high)\n{\n    if (val < low)\n        val = low;\n    if (val > high)\n        val = high;\n    return val;\n}\n\nvoid yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b)\n{\n    double red = yValue + (1.370705 * (vValue - 128));\n    double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128));\n    double blue = yValue + (1.732446 * (uValue - 128));\n    *r = clamp(red, 0, 255);\n    *g = clamp(green, 0, 255);\n    *b = clamp(blue, 0, 255);\n}\n\nvoid convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel,\n                              int rgbBytesPerRow)\n{\n    uchar_t *uvData = nv21Data + height * width;\n    for (int h = 0; h < height; h++) {\n        uchar_t *yRowBegin = nv21Data + h * width;\n        uchar_t *uvRowBegin = uvData + h / 2 * width;\n        uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h;\n        for (int w = 0; w < width; w++) {\n            uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w;\n            yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]),\n                    &(rgbPixelBegin[1]), &(rgbPixelBegin[2]));\n        }\n    }\n}\n\n@implementation SCStillImageCaptureVideoInputMethod\n\n- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state\n                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,\n                                                     NSError *error))successBlock\n                              failureBlock:(void 
(^)(NSError *error))failureBlock\n{\n    id<SCManagedVideoDataSource> videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource];\n    if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) {\n        SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource;\n        [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {\n            BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront;\n#if TARGET_IPHONE_SIMULATOR\n            UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer];\n            CGImageRef videoImage = uiImage.CGImage;\n            UIImage *capturedImage = [UIImage\n                imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage\n                           scale:1.0\n                     orientation:UIImageOrientationRight];\n#else\n            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];\n            CIContext *temporaryContext = [CIContext contextWithOptions:nil];\n\n            CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));\n            CGImageRef videoImage =\n                [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)];\n\n            UIImage *capturedImage =\n                [UIImage imageWithCGImage:shouldFlip ? 
[self flipCGImage:videoImage size:size].CGImage : videoImage\n                                    scale:1.0\n                              orientation:UIImageOrientationRight];\n\n            CGImageRelease(videoImage);\n#endif\n            if (successBlock) {\n                successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil);\n            }\n        }];\n    } else {\n        if (failureBlock) {\n            failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]);\n        }\n    }\n}\n\n- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size\n{\n    UIGraphicsBeginImageContext(size);\n    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage);\n    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n    return image;\n}\n\n- (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer\n{\n    CVPixelBufferLockBaseAddress(imageBuffer, 0);\n\n    size_t width = CVPixelBufferGetWidth(imageBuffer);\n    size_t height = CVPixelBufferGetHeight(imageBuffer);\n    size_t rgbBytesPerPixel = 4;\n    size_t rgbBytesPerRow = width * rgbBytesPerPixel;\n\n    uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);\n    uchar_t *rgbData = malloc(rgbBytesPerRow * height);\n\n    convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow);\n\n    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();\n    CGContextRef context =\n        CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast);\n    CGImageRef cgImage = CGBitmapContextCreateImage(context);\n\n    UIImage *result = [UIImage imageWithCGImage:cgImage];\n\n    CGImageRelease(cgImage);\n    CGContextRelease(context);\n    CGColorSpaceRelease(colorSpace);\n    free(rgbData);\n\n    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);\n\n    return 
result;\n}\n\n- (NSString *)methodName\n{\n    return @\"VideoInput\";\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCTimedTask.h",
    "content": "//\n//  SCTimedTask.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/2/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <AVFoundation/AVFoundation.h>\n#import <Foundation/Foundation.h>\n\n/*\n Block based timed task\n */\n@interface SCTimedTask : NSObject\n\n@property (nonatomic, assign) CMTime targetTime;\n@property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond);\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithTargetTime:(CMTime)targetTime\n                              task:(void (^)(CMTime relativePresentationTime,\n                                             CGFloat sessionStartTimeDelayInSecond))task;\n\n- (NSString *)description;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCTimedTask.m",
    "content": "//\n//  SCTimedTask.m\n//  Snapchat\n//\n//  Created by Michel Loenngren on 4/2/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCTimedTask.h\"\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n@implementation SCTimedTask\n\n- (instancetype)initWithTargetTime:(CMTime)targetTime\n                              task:\n                                  (void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task\n{\n    if (self = [super init]) {\n        _targetTime = targetTime;\n        _task = task;\n    }\n    return self;\n}\n\n- (NSString *)description\n{\n    return [NSString\n        sc_stringWithFormat:@\"<%@: %p, targetTime: %lld>\", NSStringFromClass([self class]), self, _targetTime.value];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/SCVideoCaptureSessionInfo.h",
    "content": "//\n//  SCVideoCaptureSessionInfo.h\n//  Snapchat\n//\n//  Created by Michel Loenngren on 3/27/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCFoundation/NSString+SCFormat.h>\n\n#import <CoreMedia/CoreMedia.h>\n#import <Foundation/Foundation.h>\n\ntypedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) {\n    SCManagedVideoCapturerInfoAudioQueueError,\n    SCManagedVideoCapturerInfoAssetWriterError,\n    SCManagedVideoCapturerInfoAudioSessionError,\n    SCManagedVideoCapturerInfoAudioQueueRetrySuccess,\n    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue,\n    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware\n};\n\ntypedef u_int32_t sc_managed_capturer_recording_session_t;\n\n/*\n Container object holding information about the\n current recording session.\n */\ntypedef struct {\n    CMTime startTime;\n    CMTime endTime;\n    CMTime duration;\n    sc_managed_capturer_recording_session_t sessionId;\n} SCVideoCaptureSessionInfo;\n\nstatic inline SCVideoCaptureSessionInfo SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime,\n                                                                      sc_managed_capturer_recording_session_t sessionId)\n{\n    SCVideoCaptureSessionInfo session;\n    session.startTime = startTime;\n    session.endTime = endTime;\n    if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) {\n        session.duration = CMTimeSubtract(endTime, startTime);\n    } else {\n        session.duration = kCMTimeInvalid;\n    }\n    session.sessionId = sessionId;\n    return session;\n}\n\nstatic inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo)\n{\n    if (CMTIME_IS_VALID(sessionInfo.startTime)) {\n        if (CMTIME_IS_VALID(sessionInfo.endTime)) {\n            return CMTimeGetSeconds(sessionInfo.duration);\n        }\n        return CACurrentMediaTime() - 
CMTimeGetSeconds(sessionInfo.startTime);\n    }\n    return 0;\n}\n\nstatic inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label)\n{\n    if (CMTIME_IS_VALID(time)) {\n        return [NSString sc_stringWithFormat:@\"%@: %f\", label, CMTimeGetSeconds(time)];\n    } else {\n        return [NSString sc_stringWithFormat:@\"%@: Invalid\", label];\n    }\n}\n\nstatic inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo)\n{\n    NSMutableString *description = [NSMutableString new];\n    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @\"StartTime\")];\n    [description appendString:@\", \"];\n    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @\"EndTime\")];\n    [description appendString:@\", \"];\n    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @\"Duration\")];\n    [description appendString:@\", \"];\n    [description appendString:[NSString sc_stringWithFormat:@\"Id: %u\", sessionInfo.sessionId]];\n\n    return [description copy];\n}\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureBaseState.h",
    "content": "//\n//  SCCaptureBaseState.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCCaptureStateDelegate.h\"\n#import \"SCCaptureStateMachineBookKeeper.h\"\n#import \"SCCaptureStateUtil.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCManagedCaptureDevice.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureResource;\n\n@class SCCapturerToken;\n\n@class SCAudioConfiguration;\n\n@class SCQueuePerformer;\n/*\n Every state machine state needs to inherent SCCaptureBaseState to have the APIs. State machine state in general will\n only implement APIs which are legal for itself. If illegal APIs are invoked, SCCaptureBaseState will handle it.\n The intended behavior:\n 1) crash using SCAssert in Debug build,\n 2) ignore api call, and log the call, for alpha/master/production.\n 3) in the future, we will introduce dangerous API call concept, and restart camera in such case, to avoid bad state.\n\n Every state machine state is going to be built to follow functional programming as more as possible. The shared\n resources between them will be passed into the API via SCCaptureResource.\n */\n\n@interface SCCaptureBaseState : NSObject\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n/* The following API will be invoked at the moment state context promote the state to be current state. 
State use this\n * chance to do something, such as start recording for recording state.\n */\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context;\n\n- (SCCaptureStateMachineStateId)stateId;\n\n- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                                   resource:(SCCaptureResource *)resource\n                          completionHandler:(dispatch_block_t)completionHandler\n                                    context:(NSString *)context;\n\n- (void)startRunningWithCapturerToken:(SCCapturerToken *)token\n                             resource:(SCCaptureResource *)resource\n                    completionHandler:(dispatch_block_t)completionHandler\n                              context:(NSString *)context;\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                            resource:(SCCaptureResource *)resource\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context;\n\n- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource\n                     audioConfiguration:(SCAudioConfiguration *)configuration\n                                context:(NSString *)context;\n\n- (void)startRecordingWithResource:(SCCaptureResource *)resource\n                audioConfiguration:(SCAudioConfiguration *)configuration\n                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                       maxDuration:(NSTimeInterval)maxDuration\n                           fileURL:(NSURL *)fileURL\n                  captureSessionID:(NSString *)captureSessionID\n                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                           context:(NSString 
*)context;\n\n- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;\n\n- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;\n\n- (void)captureStillImageWithResource:(SCCaptureResource *)resource\n                          aspectRatio:(CGFloat)aspectRatio\n                     captureSessionID:(NSString *)captureSessionID\n                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                              context:(NSString *)context;\n\n- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration\n                              resource:(SCCaptureResource *)resource\n                               context:(NSString *)context;\n\n- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler\n                             resource:(SCCaptureResource *)resource\n                              context:(NSString *)context;\n\n@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper;\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureBaseState.m",
    "content": "//\n//  SCCaptureBaseState.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import \"SCCaptureStateMachineBookKeeper.h\"\n#import \"SCCapturerToken.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@implementation SCCaptureBaseState {\n    SCCaptureStateMachineBookKeeper *_bookKeeper;\n    SCQueuePerformer *_performer;\n    __weak id<SCCaptureStateDelegate> _delegate;\n}\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super init];\n    if (self) {\n        SCAssert(performer, @\"\");\n        SCAssert(bookKeeper, @\"\");\n        _bookKeeper = bookKeeper;\n        _performer = performer;\n        _delegate = delegate;\n    }\n    return self;\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureBaseStateId;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"didBecomeCurrentState\" context:context];\n}\n\n- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                                   resource:(SCCaptureResource *)resource\n                          completionHandler:(dispatch_block_t)completionHandler\n                                    context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"initializeCaptureWithDevicePosition\" context:context];\n}\n\n- (void)startRunningWithCapturerToken:(SCCapturerToken *)token\n                             resource:(SCCaptureResource *)resource\n                    
completionHandler:(dispatch_block_t)completionHandler\n                              context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"startRunningWithCapturerToken\" context:context];\n}\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                            resource:(SCCaptureResource *)resource\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token\n                                                                           completionHandler:completionHandler\n                                                                                     context:context];\n    // TODO: Fix CCAM-14450\n    // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450\n    // It is caused by switching from scanning state to stop running state when the view is disappearing in the scanning\n    // state, which can be reproduced by triggering scanning and then switch to maps page.\n    // We remove SCAssert to ingore the crashes in master branch and will find a solution for the illegal call for the\n    // state machine later\n\n    if (self.stateId != SCCaptureScanningStateId) {\n        SCAssert(!actuallyStopped, @\"actuallyStopped in state: %@ with context: %@\", SCCaptureStateName([self stateId]),\n                 context);\n    } else {\n        SCLogCaptureStateMachineInfo(@\"actuallyStopped:%d in state: %@ with context: %@\", actuallyStopped,\n                                     SCCaptureStateName([self stateId]), context);\n    }\n\n    if (actuallyStopped) {\n        [_delegate currentState:self\n            requestToTransferToNewState:SCCaptureInitializedStateId\n                                payload:nil\n                                context:context];\n    
}\n}\n\n- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource\n                     audioConfiguration:(SCAudioConfiguration *)configuration\n                                context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"prepareForRecordingWithResource\" context:context];\n}\n\n- (void)startRecordingWithResource:(SCCaptureResource *)resource\n                audioConfiguration:(SCAudioConfiguration *)configuration\n                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                       maxDuration:(NSTimeInterval)maxDuration\n                           fileURL:(NSURL *)fileURL\n                  captureSessionID:(NSString *)captureSessionID\n                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                           context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"startRecordingWithResource\" context:context];\n}\n\n- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"stopRecordingWithResource\" context:context];\n}\n\n- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"cancelRecordingWithResource\" context:context];\n}\n\n- (void)captureStillImageWithResource:(SCCaptureResource *)resource\n                          aspectRatio:(CGFloat)aspectRatio\n                     captureSessionID:(NSString *)captureSessionID\n                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                              context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"captureStillImageWithResource\" context:context];\n}\n\n- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration\n                              resource:(SCCaptureResource *)resource\n            
                   context:(NSString *)context\n{\n    [self _handleBaseStateBehavior:@\"startScanWithScanConfiguration\" context:context];\n}\n\n- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler\n                             resource:(SCCaptureResource *)resource\n                              context:(NSString *)context\n{\n    // Temporary solution until IDT-12520 is resolved.\n    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];\n    //[self _handleBaseStateBehavior:@\"stopScanWithCompletionHandler\"];\n}\n\n- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context\n{\n    [_bookKeeper state:[self stateId]\n        illegalAPIcalled:illegalAPIName\n               callStack:[NSThread callStackSymbols]\n                 context:context];\n    if (SCIsDebugBuild()) {\n        SCAssertFail(@\"illegal API invoked on capture state machine\");\n    }\n}\n\n- (SCCaptureStateMachineBookKeeper *)bookKeeper\n{\n    return _bookKeeper;\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateDelegate.h",
    "content": "//\n//  SCCaptureStateDelegate.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/27/17.\n//\n//\n\n#import \"SCCaptureStateUtil.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCCaptureBaseState;\n@class SCStateTransitionPayload;\n/*\n The state machine state delegate is used by state machine states to hint to the system that \"I am done, now transfer\n to other state\".\n\n Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate for\n those states.\n */\n\n@protocol SCCaptureStateDelegate <NSObject>\n\n- (void)currentState:(SCCaptureBaseState *)state\n    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState\n                        payload:(SCStateTransitionPayload *)payload\n                        context:(NSString *)context;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h",
    "content": "//\n//  SCCaptureStateTransitionBookKeeper.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/27/17.\n//\n//\n\n#import \"SCCaptureStateUtil.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n Book keeper is used to record every state transition, and every illegal API call.\n */\n\n@interface SCCaptureStateMachineBookKeeper : NSObject\n\n- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId\n                         to:(SCCaptureStateMachineStateId)toId\n                    context:(NSString *)context;\n\n- (void)state:(SCCaptureStateMachineStateId)captureState\n    illegalAPIcalled:(NSString *)illegalAPIName\n           callStack:(NSArray<NSString *> *)callStack\n             context:(NSString *)context;\n\n- (void)logAPICalled:(NSString *)apiName context:(NSString *)context;\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m",
    "content": "//\n//  SCCaptureStateTransitionBookKeeper.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/27/17.\n//\n//\n\n#import \"SCCaptureStateMachineBookKeeper.h\"\n\n#import \"SCCaptureStateUtil.h\"\n#import \"SCLogger+Camera.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCLogger/SCCameraMetrics.h>\n\n@interface SCCaptureStateMachineBookKeeper () {\n    NSDate *_lastStateStartTime;\n}\n@end\n\n@implementation SCCaptureStateMachineBookKeeper\n\n- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId\n                         to:(SCCaptureStateMachineStateId)toId\n                    context:(NSString *)context\n{\n    NSDate *date = [NSDate date];\n    SCLogCaptureStateMachineInfo(@\"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \\n\",\n                                 SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime],\n                                 SCCaptureStateName(toId), context, date);\n    _lastStateStartTime = date;\n}\n\n- (void)state:(SCCaptureStateMachineStateId)captureState\n    illegalAPIcalled:(NSString *)illegalAPIName\n           callStack:(NSArray<NSString *> *)callStack\n             context:(NSString *)context\n\n{\n    SCAssert(callStack, @\"call stack empty\");\n    SCAssert(illegalAPIName, @\"\");\n    SCAssert(context, @\"Context is empty\");\n    SCLogCaptureStateMachineError(@\"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \\n\",\n                                  SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack);\n    NSArray<NSString *> *reportedArray =\n        [callStack count] > 15 ? 
[callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack;\n    [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall\n                             parameters:@{\n                                 @\"state\" : SCCaptureStateName(captureState),\n                                 @\"API\" : illegalAPIName,\n                                 @\"call_stack\" : reportedArray,\n                                 @\"context\" : context\n                             }];\n}\n\n- (void)logAPICalled:(NSString *)apiName context:(NSString *)context\n{\n    SCAssert(apiName, @\"API name is empty\");\n    SCAssert(context, @\"Context is empty\");\n    SCLogCaptureStateMachineInfo(@\"api: %@ context: %@\", apiName, context);\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h",
    "content": "//\n//  SCCaptureStateMachineContext.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/18/17.\n//\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCManagedCaptureDevice.h\"\n\n#import <SCAudio/SCAudioConfiguration.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n SCCaptureStateMachineContext is the central piece that glues all states together.\n\n It will pass API calls to the current state.\n\n The classic state machine design pattern:\n https://en.wikipedia.org/wiki/State_pattern\n\n It is also the delegate for the states it manages, so that those states can tell stateMachineContext to transit to next\n state.\n */\n\n@class SCCaptureResource;\n\n@class SCCapturerToken;\n\n@interface SCCaptureStateMachineContext : NSObject\n\n- (instancetype)initWithResource:(SCCaptureResource *)resource;\n\n- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                                        completionHandler:(dispatch_block_t)completionHandler\n                                                  context:(NSString *)context;\n\n- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler;\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context;\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                               after:(NSTimeInterval)delay\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context;\n\n- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration\n                                                        context:(NSString *)context;\n\n- 
(void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                      audioConfiguration:(SCAudioConfiguration *)configuration\n                             maxDuration:(NSTimeInterval)maxDuration\n                                 fileURL:(NSURL *)fileURL\n                        captureSessionID:(NSString *)captureSessionID\n                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                                 context:(NSString *)context;\n\n- (void)stopRecordingWithContext:(NSString *)context;\n\n- (void)cancelRecordingWithContext:(NSString *)context;\n\n- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                               context:(NSString *)context;\n\n#pragma mark - Scanning\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;\n- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m",
    "content": "//\n//  SCCaptureStateMachineContext.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/18/17.\n//\n//\n\n#import \"SCCaptureStateMachineContext.h\"\n\n#import \"SCCaptureBaseState.h\"\n#import \"SCCaptureImageState.h\"\n#import \"SCCaptureImageWhileRecordingState.h\"\n#import \"SCCaptureInitializedState.h\"\n#import \"SCCaptureRecordingState.h\"\n#import \"SCCaptureResource.h\"\n#import \"SCCaptureRunningState.h\"\n#import \"SCCaptureScanningState.h\"\n#import \"SCCaptureStateMachineBookKeeper.h\"\n#import \"SCCaptureStateUtil.h\"\n#import \"SCCaptureUninitializedState.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCCapturerToken.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <SCAudio/SCAudioConfiguration.h>\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTrace.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n\n@interface SCCaptureStateMachineContext () <SCCaptureStateDelegate> {\n    SCQueuePerformer *_queuePerformer;\n\n    // Cache all the states.\n    NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> *_states;\n    SCCaptureBaseState *_currentState;\n    SCCaptureStateMachineBookKeeper *_bookKeeper;\n    SCCaptureResource *_captureResource;\n}\n@end\n\n@implementation SCCaptureStateMachineContext\n\n- (instancetype)initWithResource:(SCCaptureResource *)resource\n{\n    self = [super init];\n    if (self) {\n        SCAssert(resource, @\"\");\n        SCAssert(resource.queuePerformer, @\"\");\n        _captureResource = resource;\n        _queuePerformer = resource.queuePerformer;\n        _states = [[NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> alloc] init];\n        _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init];\n        [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext];\n    }\n    return self;\n}\n\n- 
(void)_setCurrentState:(SCCaptureStateMachineStateId)stateId\n                 payload:(SCStateTransitionPayload *)payload\n                 context:(NSString *)context\n{\n    switch (stateId) {\n    case SCCaptureUninitializedStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureUninitializedState *uninitializedState =\n                [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer\n                                                            bookKeeper:_bookKeeper\n                                                              delegate:self];\n            [_states setObject:uninitializedState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureInitializedStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureInitializedState *initializedState =\n                [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer\n                                                          bookKeeper:_bookKeeper\n                                                            delegate:self];\n            [_states setObject:initializedState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureRunningStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureRunningState *runningState =\n                [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];\n            [_states setObject:runningState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureImageStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureImageState *captureImageState =\n                [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];\n            [_states 
setObject:captureImageState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureImageWhileRecordingStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureImageWhileRecordingState *captureImageWhileRecordingState =\n                [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer\n                                                                  bookKeeper:_bookKeeper\n                                                                    delegate:self];\n            [_states setObject:captureImageWhileRecordingState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureScanningStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureScanningState *scanningState =\n                [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];\n            [_states setObject:scanningState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    case SCCaptureRecordingStateId:\n        if (![_states objectForKey:@(stateId)]) {\n            SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer\n                                                                                              bookKeeper:_bookKeeper\n                                                                                                delegate:self];\n            [_states setObject:recordingState forKey:@(stateId)];\n        }\n        _currentState = [_states objectForKey:@(stateId)];\n        break;\n    default:\n        SCAssert(NO, @\"illegal state Id\");\n        break;\n    }\n    [_currentState didBecomeCurrentState:payload resource:_captureResource context:context];\n}\n\n- 
(void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition\n                                        completionHandler:(dispatch_block_t)completionHandler\n                                                  context:(NSString *)context\n{\n    [SCCaptureWorker setupCapturePreviewLayerController];\n\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState initializeCaptureWithDevicePosition:devicePosition\n                                                  resource:_captureResource\n                                         completionHandler:completionHandler\n                                                   context:context];\n    }];\n}\n\n- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler\n{\n    [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@\"\"];\n\n    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState startRunningWithCapturerToken:token\n                                            resource:_captureResource\n                                   completionHandler:completionHandler\n                                             context:context];\n    }];\n\n    return token;\n}\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState stopRunningWithCapturerToken:token\n                                           
resource:_captureResource\n                                  completionHandler:completionHandler\n                                            context:context];\n    }];\n}\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                               after:(NSTimeInterval)delay\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState stopRunningWithCapturerToken:token\n                                           resource:_captureResource\n                                  completionHandler:completionHandler\n                                            context:context];\n    }\n                       after:delay];\n}\n\n- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration\n                                                        context:(NSString *)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState prepareForRecordingWithResource:_captureResource\n                                    audioConfiguration:configuration\n                                               context:context];\n    }];\n}\n\n- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                      audioConfiguration:(SCAudioConfiguration *)configuration\n                             maxDuration:(NSTimeInterval)maxDuration\n                                 fileURL:(NSURL *)fileURL\n                        captureSessionID:(NSString *)captureSessionID\n                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                                 context:(NSString 
*)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState startRecordingWithResource:_captureResource\n                               audioConfiguration:configuration\n                                   outputSettings:outputSettings\n                                      maxDuration:maxDuration\n                                          fileURL:fileURL\n                                 captureSessionID:captureSessionID\n                                completionHandler:completionHandler\n                                          context:context];\n    }];\n}\n\n- (void)stopRecordingWithContext:(NSString *)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState stopRecordingWithResource:_captureResource context:context];\n    }];\n}\n\n- (void)cancelRecordingWithContext:(NSString *)context\n{\n    SCTraceResumeToken resumeToken = SCTraceCapture();\n    [_queuePerformer perform:^{\n        SCTraceResume(resumeToken);\n        [_currentState cancelRecordingWithResource:_captureResource context:context];\n    }];\n}\n\n- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio\n                                      captureSessionID:(NSString *)captureSessionID\n                                     completionHandler:\n                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                               context:(NSString *)context\n{\n    [_queuePerformer perform:^() {\n        [_currentState captureStillImageWithResource:_captureResource\n                                         aspectRatio:aspectRatio\n                                    captureSessionID:captureSessionID\n                                   completionHandler:completionHandler\n                       
                      context:context];\n    }];\n}\n\n- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context\n{\n    [_queuePerformer perform:^() {\n        [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context];\n    }];\n}\n\n- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context\n{\n    [_queuePerformer perform:^() {\n        [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context];\n    }];\n}\n\n- (void)currentState:(SCCaptureBaseState *)state\n    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState\n                        payload:(SCStateTransitionPayload *)payload\n                        context:(NSString *)context\n{\n    SCAssertPerformer(_queuePerformer);\n    SCAssert(_currentState == state, @\"state: %@ newState: %@ context:%@\", SCCaptureStateName([state stateId]),\n             SCCaptureStateName(newState), context);\n    if (payload) {\n        SCAssert(payload.fromState == [state stateId], @\"From state id check\");\n        SCAssert(payload.toState == newState, @\"To state id check\");\n    }\n\n    if (_currentState != state) {\n        return;\n    }\n\n    [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context];\n    [self _setCurrentState:newState payload:payload context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateUtil.h",
    "content": "//\n//  SCCaptureStateUtil.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/27/17.\n//\n//\n\n#import \"SCLogger+Camera.h\"\n\n#import <SCBase/SCMacros.h>\n#import <SCFoundation/SCLog.h>\n\n#import <Foundation/Foundation.h>\n\n#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@\"[SCCaptureStateMachine] \" fmt, ##__VA_ARGS__)\n#define SCLogCaptureStateMachineError(fmt, ...) SCLogCoreCameraError(@\"[SCCaptureStateMachine] \" fmt, ##__VA_ARGS__)\n\ntypedef NSNumber SCCaptureStateKey;\n\ntypedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) {\n    SCCaptureBaseStateId = 0,\n    SCCaptureUninitializedStateId,\n    SCCaptureInitializedStateId,\n    SCCaptureImageStateId,\n    SCCaptureImageWhileRecordingStateId,\n    SCCaptureRunningStateId,\n    SCCaptureRecordingStateId,\n    SCCaptureScanningStateId,\n    SCCaptureStateMachineStateIdCount\n};\n\nSC_EXTERN_C_BEGIN\n\nNSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId);\n\nSC_EXTERN_C_END\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCCaptureStateUtil.m",
    "content": "//\n//  SCCaptureStateUtil.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/27/17.\n//\n//\n\n#import \"SCCaptureStateUtil.h\"\n\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCAssertWrapper.h>\n\nNSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId)\n{\n    switch (stateId) {\n    case SCCaptureBaseStateId:\n        return @\"SCCaptureBaseStateId\";\n    case SCCaptureUninitializedStateId:\n        return @\"SCCaptureUninitializedStateId\";\n    case SCCaptureInitializedStateId:\n        return @\"SCCaptureInitializedStateId\";\n    case SCCaptureImageStateId:\n        return @\"SCCaptureImageStateId\";\n    case SCCaptureImageWhileRecordingStateId:\n        return @\"SCCaptureImageWhileRecordingStateId\";\n    case SCCaptureRunningStateId:\n        return @\"SCCaptureRunningStateId\";\n    case SCCaptureRecordingStateId:\n        return @\"SCCaptureRecordingStateId\";\n    case SCCaptureScanningStateId:\n        return @\"SCCaptureScanningStateId\";\n    default:\n        SCCAssert(NO, @\"illegate state id\");\n        break;\n    }\n    return @\"SCIllegalStateId\";\n}\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/SCManagedCapturerLogging.h",
    "content": "//\n//  SCManagedCapturerLogging.h\n//  Snapchat\n//\n//  Created by Lin Jia on 11/13/17.\n//\n\n#import <SCFoundation/SCLog.h>\n\n#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@\"[SCManagedCapturer] \" fmt, ##__VA_ARGS__)\n#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@\"[SCManagedCapturer] \" fmt, ##__VA_ARGS__)\n#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@\"[SCManagedCapturer] \" fmt, ##__VA_ARGS__)\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageState.h",
    "content": "//\n//  SCCaptureImageState.h\n//  Snapchat\n//\n//  Created by Lin Jia on 1/8/18.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer;\n\n@interface SCCaptureImageState : SCCaptureBaseState\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageState.m",
    "content": "//\n//  SCCaptureImageState.m\n//  Snapchat\n//\n//  Created by Lin Jia on 1/8/18.\n//\n\n#import \"SCCaptureImageState.h\"\n\n#import \"SCCaptureImageStateTransitionPayload.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@interface SCCaptureImageState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n@end\n\n@implementation SCCaptureImageState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCAssert(payload.toState == [self stateId], @\"\");\n    if (![payload isKindOfClass:[SCCaptureImageStateTransitionPayload class]]) {\n        SCAssertFail(@\"wrong payload pass in\");\n        [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];\n        return;\n    }\n    SCCaptureImageStateTransitionPayload *captureImagePayload = (SCCaptureImageStateTransitionPayload *)payload;\n\n    [SCCaptureWorker\n        captureStillImageWithCaptureResource:resource\n                                 aspectRatio:captureImagePayload.aspectRatio\n                            captureSessionID:captureImagePayload.captureSessionID\n                      shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]\n                           
completionHandler:captureImagePayload.block\n                                     context:context];\n\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureImageStateId;\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h",
    "content": "//\n//  SCCaptureImageStateTransitionPayload.h\n//  Snapchat\n//\n//  Created by Lin Jia on 1/9/18.\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureImageStateTransitionPayload : SCStateTransitionPayload\n\n@property (nonatomic, readonly, strong) NSString *captureSessionID;\n\n@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;\n\n@property (nonatomic, readonly, assign) CGFloat aspectRatio;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                 captureSessionId:(NSString *)captureSessionID\n                      aspectRatio:(CGFloat)aspectRatio\n                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m",
    "content": "//\n//  SCCaptureImageStateTransitionPayload.m\n//  Snapchat\n//\n//  Created by Lin Jia on 1/9/18.\n//\n\n#import \"SCCaptureImageStateTransitionPayload.h\"\n\n@implementation SCCaptureImageStateTransitionPayload\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                 captureSessionId:(NSString *)captureSessionID\n                      aspectRatio:(CGFloat)aspectRatio\n                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block\n{\n    self = [super initWithFromState:fromState toState:toState];\n    if (self) {\n        _captureSessionID = captureSessionID;\n        _aspectRatio = aspectRatio;\n        _block = block;\n    }\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h",
    "content": "//\n//  SCCaptureImageWhileRecordingState.h\n//  Snapchat\n//\n//  Created by Sun Lei on 22/02/2018.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer;\n\n@interface SCCaptureImageWhileRecordingState : SCCaptureBaseState\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m",
    "content": "//\n//  SCCaptureImageWhileRecordingState.m\n//  Snapchat\n//\n//  Created by Sun Lei on 22/02/2018.\n//\n\n#import \"SCCaptureImageWhileRecordingState.h\"\n\n#import \"SCCaptureImageWhileRecordingStateTransitionPayload.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@interface SCCaptureImageWhileRecordingState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n@end\n\n@implementation SCCaptureImageWhileRecordingState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureImageWhileRecordingStateId;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCAssert(payload.fromState == SCCaptureRecordingStateId, @\"\");\n    SCAssert(payload.toState == [self stateId], @\"\");\n    SCAssert([payload isKindOfClass:[SCCaptureImageWhileRecordingStateTransitionPayload class]], @\"\");\n    ;\n    SCCaptureImageWhileRecordingStateTransitionPayload *captureImagePayload =\n        (SCCaptureImageWhileRecordingStateTransitionPayload *)payload;\n\n    @weakify(self);\n    sc_managed_capturer_capture_still_image_completion_handler_t block =\n        ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) {\n            captureImagePayload.block(fullScreenImage, metadata, error, state);\n           
 [_performer perform:^{\n                @strongify(self);\n                [self _cancelRecordingWithContext:context resource:resource];\n            }];\n        };\n\n    [SCCaptureWorker\n        captureStillImageWithCaptureResource:resource\n                                 aspectRatio:captureImagePayload.aspectRatio\n                            captureSessionID:captureImagePayload.captureSessionID\n                      shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]\n                           completionHandler:block\n                                     context:context];\n\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n}\n\n- (void)_cancelRecordingWithContext:(NSString *)context resource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_performer);\n\n    [SCCaptureWorker cancelRecordingWithCaptureResource:resource];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h",
    "content": "//\n//  SCCaptureImageWhileRecordingStateTransitionPayload.h\n//  Snapchat\n//\n//  Created by Sun Lei on 22/02/2018.\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureImageWhileRecordingStateTransitionPayload : SCStateTransitionPayload\n\n@property (nonatomic, readonly, strong) NSString *captureSessionID;\n\n@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;\n\n@property (nonatomic, readonly, assign) CGFloat aspectRatio;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                 captureSessionId:(NSString *)captureSessionID\n                      aspectRatio:(CGFloat)aspectRatio\n                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m",
    "content": "//\n//  SCCaptureImageWhileRecordingStateTransitionPayload.m\n//  Snapchat\n//\n//  Created by Sun Lei on 22/02/2018.\n//\n\n#import \"SCCaptureImageWhileRecordingStateTransitionPayload.h\"\n\n@implementation SCCaptureImageWhileRecordingStateTransitionPayload\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                 captureSessionId:(NSString *)captureSessionID\n                      aspectRatio:(CGFloat)aspectRatio\n                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block\n{\n    self = [super initWithFromState:fromState toState:toState];\n    if (self) {\n        _captureSessionID = captureSessionID;\n        _aspectRatio = aspectRatio;\n        _block = block;\n    }\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h",
    "content": "//\n//  SCCaptureInitializedState.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 20/12/2017.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer;\n\n@interface SCCaptureInitializedState : SCCaptureBaseState\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m",
    "content": "//\n//  SCCaptureInitializedState.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 20/12/2017.\n//\n\n#import \"SCCaptureInitializedState.h\"\n\n#import \"SCCapturerToken.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@interface SCCaptureInitializedState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n\n@end\n\n@implementation SCCaptureInitializedState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    // No op.\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureInitializedStateId;\n}\n\n- (void)startRunningWithCapturerToken:(SCCapturerToken *)token\n                             resource:(SCCaptureResource *)resource\n                    completionHandler:(dispatch_block_t)completionHandler\n                              context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"startRunningAsynchronouslyWithCompletionHandler called. 
token: %@\", token);\n\n    [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];\n\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h",
    "content": "//\n//  SCCaptureRecordingState.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 12/01/2018.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer;\n\n@interface SCCaptureRecordingState : SCCaptureBaseState\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m",
    "content": "//\n//  SCCaptureRecordingState.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 12/01/2018.\n//\n\n#import \"SCCaptureRecordingState.h\"\n\n#import \"SCCaptureImageWhileRecordingStateTransitionPayload.h\"\n#import \"SCCaptureRecordingStateTransitionPayload.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n\n@interface SCCaptureRecordingState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n@end\n\n@implementation SCCaptureRecordingState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    SCAssertPerformer(resource.queuePerformer);\n    SCAssert(payload.toState == [self stateId], @\"\");\n    if (![payload isKindOfClass:[SCCaptureRecordingStateTransitionPayload class]]) {\n        SCAssertFail(@\"wrong payload pass in\");\n        [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];\n        return;\n    }\n\n    SCCaptureRecordingStateTransitionPayload *recordingPayload = (SCCaptureRecordingStateTransitionPayload *)payload;\n    [SCCaptureWorker startRecordingWithCaptureResource:resource\n                                        outputSettings:recordingPayload.outputSettings\n                                    audioConfiguration:recordingPayload.configuration\n                    
                       maxDuration:recordingPayload.maxDuration\n                                               fileURL:recordingPayload.fileURL\n                                      captureSessionID:recordingPayload.captureSessionID\n                                     completionHandler:recordingPayload.block];\n}\n\n- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_performer);\n\n    [SCCaptureWorker stopRecordingWithCaptureResource:resource];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_performer);\n\n    [SCCaptureWorker cancelRecordingWithCaptureResource:resource];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureRecordingStateId;\n}\n\n- (void)captureStillImageWithResource:(SCCaptureResource *)resource\n                          aspectRatio:(CGFloat)aspectRatio\n                     captureSessionID:(NSString *)captureSessionID\n                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                              context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCCaptureImageWhileRecordingStateTransitionPayload 
*payload = [\n        [SCCaptureImageWhileRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRecordingStateId\n                                                                             toState:SCCaptureImageWhileRecordingStateId\n                                                                    captureSessionId:captureSessionID\n                                                                         aspectRatio:aspectRatio\n                                                                   completionHandler:completionHandler];\n    [_delegate currentState:self\n        requestToTransferToNewState:SCCaptureImageWhileRecordingStateId\n                            payload:payload\n                            context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h",
    "content": "//\n//  SCCaptureRecordingStateTransitionPayload.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 12/01/2018.\n//\n\n#import \"SCCaptureCommon.h\"\n#import \"SCManagedVideoCapturerOutputSettings.h\"\n#import \"SCStateTransitionPayload.h\"\n\n#import <SCAudio/SCAudioConfiguration.h>\n\n#import <Foundation/Foundation.h>\n\n@interface SCCaptureRecordingStateTransitionPayload : SCStateTransitionPayload\n\n@property (nonatomic, readonly, strong) SCManagedVideoCapturerOutputSettings *outputSettings;\n\n@property (nonatomic, readonly, strong) SCAudioConfiguration *configuration;\n\n@property (nonatomic, readonly, assign) NSTimeInterval maxDuration;\n\n@property (nonatomic, readonly, strong) NSURL *fileURL;\n\n@property (nonatomic, readonly, strong) NSString *captureSessionID;\n\n@property (nonatomic, readonly, copy) sc_managed_capturer_start_recording_completion_handler_t block;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n               audioConfiguration:(SCAudioConfiguration *)configuration\n                      maxDuration:(NSTimeInterval)maxDuration\n                          fileURL:(NSURL *)fileURL\n                 captureSessionID:(NSString *)captureSessionID\n                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m",
    "content": "//\n//  SCCaptureRecordingStateTransitionPayload.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 12/01/2018.\n//\n\n#import \"SCCaptureRecordingStateTransitionPayload.h\"\n\n@implementation SCCaptureRecordingStateTransitionPayload\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState\n                          toState:(SCCaptureStateMachineStateId)toState\n                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n               audioConfiguration:(SCAudioConfiguration *)configuration\n                      maxDuration:(NSTimeInterval)maxDuration\n                          fileURL:(NSURL *)fileURL\n                 captureSessionID:(NSString *)captureSessionID\n                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block\n{\n    self = [super initWithFromState:fromState toState:toState];\n    if (self) {\n        _outputSettings = outputSettings;\n        _configuration = configuration;\n        _maxDuration = maxDuration;\n        _fileURL = fileURL;\n        _captureSessionID = captureSessionID;\n        _block = block;\n    }\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRunningState.h",
    "content": "//\n//  SCCaptureRunningState.h\n//  Snapchat\n//\n//  Created by Jingtian Yang on 08/01/2018.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n@class SCQueuePerformer;\n\n@interface SCCaptureRunningState : SCCaptureBaseState\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureRunningState.m",
    "content": "//\n//  SCCaptureRunningState.m\n//  Snapchat\n//\n//  Created by Jingtian Yang on 08/01/2018.\n//\n\n#import \"SCCaptureRunningState.h\"\n\n#import \"SCCaptureImageStateTransitionPayload.h\"\n#import \"SCCaptureRecordingStateTransitionPayload.h\"\n#import \"SCCaptureWorker.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n#import \"SCScanConfiguration.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCCaptureRunningState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n\n@end\n\n@implementation SCCaptureRunningState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    // No op.\n}\n\n- (void)captureStillImageWithResource:(SCCaptureResource *)resource\n                          aspectRatio:(CGFloat)aspectRatio\n                     captureSessionID:(NSString *)captureSessionID\n                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                              context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCCaptureImageStateTransitionPayload *payload =\n        [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId\n                                                                
toState:SCCaptureImageStateId\n                                                       captureSessionId:captureSessionID\n                                                            aspectRatio:aspectRatio\n                                                      completionHandler:completionHandler];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureImageStateId payload:payload context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureRunningStateId;\n}\n\n- (void)startRunningWithCapturerToken:(SCCapturerToken *)token\n                             resource:(SCCaptureResource *)resource\n                    completionHandler:(dispatch_block_t)completionHandler\n                              context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"startRunningAsynchronouslyWithCompletionHandler called. token: %@\", token);\n    [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token\n                            resource:(SCCaptureResource *)resource\n                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler\n                             context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_performer);\n\n    SCLogCapturerInfo(@\"Stop running asynchronously. 
token:%@\", token);\n    if ([[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token\n                                                        completionHandler:completionHandler\n                                                                  context:context]) {\n        [_delegate currentState:self\n            requestToTransferToNewState:SCCaptureInitializedStateId\n                                payload:nil\n                                context:context];\n    }\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration\n                              resource:(SCCaptureResource *)resource\n                               context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Start scan on preview asynchronously. 
configuration:%@\", configuration);\n    SCAssertPerformer(_performer);\n    [SCCaptureWorker startScanWithScanConfiguration:configuration resource:resource];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureScanningStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource\n                     audioConfiguration:(SCAudioConfiguration *)configuration\n                                context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCTraceODPCompatibleStart(2);\n    [SCCaptureWorker prepareForRecordingWithAudioConfiguration:configuration resource:resource];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)startRecordingWithResource:(SCCaptureResource *)resource\n                audioConfiguration:(SCAudioConfiguration *)configuration\n                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                       maxDuration:(NSTimeInterval)maxDuration\n                           fileURL:(NSURL *)fileURL\n                  captureSessionID:(NSString *)captureSessionID\n                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler\n                           context:(NSString *)context\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssertPerformer(_performer);\n\n    SCCaptureRecordingStateTransitionPayload *payload =\n        [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId\n                                                                    toState:SCCaptureRecordingStateId\n          
                                                   outputSettings:outputSettings\n                                                         audioConfiguration:configuration\n                                                                maxDuration:maxDuration\n                                                                    fileURL:fileURL\n                                                           captureSessionID:captureSessionID\n                                                          completionHandler:completionHandler];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRecordingStateId payload:payload context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    // Intentionally No Op, this will be removed once CCAM-13851 gets resolved.\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureScanningState.h",
    "content": "//\n//  SCCaptureScanningState.h\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 09/01/2018.\n//\n\n#import \"SCCaptureBaseState.h\"\n\n@class SCQueuePerformer;\n\n@interface SCCaptureScanningState : SCCaptureBaseState\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureScanningState.m",
    "content": "//\n//  SCCaptureScanningState.m\n//  Snapchat\n//\n//  Created by Xiaokang Liu on 09/01/2018.\n//\n\n#import \"SCCaptureScanningState.h\"\n\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCCaptureScanningState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n\n@end\n\n@implementation SCCaptureScanningState\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        SCAssert(delegate, @\"\");\n        SCAssert(performer, @\"\");\n        SCAssert(bookKeeper, @\"\");\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    // No op.\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureScanningStateId;\n}\n\n- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler\n                             resource:(SCCaptureResource *)resource\n                              context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"stop scan asynchronously.\");\n    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];\n    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", 
NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context\n{\n    // Intentionally No Op, this will be removed once CCAM-13851 gets resolved.\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h",
    "content": "//\n//  SCCaptureUninitializedState.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureBaseState.h\"\n\n#import <Foundation/Foundation.h>\n\n/*\n State which handles capture initialization, which should be used only once for every app life span.\n*/\n@class SCQueuePerformer;\n\n@interface SCCaptureUninitializedState : SCCaptureBaseState\n\n- (instancetype)init NS_UNAVAILABLE;\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m",
    "content": "//\n//  SCCaptureUninitializedState.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureUninitializedState.h\"\n\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerV1_Private.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n@interface SCCaptureUninitializedState () {\n    __weak id<SCCaptureStateDelegate> _delegate;\n    SCQueuePerformer *_performer;\n}\n\n@end\n\n@implementation SCCaptureUninitializedState\n\n- (instancetype)initWithPerformer:(SCQueuePerformer *)performer\n                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper\n                         delegate:(id<SCCaptureStateDelegate>)delegate\n{\n    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];\n    if (self) {\n        _delegate = delegate;\n        _performer = performer;\n    }\n    return self;\n}\n\n- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload\n                     resource:(SCCaptureResource *)resource\n                      context:(NSString *)context\n{\n    // No op.\n}\n\n- (SCCaptureStateMachineStateId)stateId\n{\n    return SCCaptureUninitializedStateId;\n}\n\n- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition\n                                   resource:(SCCaptureResource *)resource\n                          completionHandler:(dispatch_block_t)completionHandler\n                                    context:(NSString *)context\n{\n    SCAssertPerformer(_performer);\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Setting up with devicePosition:%lu\", (unsigned long)devicePosition);\n\n    // TODO: we need to push completionHandler to a payload and let intializedState handle.\n    [[SCManagedCapturerV1 sharedInstance] setupWithDevicePosition:devicePosition completionHandler:completionHandler];\n\n    
[_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context];\n\n    NSString *apiName =\n        [NSString sc_stringWithFormat:@\"%@/%@\", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];\n    [self.bookKeeper logAPICalled:apiName context:context];\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h",
    "content": "//\n//  SCStateTransitionPayload.h\n//  Snapchat\n//\n//  Created by Lin Jia on 1/8/18.\n//\n\n#import \"SCCaptureStateUtil.h\"\n\n#import <Foundation/Foundation.h>\n\n@interface SCStateTransitionPayload : NSObject\n\n@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId fromState;\n\n@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId toState;\n\nSC_INIT_AND_NEW_UNAVAILABLE\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState;\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m",
    "content": "//\n//  SCStateTransitionPayload.m\n//  Snapchat\n//\n//  Created by Lin Jia on 1/8/18.\n//\n\n#import \"SCStateTransitionPayload.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n\n@implementation SCStateTransitionPayload\n\n- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState\n{\n    self = [super init];\n    if (self) {\n        SCAssert(fromState != toState, @\"\");\n        SCAssert(fromState > SCCaptureBaseStateId && fromState < SCCaptureStateMachineStateIdCount, @\"\");\n        SCAssert(toState > SCCaptureBaseStateId && toState < SCCaptureStateMachineStateIdCount, @\"\");\n        _fromState = fromState;\n        _toState = toState;\n    }\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/UIScreen+Debug.h",
    "content": "//\n//  UIScreen+Debug.h\n//  Snapchat\n//\n//  Created by Derek Peirce on 6/1/17.\n//  Copyright © 2017 Snapchat, Inc. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface UIScreen (Debug)\n\n@end\n"
  },
  {
    "path": "ManagedCapturer/UIScreen+Debug.m",
    "content": "\n#import \"UIScreen+Debug.h\"\n\n#import <SCFoundation/SCAppEnvironment.h>\n#import <SCFoundation/SCLog.h>\n\n#import <objc/runtime.h>\n\n@implementation UIScreen (Debug)\n+ (void)load\n{\n    if (SCIsPerformanceLoggingEnabled()) {\n        static dispatch_once_t once_token;\n        dispatch_once(&once_token, ^{\n            SEL setBrightnessSelector = @selector(setBrightness:);\n            SEL setBrightnessLoggerSelector = @selector(logged_setBrightness:);\n            Method originalMethod = class_getInstanceMethod(self, setBrightnessSelector);\n            Method extendedMethod = class_getInstanceMethod(self, setBrightnessLoggerSelector);\n            method_exchangeImplementations(originalMethod, extendedMethod);\n        });\n    }\n}\n- (void)logged_setBrightness:(CGFloat)brightness\n{\n    SCLogGeneralInfo(@\"Setting brightness from %f to %f\", self.brightness, brightness);\n    [self logged_setBrightness:brightness];\n}\n@end\n"
  },
  {
    "path": "README.md",
    "content": "# Snapchat-Source-Code-Leak\n\nReupload of the Epic Leak by <a href=\"https://github.com/i5xx\">Khaled Alshehri</a>\n\n![Alt text](https://i.imgur.com/3Cz0sof.png \"SnapChat_Leak_2018-by-Jonny-Banana\")\n\nhttps://github.com/i5xx\n\nEnjoy it!!!\n\nhttps://www.youtube.com/watch?v=xEqCbRevNlo\n\n\n\n\n</BR>\n\n<!-- Banner -->\n<div align=\"center\">\n<a href=\"https://www.purevpn.com/order-now.php?aff=44922&amp;a_bid=bbd0f893\" target=\"_blank\" ><img src=\"https://affiliates.purevpn.com/accounts/default1/6hb82wqa2l/bbd0f893.jpg\" alt=\"Best VPN\" title=\"Best VPN\" width=\"728\" height=\"90\" /></a>\n</BR></BR>\n</div>\n\n\n"
  },
  {
    "path": "Resource/SCCaptureResource.h",
    "content": "//\n//  SCCaptureResource.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCManagedCapturerLensAPI.h\"\n#import \"SCManagedCapturerListenerAnnouncer.h\"\n#import \"SCSnapCreationTriggers.h\"\n\n#import <SCCameraFoundation/SCManagedVideoDataSource.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n In general, the function of SCCapturer is to use some resources (such as SCManagedCapturerListenerAnnouncer), to do\n something (such as announce an event).\n\n SCCaptureResource abstract away the \"resources\" part of SCCapturer. It has no APIs itself, it is used to be the\n resource which gets passed arround for capturer V2 state machine.\n */\n@class SCManagedDeviceCapacityAnalyzer;\n\n@class SCManagedCapturePreviewLayerController;\n\n@class ARSession;\n\n@class SCManagedVideoScanner;\n\n@class LSAGLView;\n\n@protocol SCManagedCapturerLSAComponentTrackerAPI;\n\n@class SCManagedStillImageCapturer;\n\n@class SCManagedVideoCapturer;\n\n@class SCQueuePerformer;\n\n@class SCManagedVideoFrameSampler;\n\n@class SCManagedDroppedFramesReporter;\n\n@class SCManagedVideoStreamReporter;\n\n@protocol SCManagedCapturerGLViewManagerAPI;\n\n@class SCCapturerToken;\n\n@class SCSingleFrameStreamCapturer;\n\n@class SCManagedFrontFlashController;\n\n@class SCManagedVideoCapturerHandler;\n\n@class SCManagedStillImageCapturerHandler;\n\n@class SCManagedDeviceCapacityAnalyzerHandler;\n\n@class SCManagedCaptureDeviceDefaultZoomHandler;\n\n@class SCManagedCaptureDeviceHandler;\n\n@class SCBlackCameraNoOutputDetector;\n\n@class SCCaptureSessionFixer;\n\n@protocol SCCaptureFaceDetector;\n\n@protocol SCManagedCapturerLensAPI;\n\n@protocol SCManagedCapturerARImageCaptureProvider;\n\n@class SCManagedCapturerARSessionHandler;\n\n@class SCManagedCaptureDeviceSubjectAreaHandler;\n\n@class SCManagedCaptureSession;\n\n@class SCBlackCameraDetector;\n\n@protocol SCLensProcessingCore;\n\n@protocol 
SCManagedCapturerLensAPI;\n\n@protocol SCManagedCapturePreviewLayerControllerDelegate;\n\ntypedef enum : NSUInteger {\n    SCManagedCapturerStatusUnknown = 0,\n    SCManagedCapturerStatusReady,\n    SCManagedCapturerStatusRunning,\n} SCManagedCapturerStatus;\n\n@protocol SCDeviceMotionProvider\n\n@property (nonatomic, readonly) BOOL isUnderDeviceMotion;\n\n@end\n\n@protocol SCFileInputDecider\n\n@property (nonatomic, readonly) BOOL shouldProcessFileInput;\n@property (nonatomic, readonly) NSURL *fileURL;\n\n@end\n\n@interface SCCaptureResource : NSObject\n\n@property (nonatomic, readwrite, assign) SCManagedCapturerStatus status;\n\n@property (nonatomic, readwrite, strong) SCManagedCapturerState *state;\n\n@property (nonatomic, readwrite, strong) SCManagedCaptureDevice *device;\n\n@property (nonatomic, readwrite, strong) id<SCManagedCapturerLensAPI> lensProcessingCore;\n\n@property (nonatomic, readwrite, strong) id<SCManagedCapturerLensAPIProvider> lensAPIProvider;\n\n@property (nonatomic, readwrite, strong) ARSession *arSession NS_AVAILABLE_IOS(11_0);\n\n@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *arImageCapturer NS_AVAILABLE_IOS(11_0);\n\n@property (nonatomic, readwrite, strong) SCManagedCaptureSession *managedSession;\n\n@property (nonatomic, readwrite, strong) id<SCManagedVideoDataSource> videoDataSource;\n\n@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzer *deviceCapacityAnalyzer;\n\n@property (nonatomic, readwrite, strong) SCManagedVideoScanner *videoScanner;\n\n@property (nonatomic, readwrite, strong) SCManagedCapturerListenerAnnouncer *announcer;\n\n@property (nonatomic, readwrite, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;\n\n@property (nonatomic, readwrite, strong) id<SCManagedCapturerGLViewManagerAPI> videoPreviewGLViewManager;\n\n@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *stillImageCapturer;\n\n@property (nonatomic, readwrite, strong) SCManagedVideoCapturer 
*videoCapturer;\n\n@property (nonatomic, readwrite, strong) SCQueuePerformer *queuePerformer;\n\n@property (nonatomic, readwrite, strong) SCManagedVideoFrameSampler *videoFrameSampler;\n\n@property (nonatomic, readwrite, strong) SCManagedDroppedFramesReporter *droppedFramesReporter;\n\n@property (nonatomic, readwrite, strong) SCManagedVideoStreamReporter *videoStreamReporter; // INTERNAL USE ONLY\n\n@property (nonatomic, readwrite, strong) SCManagedFrontFlashController *frontFlashController;\n\n@property (nonatomic, readwrite, strong) SCManagedVideoCapturerHandler *videoCapturerHandler;\n\n@property (nonatomic, readwrite, strong) SCManagedStillImageCapturerHandler *stillImageCapturerHandler;\n\n@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzerHandler *deviceCapacityAnalyzerHandler;\n\n@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceDefaultZoomHandler *deviceZoomHandler;\n\n@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceHandler *captureDeviceHandler;\n\n@property (nonatomic, readwrite, strong) id<SCCaptureFaceDetector> captureFaceDetector;\n\n@property (nonatomic, readwrite, strong) FBKVOController *kvoController;\n\n@property (nonatomic, readwrite, strong) id<SCManagedCapturerLSAComponentTrackerAPI> lsaTrackingComponentHandler;\n\n@property (nonatomic, readwrite, strong) SCManagedCapturerARSessionHandler *arSessionHandler;\n\n@property (nonatomic, assign) SEL completeARSessionShutdown;\n\n@property (nonatomic, assign) SEL handleAVSessionStatusChange;\n\n@property (nonatomic, assign) BOOL videoRecording;\n\n@property (nonatomic, assign) NSInteger numRetriesFixAVCaptureSessionWithCurrentSession;\n\n@property (nonatomic, assign) BOOL appInBackground;\n\n@property (nonatomic, assign) NSUInteger streamingSequence;\n\n@property (nonatomic, assign) BOOL stillImageCapturing;\n\n@property (nonatomic, readwrite, strong) NSTimer *livenessTimer;\n\n@property (nonatomic, readwrite, strong) NSMutableSet<SCCapturerToken *> 
*tokenSet;\n\n@property (nonatomic, readwrite, strong) SCSingleFrameStreamCapturer *frameCap;\n\n@property (nonatomic, readwrite, strong) id<SCManagedSampleBufferDisplayController> sampleBufferDisplayController;\n\n@property (nonatomic, readwrite, strong) SCSnapCreationTriggers *snapCreationTriggers;\n\n// Different from most properties above, following are main thread properties.\n@property (nonatomic, assign) BOOL allowsZoom;\n\n@property (nonatomic, assign) NSUInteger numRetriesFixInconsistencyWithCurrentSession;\n\n@property (nonatomic, readwrite, strong) NSMutableDictionary *debugInfoDict;\n\n@property (nonatomic, assign) BOOL notificationRegistered;\n\n@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceSubjectAreaHandler *deviceSubjectAreaHandler;\n\n@property (nonatomic, assign) SEL sessionRuntimeError;\n\n@property (nonatomic, assign) SEL livenessConsistency;\n\n// TODO: these properties will be refactored into SCCaptureSessionFixer class\n// The refactor will be in a separate PR\n// Timestamp when _fixAVSessionIfNecessary is called\n@property (nonatomic, assign) NSTimeInterval lastFixSessionTimestamp;\n// Timestamp when session runtime error is handled\n@property (nonatomic, assign) NSTimeInterval lastSessionRuntimeErrorTime;\n// Whether a session-recreation fix has already been scheduled\n@property (nonatomic, assign) BOOL isRecreateSessionFixScheduled;\n\n@property (nonatomic, readwrite, strong) SCCaptureSessionFixer *captureSessionFixer;\n\n@property (nonatomic, readwrite, strong) SCBlackCameraDetector *blackCameraDetector;\n\n@property (nonatomic, readwrite, strong) id<SCDeviceMotionProvider> deviceMotionProvider;\n\n@property (nonatomic, readwrite, strong) id<SCManagedCapturerARImageCaptureProvider> arImageCaptureProvider;\n\n@property (nonatomic, readwrite, strong) id<SCFileInputDecider> fileInputDecider;\n\n@property (nonatomic, readwrite, strong)\n    id<SCManagedCapturePreviewLayerControllerDelegate> previewLayerControllerDelegate;\n@end\n"
  },
  {
    "path": "Resource/SCCaptureResource.m",
    "content": "//\n//  SCCaptureResource.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureResource.h\"\n\n#import \"SCBlackCameraDetector.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedFrontFlashController.h\"\n#import \"SCManagedVideoCapturer.h\"\n\n#import <SCFoundation/SCAssertWrapper.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n\n#import <FBKVOController/FBKVOController.h>\n\n@interface SCCaptureResource () {\n    FBKVOController *_previewHiddenKVO;\n}\n\n@end\n\n@implementation SCCaptureResource\n\n- (SCManagedFrontFlashController *)frontFlashController\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([self.queuePerformer isCurrentPerformer], @\"\");\n    if (!_frontFlashController) {\n        _frontFlashController = [[SCManagedFrontFlashController alloc] init];\n    }\n    return _frontFlashController;\n}\n\n- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)layer\n{\n    SC_GUARD_ELSE_RETURN(layer != _videoPreviewLayer);\n\n    if (_videoPreviewLayer) {\n        [_previewHiddenKVO unobserve:_videoPreviewLayer];\n    }\n    _videoPreviewLayer = layer;\n\n    SC_GUARD_ELSE_RETURN(_videoPreviewLayer);\n\n    if (!_previewHiddenKVO) {\n        _previewHiddenKVO = [[FBKVOController alloc] initWithObserver:self];\n    }\n\n    [_previewHiddenKVO observe:_videoPreviewLayer\n                       keyPath:@keypath(_videoPreviewLayer.hidden)\n                       options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld\n                         block:^(id observer, id object, NSDictionary *change) {\n                             BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue];\n                             BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue];\n                             if (oldValue != newValue) {\n                                 [_blackCameraDetector capturePreviewDidBecomeVisible:!newValue];\n            
                 }\n                         }];\n}\n@end\n"
  },
  {
    "path": "Tweaks/SCCameraTweaks.h",
    "content": "//\n//  SCCameraTweaks.h\n//  Snapchat\n//\n//  Created by Liu Liu on 9/16/15.\n//  Copyright © 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <SCBase/SCMacros.h>\n#import <SCCameraFoundation/SCManagedCaptureDevicePosition.h>\n#import <SCTweakAdditions/SCTweakDefines.h>\n\n#import <Tweaks/FBTweakInline.h>\n\n#import <CoreGraphics/CoreGraphics.h>\n#import <Foundation/Foundation.h>\n\n// Core Camera\n\ntypedef NS_ENUM(NSUInteger, SCManagedCaptureDeviceZoomHandlerType) {\n    SCManagedCaptureDeviceDefaultZoom,\n    SCManagedCaptureDeviceSavitzkyGolayFilter,\n    SCManagedCaptureDeviceLinearInterpolation,\n};\n\ntypedef NS_ENUM(NSUInteger, SCCameraTweaksStrategyType) {\n    SCCameraTweaksStrategyFollowABTest = 0,\n    SCCameraTweaksStrategyOverrideToYes,\n    SCCameraTweaksStrategyOverrideToNo\n};\n\ntypedef NS_ENUM(NSUInteger, SCCameraHandsFreeModeType) {\n    SCCameraHandsFreeModeTypeABTest = 0,\n    SCCameraHandsFreeModeTypeDisabled,\n    SCCameraHandsFreeModeTypeMainOnly,\n    SCCameraHandsFreeModeTypeChatMoveCaptureButton,\n    SCCameraHandsFreeModeTypeMainAndChat,\n    SCCameraHandsFreeModeTypeLeftOfCapture,\n};\n\n/// Face detection and focus strategy in Tweak. 
 There are fewer options in internal Tweaks than the A/B testing\n/// strategies.\ntypedef NS_ENUM(NSUInteger, SCCameraFaceFocusModeStrategyType) {\n    SCCameraFaceFocusModeStrategyTypeABTest = 0,\n    SCCameraFaceFocusModeStrategyTypeDisabled,     // Disabled for both cameras.\n    SCCameraFaceFocusModeStrategyTypeOffByDefault, // Enabled for both cameras, but is off by default.\n    SCCameraFaceFocusModeStrategyTypeOnByDefault,  // Enabled for both cameras, and is on by default.\n};\n\ntypedef NS_ENUM(NSUInteger, SCCameraFaceFocusDetectionMethodType) {\n    SCCameraFaceFocusDetectionMethodTypeABTest = 0,\n    SCCameraFaceFocusDetectionMethodTypeCIDetector, // Use SCCaptureCoreImageFaceDetector\n    SCCameraFaceFocusDetectionMethodTypeAVMetadata, // Use SCCaptureMetadataOutputDetector\n};\n\nSC_EXTERN_C_BEGIN\n\nextern SCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void);\n\nextern BOOL SCCameraTweaksBlackCameraRecoveryEnabled(void);\n\nextern BOOL SCCameraTweaksMicPermissionEnabled(void);\n\nextern BOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void);\n\nextern BOOL SCCameraTweaksEnableHandsFreeXToCancel(void);\nextern SCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void);\n\nBOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void);\n\nextern BOOL SCCameraTweaksEnablePreviewPresenterFastPreview(void);\n\nextern BOOL SCCameraTweaksEnableCaptureSharePerformer(void);\n\nextern BOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition);\n\nextern BOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition);\n\nextern SCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod(void);\n\nextern CGFloat SCCameraFaceFocusMinFaceSize(void);\n\nextern BOOL SCCameraTweaksSessionLightWeightFixEnabled(void);\n\nSC_EXTERN_C_END\n\nstatic inline BOOL SCCameraTweaksEnableVideoStabilization(void)\n{\n    return 
FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Enable video stabilization\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnableForceTouchToToggleCamera(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Recording\", @\"Force Touch to Toggle\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnableStayOnCameraAfterPostingStory(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Story\", @\"Stay on camera after posting\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnableKeepLastFrameOnCamera(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Keep last frame on camera\", YES);\n}\n\nstatic inline BOOL SCCameraTweaksSmoothAutoFocusWhileRecording(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Smooth autofocus while recording\", YES);\n}\n\nstatic inline NSInteger SCCameraExposureAdjustmentMode(void)\n{\n    return [FBTweakValue(\n        @\"Camera\", @\"Core Camera\", @\"Adjust Exposure\", (id) @0,\n        (@{ @0 : @\"NO\",\n            @1 : @\"Dynamic enhancement\",\n            @2 : @\"Night vision\",\n            @3 : @\"Inverted night vision\" })) integerValue];\n}\n\nstatic inline BOOL SCCameraTweaksRotateToggleCameraButton(void)\n{\n    return SCTweakValueWithHalt(@\"Camera\", @\"Core Camera\", @\"Rotate Toggle-Camera Button\", NO);\n}\n\nstatic inline CGFloat SCCameraTweaksRotateToggleCameraButtonTime(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Toggle-Camera Button Rotation Time\", 0.3);\n}\n\nstatic inline BOOL SCCameraTweaksDefaultPortrait(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Default to Portrait Orientation\", YES);\n}\n\n// For test purpose\nstatic inline BOOL SCCameraTweaksTranscodingAlwaysFails(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Transcoding always fails\", NO);\n}\n\n// This tweak disables the video masking behavior of the snap overlays;\n// Intended to be used by curators who are on-site snapping special events.\n// Ping 
news-dev@snapchat.com for any questions/comments\nstatic inline BOOL SCCameraTweaksDisableOverlayVideoMask(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Creative Tools\", @\"Disable Overlay Video Masking\", NO);\n}\n\nstatic inline NSInteger SCCameraTweaksDelayTurnOnFilters(void)\n{\n    return [FBTweakValue(@\"Camera\", @\"Core Camera\", @\"Delay turn on filter\", (id) @0,\n                         (@{ @0 : @\"Respect A/B testing\",\n                             @1 : @\"Override to YES\",\n                             @2 : @\"Override to NO\" })) integerValue];\n}\n\nstatic inline BOOL SCCameraTweaksEnableExposurePointObservation(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Face Focus\", @\"Observe Exposure Point\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnableFocusPointObservation(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Face Focus\", @\"Observe Focus Point\", NO);\n}\n\nstatic inline CGFloat SCCameraTweaksSmoothZoomThresholdTime()\n{\n    return FBTweakValue(@\"Camera\", @\"Zoom Strategy - Linear Interpolation\", @\"Threshold time\", 0.3);\n}\n\nstatic inline CGFloat SCCameraTweaksSmoothZoomThresholdFactor()\n{\n    return FBTweakValue(@\"Camera\", @\"Zoom Strategy - Linear Interpolation\", @\"Threshold factor diff\", 0.25);\n}\n\nstatic inline CGFloat SCCameraTweaksSmoothZoomIntermediateFramesPerSecond()\n{\n    return FBTweakValue(@\"Camera\", @\"Zoom Strategy - Linear Interpolation\", @\"Intermediate fps\", 60);\n}\n\nstatic inline CGFloat SCCameraTweaksSmoothZoomDelayTolerantTime()\n{\n    return FBTweakValue(@\"Camera\", @\"Zoom Strategy - Linear Interpolation\", @\"Delay tolerant time\", 0.15);\n}\n\nstatic inline CGFloat SCCameraTweaksSmoothZoomMinStepLength()\n{\n    return FBTweakValue(@\"Camera\", @\"Zoom Strategy - Linear Interpolation\", @\"Min step length\", 0.05);\n}\n\nstatic inline CGFloat SCCameraTweaksExposureDeadline()\n{\n    return FBTweakValue(@\"Camera\", @\"Adjust Exposure\", @\"Exposure 
Deadline\", 0.2);\n}\n\nstatic inline BOOL SCCameraTweaksKillFrontCamera(void)\n{\n    return SCTweakValueWithHalt(@\"Camera\", @\"Debugging\", @\"Kill Front Camera\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksKillBackCamera(void)\n{\n    return SCTweakValueWithHalt(@\"Camera\", @\"Debugging\", @\"Kill Back Camera\", NO);\n}\n\n#if TARGET_IPHONE_SIMULATOR\n\nstatic inline BOOL SCCameraTweaksUseRealMockImage(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Debugging\", @\"Use real mock image on simulator\", YES);\n}\n\n#endif\n\nstatic inline CGFloat SCCameraTweaksShortPreviewTransitionAnimationDuration()\n{\n    return FBTweakValue(@\"Camera\", @\"Preview Transition\", @\"Short Animation Duration\", 0.35);\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksPreviewTransitionAnimationDurationStrategy()\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Preview Transition\", @\"Enable Short Animation Duration\",\n                                              (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                                  @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline CGFloat SCCameraTweaksEnablePortraitModeButton(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Enable Button\", NO);\n}\n\nstatic inline CGFloat SCCameraTweaksDepthBlurForegroundThreshold(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Foreground Blur Threshold\", 0.3);\n}\n\nstatic inline CGFloat SCCameraTweaksDepthBlurBackgroundThreshold(void)\n{\n    return FBTweakValue(@\"Camera\", 
@\"Core Camera - Portrait Mode\", @\"Background Blur Threshold\", 0.1);\n}\n\nstatic inline CGFloat SCCameraTweaksBlurSigma(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Blur Sigma\", 4.0);\n}\n\nstatic inline BOOL SCCameraTweaksEnableFilterInputFocusRect(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Filter Input Focus Rect\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnablePortraitModeTapToFocus(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Tap to Focus\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksEnablePortraitModeAutofocus(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Autofocus\", NO);\n}\n\nstatic inline BOOL SCCameraTweaksDepthToGrayscaleOverride(void)\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Portrait Mode\", @\"Depth to Grayscale Override\", NO);\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksEnableHandsFreeXToCancelStrategy(void)\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Hands-Free Recording\", @\"X to Cancel\",\n                                              (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                                  @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline SCCameraHandsFreeModeType SCCameraTweaksHandsFreeModeType()\n{\n    NSNumber *strategy = SCTweakValueWithHalt(\n        @\"Camera\", @\"Hands-Free Recording\", @\"Enabled\", (id) @(SCCameraHandsFreeModeTypeABTest), (@{\n            @(SCCameraHandsFreeModeTypeABTest) : 
@\"Respect A/B testing\",\n            @(SCCameraHandsFreeModeTypeDisabled) : @\"Disable\",\n            @(SCCameraHandsFreeModeTypeMainOnly) : @\"Main Camera only\",\n            @(SCCameraHandsFreeModeTypeChatMoveCaptureButton) : @\"Main Camera + move Chat capture button\",\n            @(SCCameraHandsFreeModeTypeMainAndChat) : @\"Main + Chat Cameras\",\n            @(SCCameraHandsFreeModeTypeLeftOfCapture) : @\"Left of Main + Chat Cameras\"\n        }));\n    return (SCCameraHandsFreeModeType)[strategy unsignedIntegerValue];\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksPreviewPresenterFastPreviewStrategy(void)\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Preview Presenter\", @\"Fast Preview\",\n                                              (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                                  @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline NSInteger SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy(void)\n{\n    NSNumber *strategy =\n        SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Capture Keep Recorded Video\",\n                             @\"Enable Capture Keep Recorded Video\", (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                 @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                 @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                 @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                             }));\n    return 
(SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline NSInteger SCCameraTweaksEnableCaptureSharePerformerStrategy(void)\n{\n    NSNumber *strategy =\n        SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Capture Share Performer\", @\"Enable Capture Share Performer\",\n                             (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                 @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                 @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                 @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                             }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline SCCameraFaceFocusModeStrategyType SCCameraTweaksFaceFocusStrategy()\n{\n    NSNumber *strategy =\n        SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Face Focus\", @\"Enable Face Focus\",\n                             (id) @(SCCameraFaceFocusModeStrategyTypeABTest), (@{\n                                 @(SCCameraFaceFocusModeStrategyTypeABTest) : @\"Respect A/B testing\",\n                                 @(SCCameraFaceFocusModeStrategyTypeDisabled) : @\"Disabled\",\n                                 @(SCCameraFaceFocusModeStrategyTypeOffByDefault) : @\"Enabled, off by default\",\n                                 @(SCCameraFaceFocusModeStrategyTypeOnByDefault) : @\"Enabled, on by default\",\n                             }));\n    return (SCCameraFaceFocusModeStrategyType)[strategy unsignedIntegerValue];\n}\n\nstatic inline SCCameraFaceFocusDetectionMethodType SCCameraTweaksFaceFocusDetectionMethodType()\n{\n    NSNumber *strategy =\n        SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Face Focus\", @\"Detection Method\",\n                             (id) @(SCCameraFaceFocusDetectionMethodTypeABTest), (@{\n                                 
@(SCCameraFaceFocusDetectionMethodTypeABTest) : @\"Respect A/B testing\",\n                                 @(SCCameraFaceFocusDetectionMethodTypeCIDetector) : @\"CIDetector\",\n                                 @(SCCameraFaceFocusDetectionMethodTypeAVMetadata) : @\"AVMetadata\",\n                             }));\n    return (SCCameraFaceFocusDetectionMethodType)[strategy unsignedIntegerValue];\n}\n\nstatic inline int SCCameraTweaksFaceFocusDetectionFrequency()\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Face Focus\", @\"Detection Frequency\", 3, 1, 30);\n}\n\nstatic inline BOOL SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting()\n{\n    return SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Face Focus\", @\"Min Face Size Respect AB\", YES);\n}\n\nstatic inline CGFloat SCCameraTweaksFaceFocusMinFaceSizeValue()\n{\n    return FBTweakValue(@\"Camera\", @\"Core Camera - Face Focus\", @\"Min Face Size\", 0.25, 0.01, 0.5);\n}\n\nstatic inline BOOL SCCameraTweaksEnableDualCamera(void)\n{\n    return SCTweakValueWithHalt(@\"Camera\", @\"Core Camera - Dual Camera\", @\"Enable Dual Camera\", NO);\n}\n"
  },
  {
    "path": "Tweaks/SCCameraTweaks.m",
    "content": "//\n//  SCCameraTweaks.m\n//  Snapchat\n//\n//  Created by Liu Liu on 10/4/16.\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCameraTweaks.h\"\n\n#import \"SCManagedCapturePreviewLayerController.h\"\n\n#import <SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCTweakAdditions/SCTweakDefines.h>\n\nSCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void)\n{\n\n    NSNumber *strategyNumber = SCTweakValueWithHalt(\n        @\"Camera\", @\"Core Camera\", @\"Zoom Strategy\",\n        @(SCIsMasterBuild() ? SCManagedCaptureDeviceLinearInterpolation : SCManagedCaptureDeviceDefaultZoom), (@{\n            @(SCManagedCaptureDeviceDefaultZoom) : @\"Default\",\n            @(SCManagedCaptureDeviceSavitzkyGolayFilter) : @\"Savitzky-Golay Filter\",\n            @(SCManagedCaptureDeviceLinearInterpolation) : @\"Linear Interpolation\"\n        }));\n    return (SCManagedCaptureDeviceZoomHandlerType)[strategyNumber integerValue];\n}\n\nBOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition)\n{\n    SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO);\n    SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO);\n\n    BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront);\n    BOOL isEnabled = NO;\n    SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy();\n    switch (option) {\n    case SCCameraFaceFocusModeStrategyTypeABTest:\n        if (isFrontCamera) {\n            isEnabled = SCExperimentWithFaceDetectionFocusFrontCameraEnabled();\n        } else {\n            isEnabled = SCExperimentWithFaceDetectionFocusBackCameraEnabled();\n        }\n        break;\n    case SCCameraFaceFocusModeStrategyTypeDisabled:\n        isEnabled = NO;\n        break;\n    case SCCameraFaceFocusModeStrategyTypeOffByDefault:\n    case 
SCCameraFaceFocusModeStrategyTypeOnByDefault:\n        isEnabled = YES;\n        break;\n    }\n    return isEnabled;\n}\n\nBOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition)\n{\n    SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO);\n    SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO);\n\n    BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront);\n    BOOL isOnByDefault = NO;\n    SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy();\n    switch (option) {\n    case SCCameraFaceFocusModeStrategyTypeABTest:\n        if (isFrontCamera) {\n            isOnByDefault = SCExperimentWithFaceDetectionFocusFrontCameraOnByDefault();\n        } else {\n            isOnByDefault = SCExperimentWithFaceDetectionFocusBackCameraOnByDefault();\n        }\n        break;\n    case SCCameraFaceFocusModeStrategyTypeDisabled:\n    case SCCameraFaceFocusModeStrategyTypeOffByDefault:\n        isOnByDefault = NO;\n        break;\n    case SCCameraFaceFocusModeStrategyTypeOnByDefault:\n        isOnByDefault = YES;\n        break;\n    }\n    return isOnByDefault;\n}\n\nSCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod()\n{\n    static SCCameraFaceFocusDetectionMethodType detectionMethodType;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        SCCameraFaceFocusDetectionMethodType option = SCCameraTweaksFaceFocusDetectionMethodType();\n        switch (option) {\n        case SCCameraFaceFocusDetectionMethodTypeABTest: {\n            // Check the validity of AB value.\n            NSUInteger experimentValue = SCExperimentWithFaceDetectionFocusDetectionMethod();\n            if (experimentValue >= SCCameraFaceFocusDetectionMethodTypeCIDetector &&\n                experimentValue <= SCCameraFaceFocusDetectionMethodTypeAVMetadata) {\n                detectionMethodType = 
experimentValue;\n            } else {\n                // Use CIDetector by default.\n                detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector;\n            }\n        } break;\n        case SCCameraFaceFocusDetectionMethodTypeAVMetadata:\n            detectionMethodType = SCCameraFaceFocusDetectionMethodTypeAVMetadata;\n            break;\n        case SCCameraFaceFocusDetectionMethodTypeCIDetector:\n            detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector;\n            break;\n        }\n    });\n    return detectionMethodType;\n}\n\nCGFloat SCCameraFaceFocusMinFaceSize(void)\n{\n    static CGFloat minFaceSize;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        if (SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting()) {\n            minFaceSize = (CGFloat)SCExperimentWithFaceDetectionMinFaceSize();\n        } else {\n            minFaceSize = SCCameraTweaksFaceFocusMinFaceSizeValue();\n        }\n        if (minFaceSize < 0.01 || minFaceSize > 0.5) {\n            minFaceSize = 0.25; // Default value is 0.25\n        }\n    });\n    return minFaceSize;\n}\n\nBOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void)\n{\n    static BOOL enableCaptureKeepRecordedVideo;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes: {\n            enableCaptureKeepRecordedVideo = YES;\n            break;\n        }\n        case SCCameraTweaksStrategyOverrideToNo: {\n            enableCaptureKeepRecordedVideo = NO;\n            break;\n        }\n        case SCCameraTweaksStrategyFollowABTest: {\n            enableCaptureKeepRecordedVideo = SCExperimentWithCaptureKeepRecordedVideo();\n            break;\n        }\n        default: {\n            enableCaptureKeepRecordedVideo = NO;\n            break;\n        }\n        }\n    });\n    return 
enableCaptureKeepRecordedVideo;\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksBlackCameraRecoveryStrategy(void)\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Core Camera\", @\"Black Camera Recovery\",\n                                              (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                                  @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nBOOL SCCameraTweaksBlackCameraRecoveryEnabled(void)\n{\n    static BOOL enabled;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksBlackCameraRecoveryStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes:\n            enabled = YES;\n            break;\n        case SCCameraTweaksStrategyOverrideToNo:\n            enabled = NO;\n            break;\n        case SCCameraTweaksStrategyFollowABTest:\n            enabled = SCExperimentWithBlackCameraRecovery();\n            break;\n        default:\n            enabled = NO;\n            break;\n        }\n    });\n    return enabled;\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksMicrophoneNotificationStrategy(void)\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Core Camera\", @\"Mic Notification\",\n                                              (id) @(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n               
                                   @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nBOOL SCCameraTweaksMicPermissionEnabled(void)\n{\n    static BOOL enabled;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksMicrophoneNotificationStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes:\n            enabled = YES;\n            break;\n        case SCCameraTweaksStrategyOverrideToNo:\n            enabled = NO;\n            break;\n        case SCCameraTweaksStrategyFollowABTest:\n            enabled = SCExperimentWithMicrophonePermissionNotificationEnabled();\n            break;\n        default:\n            enabled = NO;\n            break;\n        }\n    });\n    return enabled;\n}\n\nSCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void)\n{\n    static SCCameraHandsFreeModeType handsFreeModeType;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        SCCameraHandsFreeModeType option = SCCameraTweaksHandsFreeModeType();\n        switch (option) {\n        case SCCameraHandsFreeModeTypeDisabled:\n            handsFreeModeType = SCCameraHandsFreeModeTypeDisabled;\n            break;\n        case SCCameraHandsFreeModeTypeMainOnly:\n            handsFreeModeType = SCCameraHandsFreeModeTypeMainOnly;\n            break;\n        case SCCameraHandsFreeModeTypeChatMoveCaptureButton:\n            handsFreeModeType = SCCameraHandsFreeModeTypeChatMoveCaptureButton;\n            break;\n        case SCCameraHandsFreeModeTypeMainAndChat:\n            handsFreeModeType = SCCameraHandsFreeModeTypeMainAndChat;\n            break;\n        case SCCameraHandsFreeModeTypeLeftOfCapture:\n            handsFreeModeType = SCCameraHandsFreeModeTypeLeftOfCapture;\n            break;\n        case SCCameraHandsFreeModeTypeABTest:\n        default:\n       
     handsFreeModeType = SCExperimentWithHandsFreeMode();\n            break;\n        }\n    });\n    return handsFreeModeType;\n}\n\nBOOL SCCameraTweaksEnableHandsFreeXToCancel(void)\n{\n    static BOOL enableHandsFreeXToCancel;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksEnableHandsFreeXToCancelStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes: {\n            enableHandsFreeXToCancel = YES;\n            break;\n        }\n        case SCCameraTweaksStrategyOverrideToNo: {\n            enableHandsFreeXToCancel = NO;\n            break;\n        }\n        case SCCameraTweaksStrategyFollowABTest: {\n            enableHandsFreeXToCancel = SCExperimentWithHandsFreeXToCancel();\n            break;\n        }\n        default: {\n            enableHandsFreeXToCancel = NO;\n            break;\n        }\n        }\n    });\n    return enableHandsFreeXToCancel;\n}\n\nBOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void)\n{\n    static BOOL enableShortPreviewTransitionAnimationDuration;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksPreviewTransitionAnimationDurationStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes: {\n            enableShortPreviewTransitionAnimationDuration = YES;\n            break;\n        }\n        case SCCameraTweaksStrategyOverrideToNo: {\n            enableShortPreviewTransitionAnimationDuration = NO;\n            break;\n        }\n        case SCCameraTweaksStrategyFollowABTest: {\n            enableShortPreviewTransitionAnimationDuration = SCExperimentWithShortPreviewTransitionAnimationDuration();\n            break;\n        }\n        default: {\n            enableShortPreviewTransitionAnimationDuration = YES;\n            break;\n        }\n        }\n    });\n    return enableShortPreviewTransitionAnimationDuration;\n}\n\nBOOL 
SCCameraTweaksEnablePreviewPresenterFastPreview(void)\n{\n    static BOOL enablePreviewPresenterFastPreview;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksPreviewPresenterFastPreviewStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes: {\n            enablePreviewPresenterFastPreview = YES;\n            break;\n        }\n        case SCCameraTweaksStrategyOverrideToNo: {\n            enablePreviewPresenterFastPreview = NO;\n            break;\n        }\n        case SCCameraTweaksStrategyFollowABTest: {\n            enablePreviewPresenterFastPreview = SCExperimentWithPreviewPresenterFastPreview();\n            break;\n        }\n        default: {\n            enablePreviewPresenterFastPreview = NO;\n            break;\n        }\n        }\n    });\n    return enablePreviewPresenterFastPreview;\n}\n\nBOOL SCCameraTweaksEnableCaptureSharePerformer(void)\n{\n    static BOOL enableCaptureSharePerformer;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksEnableCaptureSharePerformerStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes: {\n            enableCaptureSharePerformer = YES;\n            break;\n        }\n        case SCCameraTweaksStrategyOverrideToNo: {\n            enableCaptureSharePerformer = NO;\n            break;\n        }\n        case SCCameraTweaksStrategyFollowABTest: {\n            enableCaptureSharePerformer = SCExperimentWithCaptureSharePerformer();\n            break;\n        }\n        default: {\n            enableCaptureSharePerformer = NO;\n            break;\n        }\n        }\n    });\n    return enableCaptureSharePerformer;\n}\n\nstatic inline SCCameraTweaksStrategyType SCCameraTweaksSessionLightWeightFixStrategy(void)\n{\n    NSNumber *strategy = SCTweakValueWithHalt(@\"Camera\", @\"Core Camera\", @\"Light-weight Session Fix\",\n                                              (id) 
@(SCCameraTweaksStrategyFollowABTest), (@{\n                                                  @(SCCameraTweaksStrategyFollowABTest) : @\"Respect A/B testing\",\n                                                  @(SCCameraTweaksStrategyOverrideToYes) : @\"Override to YES\",\n                                                  @(SCCameraTweaksStrategyOverrideToNo) : @\"Override to NO\"\n                                              }));\n    return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue];\n}\n\nBOOL SCCameraTweaksSessionLightWeightFixEnabled(void)\n{\n    static BOOL enabled;\n    static dispatch_once_t onceToken;\n    dispatch_once(&onceToken, ^{\n        switch (SCCameraTweaksSessionLightWeightFixStrategy()) {\n        case SCCameraTweaksStrategyOverrideToYes:\n            enabled = YES;\n            break;\n        case SCCameraTweaksStrategyOverrideToNo:\n            enabled = NO;\n            break;\n        case SCCameraTweaksStrategyFollowABTest:\n            enabled = SCExperimentWithSessionLightWeightFix();\n            break;\n        default:\n            enabled = NO;\n            break;\n        }\n    });\n    return enabled;\n}\n"
  },
  {
    "path": "UI/AVCameraViewEnums.h",
    "content": "//\n//  AVCameraViewEnums.h\n//  SCCamera\n//\n//  Copyright © 2016 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n/**\n The context specifies the way in which the camera is presented to the user.\n The controller can be configured a variety of ways depending on the context.\n */\ntypedef NS_ENUM(NSUInteger, AVCameraViewControllerContext) {\n    AVCameraViewControllerContextMainVC = 1,\n    AVCameraViewControllerContextReply,\n    AVCameraViewControllerContextDefault = AVCameraViewControllerContextReply,\n    AVCameraViewControllerContextSnapAds,\n    AVCameraViewControllerContextAddToStory,\n};\n\ntypedef NS_ENUM(NSInteger, AVCameraViewType) {\n    AVCameraViewNoReply = 0,\n    AVCameraViewReplyLeft,\n    AVCameraViewReplyRight,\n    AVCameraViewChat,\n    AVCameraViewReplyHydra,\n    AVCameraViewSnapAds,\n    AVCameraViewGalleryMadeWithLenses,\n    AVCameraViewSnapConnectSnapKit,\n    AVCameraViewSnappable\n};\n\ntypedef NS_ENUM(NSUInteger, AVCameraViewControllerRecordingState) {\n    AVCameraViewControllerRecordingStateDefault,            // No capture activity\n    AVCameraViewControllerRecordingStatePrepareRecording,   // Preparing for recording with delay\n    AVCameraViewControllerRecordingStateInitiatedRecording, // Actively recording\n    AVCameraViewControllerRecordingStateTakingPicture,      // Taking a still image\n    AVCameraViewControllerRecordingStatePictureTaken,       // Picture is taken\n    AVCameraViewControllerRecordingStatePreview,            // Preparing to present preview\n};\n\ntypedef NS_ENUM(NSUInteger, SCCameraRecordingMethod) {\n    SCCameraRecordingMethodCameraButton,\n    SCCameraRecordingMethodVolumeButton,\n    SCCameraRecordingMethodLensInitiated\n};\n"
  },
  {
    "path": "UI/Lenses/LensButtonZ/SCFeatureLensButtonZ.h",
    "content": "//\n//  SCFeatureLensButtonZ.h\n//  SCCamera\n//\n//  Created by Anton Udovychenko on 4/24/18.\n//\n\n#import \"AVCameraViewEnums.h\"\n#import \"SCFeature.h\"\n\n#import <Foundation/Foundation.h>\n\n@protocol SCFeatureLensButtonZ;\n@class SCGrowingButton, SCLens;\n\nNS_ASSUME_NONNULL_BEGIN\n\n@protocol SCFeatureLensButtonZDelegate <NSObject>\n- (void)featureLensZButton:(id<SCFeatureLensButtonZ>)featureLensZButton\n        didPressLensButton:(SCGrowingButton *)lensButton;\n- (nullable NSArray<SCLens *> *)allLenses;\n@end\n\n@protocol SCFeatureLensButtonZ <SCFeature>\n\n@property (nonatomic, weak) id<SCFeatureLensButtonZDelegate> delegate;\n\n- (void)setLensButtonActive:(BOOL)active;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "UI/Lenses/LensSideButton/SCFeatureLensSideButton.h",
    "content": "//\n//  SCFeatureLensSideButton.h\n//  SCCamera\n//\n//  Created by Anton Udovychenko on 4/12/18.\n//\n\n#import \"AVCameraViewEnums.h\"\n#import \"SCFeature.h\"\n\n#import <Foundation/Foundation.h>\n\n@protocol SCFeatureLensSideButton;\n@class SCGrowingButton, SCLens;\n\nNS_ASSUME_NONNULL_BEGIN\n\n@protocol SCFeatureLensSideButtonDelegate <NSObject>\n- (void)featureLensSideButton:(id<SCFeatureLensSideButton>)featureLensSideButton\n           didPressLensButton:(SCGrowingButton *)lensButton;\n- (nullable SCLens *)firstApplicableLens;\n@end\n\n@protocol SCFeatureLensSideButton <SCFeature>\n\n@property (nonatomic, weak) id<SCFeatureLensSideButtonDelegate> delegate;\n\n- (void)updateLensButtonVisibility:(CGFloat)visibilityPercentage;\n- (void)showLensButtonIfNeeded;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "UI/SCLongPressGestureRecognizer.h",
    "content": "//\n//  SCLongPressGestureRecognizer.h\n//  SCCamera\n//\n//  Created by Pavlo Antonenko on 4/28/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n// The gesture recognizer cancels if the user has moved the finger more than the defined value, even if the long press\n// started, unlike UILongPressGestureRecognizer. But if the user hasn't moved the finger for the defined time, unlimited\n// movement is allowed.\n@interface SCLongPressGestureRecognizer : UILongPressGestureRecognizer\n\n@property (nonatomic, assign) CGFloat allowableMovementAfterBegan;\n@property (nonatomic, assign) CGFloat timeBeforeUnlimitedMovementAllowed;\n@property (nonatomic, assign, readonly) CGFloat forceOfAllTouches;\n@property (nonatomic, assign, readonly) CGFloat maximumPossibleForceOfAllTouches;\n@property (nonatomic, strong) NSDictionary *userInfo;\n@property (nonatomic, assign) BOOL failedByMovement;\n\n- (BOOL)isUnlimitedMovementAllowed;\n\n@end\n"
  },
  {
    "path": "UI/SCLongPressGestureRecognizer.m",
    "content": "//\n//  SCLongPressGestureRecognizer.m\n//  SCCamera\n//\n//  Created by Pavlo Antonenko on 4/28/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCLongPressGestureRecognizer.h\"\n\n#import <SCFoundation/SCLog.h>\n\n#import <UIKit/UIGestureRecognizerSubclass.h>\n\n@implementation SCLongPressGestureRecognizer {\n    CGPoint _initialPoint;\n    CGFloat _initialTime;\n}\n\n- (instancetype)initWithTarget:(id)target action:(SEL)action\n{\n    self = [super initWithTarget:target action:action];\n    if (self) {\n        _allowableMovementAfterBegan = FLT_MAX;\n        _timeBeforeUnlimitedMovementAllowed = 0.0;\n    }\n    return self;\n}\n\n- (void)reset\n{\n    [super reset];\n    _initialPoint = CGPointZero;\n    _initialTime = 0;\n    _forceOfAllTouches = 1.0;\n    _maximumPossibleForceOfAllTouches = 1.0;\n    self.failedByMovement = NO;\n}\n\n- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesBegan:touches withEvent:event];\n    _initialPoint = [self locationInView:self.view];\n    _initialTime = CACurrentMediaTime();\n    _forceOfAllTouches = 1.0;\n    for (UITouch *touch in touches) {\n        _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches);\n        _forceOfAllTouches = MAX(touch.force, _forceOfAllTouches);\n    }\n}\n\n- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesMoved:touches withEvent:event];\n\n    _forceOfAllTouches = 1.0;\n    for (UITouch *touch in touches) {\n        _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches);\n        _forceOfAllTouches = MAX(touch.force, _forceOfAllTouches);\n    }\n\n    if (!CGPointEqualToPoint(_initialPoint, CGPointZero)) {\n        CGPoint currentPoint = [self locationInView:self.view];\n\n        CGFloat distance = hypot(_initialPoint.x - currentPoint.x, _initialPoint.y - 
currentPoint.y);\n        CGFloat timeDifference = CACurrentMediaTime() - _initialTime;\n\n        if (distance > self.allowableMovementAfterBegan && timeDifference < self.timeBeforeUnlimitedMovementAllowed) {\n            SCLogGeneralInfo(@\"Long press moved %.2f > %.2f after %.3f < %.3f seconds, and is cancelled\", distance,\n                             self.allowableMovementAfterBegan, timeDifference, self.timeBeforeUnlimitedMovementAllowed);\n            self.state = UIGestureRecognizerStateFailed;\n            self.failedByMovement = YES;\n        }\n    }\n}\n\n- (void)setEnabled:(BOOL)enabled\n{\n    SCLogGeneralInfo(@\"Setting enabled: %d for %@\", enabled, self);\n    [super setEnabled:enabled];\n}\n\n- (BOOL)isUnlimitedMovementAllowed\n{\n    return CACurrentMediaTime() - _initialTime > self.timeBeforeUnlimitedMovementAllowed;\n}\n\n@end\n"
  },
  {
    "path": "VolumeButton/SCCameraVolumeButtonHandler.h",
    "content": "//\n//  SCCameraVolumeButtonHandler.h\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 2/27/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@class SCCameraVolumeButtonHandler;\n\n@protocol SCCameraVolumeButtonHandlerDelegate <NSObject>\n\n- (void)volumeButtonHandlerDidBeginPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler;\n- (void)volumeButtonHandlerDidEndPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler;\n\n@end\n\n@interface SCCameraVolumeButtonHandler : NSObject\n\n@property (nonatomic, weak) id<SCCameraVolumeButtonHandlerDelegate> delegate;\n\n- (void)startHandlingVolumeButtonEvents;\n- (void)stopHandlingVolumeButtonEvents;\n- (void)stopHandlingVolumeButtonEventsWhenPressingEnds;\n- (BOOL)isHandlingVolumeButtonEvents;\n\n- (BOOL)isPressingVolumeButton;\n\n@end\n"
  },
  {
    "path": "VolumeButton/SCCameraVolumeButtonHandler.m",
    "content": "//\n//  SCCameraVolumeButtonHandler.m\n//  Snapchat\n//\n//  Created by Xiaomu Wu on 2/27/15.\n//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.\n//\n\n#import \"SCCameraVolumeButtonHandler.h\"\n\n#import <SCFoundation/SCLog.h>\n#import <SCFoundation/UIApplication+SCSecretFeature2.h>\n\n@implementation SCCameraVolumeButtonHandler {\n    NSString *_secretFeatureToken;\n    BOOL _pressingButton1; // volume down button\n    BOOL _pressingButton2; // volume up button\n    BOOL _stopsHandlingWhenPressingEnds;\n}\n\n#pragma mark - NSObject\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];\n        UIApplication *application = [UIApplication sharedApplication];\n        [notificationCenter addObserver:self\n                               selector:@selector(_handleButton1Down:)\n                                   name:[application sc_eventNotificationName1]\n                                 object:nil];\n        [notificationCenter addObserver:self\n                               selector:@selector(_handleButton1Up:)\n                                   name:[application sc_eventNotificationName2]\n                                 object:nil];\n        [notificationCenter addObserver:self\n                               selector:@selector(_handleButton2Down:)\n                                   name:[application sc_eventNotificationName3]\n                                 object:nil];\n        [notificationCenter addObserver:self\n                               selector:@selector(_handleButton2Up:)\n                                   name:[application sc_eventNotificationName4]\n                                 object:nil];\n    }\n    return self;\n}\n\n- (void)dealloc\n{\n    if (_secretFeatureToken) {\n        [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken];\n    }\n}\n\n#pragma mark - Public\n\n- 
(void)startHandlingVolumeButtonEvents\n{\n    _stopsHandlingWhenPressingEnds = NO;\n    [self _resetPressingButtons];\n    if ([self isHandlingVolumeButtonEvents]) {\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Start handling volume button events\");\n    _secretFeatureToken = [[[UIApplication sharedApplication] sc_enableSecretFeature2] copy];\n}\n\n- (void)stopHandlingVolumeButtonEvents\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Stop handling volume button events\");\n    [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken];\n    _secretFeatureToken = nil;\n    _stopsHandlingWhenPressingEnds = NO;\n}\n\n- (void)stopHandlingVolumeButtonEventsWhenPressingEnds\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n        return;\n    }\n    if (![self isPressingVolumeButton]) {\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Stop handling volume button events when pressing ends\");\n    _stopsHandlingWhenPressingEnds = YES;\n}\n\n- (BOOL)isHandlingVolumeButtonEvents\n{\n    return (_secretFeatureToken != nil);\n}\n\n- (BOOL)isPressingVolumeButton\n{\n    return _pressingButton1 || _pressingButton2;\n}\n\n- (void)_resetPressingButtons\n{\n    _pressingButton1 = NO;\n    _pressingButton2 = NO;\n}\n\n#pragma mark - Private\n\n- (void)_handleButton1Down:(NSNotification *)notification\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 down, not handled\");\n        return;\n    }\n    if (_pressingButton1) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 down, already down\");\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 down\");\n    [self _changePressingButton:^{\n        _pressingButton1 = YES;\n    }];\n}\n\n- (void)_handleButton1Up:(NSNotification *)notification\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n  
      SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 up, not handled\");\n        return;\n    }\n    if (!_pressingButton1) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 up, already up\");\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Volume button 1 up\");\n    [self _changePressingButton:^{\n        _pressingButton1 = NO;\n    }];\n}\n\n- (void)_handleButton2Down:(NSNotification *)notification\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 down, not handled\");\n        return;\n    }\n    if (_pressingButton2) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 down, already down\");\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 down\");\n    [self _changePressingButton:^{\n        _pressingButton2 = YES;\n    }];\n}\n\n- (void)_handleButton2Up:(NSNotification *)notification\n{\n    if (![self isHandlingVolumeButtonEvents]) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 up, not handled\");\n        return;\n    }\n    if (!_pressingButton2) {\n        SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 up, already up\");\n        return;\n    }\n    SCLogGeneralInfo(@\"[Volume Buttons] Volume button 2 up\");\n    [self _changePressingButton:^{\n        _pressingButton2 = NO;\n    }];\n}\n\n- (void)_changePressingButton:(void (^)(void))change\n{\n    BOOL oldPressingVolumeButton = [self isPressingVolumeButton];\n    change();\n    BOOL newPressingVolumeButton = [self isPressingVolumeButton];\n\n    if (!oldPressingVolumeButton && newPressingVolumeButton) {\n        [_delegate volumeButtonHandlerDidBeginPressingVolumeButton:self];\n    } else if (oldPressingVolumeButton && !newPressingVolumeButton) {\n        [_delegate volumeButtonHandlerDidEndPressingVolumeButton:self];\n        if (_stopsHandlingWhenPressingEnds) {\n            [self stopHandlingVolumeButtonEvents];\n        }\n    
}\n}\n\n@end\n"
  },
  {
    "path": "Worker/SCCaptureWorker.h",
    "content": "//\n//  SCCaptureWorker.h\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureResource.h\"\n\n#import <SCFoundation/SCQueuePerformer.h>\n\n#import <Foundation/Foundation.h>\n\n/*\n In general, the function of SCCapturer is to use some resources (such as SCManagedCapturerListenerAnnouncer), to do\n something (such as announce an event).\n\n SCCaptureWorker abstract away the \"do something\" part of SCCapturer. It has very little internal states/resources.\n\n SCCaptureWorker is introduced to be shared between CaptureV1 and CaptureV2, to minimize duplication code.\n\n */\n\n@interface SCCaptureWorker : NSObject\n\n+ (SCCaptureResource *)generateCaptureResource;\n\n+ (void)setupWithCaptureResource:(SCCaptureResource *)captureResource\n                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition;\n\n+ (void)setupCapturePreviewLayerController;\n\n+ (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource\n                                  token:(SCCapturerToken *)token\n                      completionHandler:(dispatch_block_t)completionHandler;\n\n+ (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource\n                                 token:(SCCapturerToken *)token\n                     completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler;\n\n+ (void)setupVideoPreviewLayer:(SCCaptureResource *)resource;\n\n+ (void)makeVideoPreviewLayer:(SCCaptureResource *)resource;\n\n+ (void)redoVideoPreviewLayer:(SCCaptureResource *)resource;\n\n+ (void)startStreaming:(SCCaptureResource *)resource;\n\n+ (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource;\n\n+ (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource;\n\n+ (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource;\n\n+ (void)captureStillImageWithCaptureResource:(SCCaptureResource 
*)captureResource\n                                 aspectRatio:(CGFloat)aspectRatio\n                            captureSessionID:(NSString *)captureSessionID\n                      shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo\n                           completionHandler:\n                               (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler\n                                     context:(NSString *)context;\n\n+ (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource\n                           outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings\n                       audioConfiguration:(SCAudioConfiguration *)configuration\n                              maxDuration:(NSTimeInterval)maxDuration\n                                  fileURL:(NSURL *)fileURL\n                         captureSessionID:(NSString *)captureSessionID\n                        completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler;\n\n+ (void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource;\n\n+ (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource;\n\n+ (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource;\n\n+ (BOOL)canRunARSession:(SCCaptureResource *)resource;\n\n+ (void)turnARSessionOn:(SCCaptureResource *)resource;\n\n+ (void)turnARSessionOff:(SCCaptureResource *)resource;\n\n+ (void)clearARKitData:(SCCaptureResource *)resource;\n\n+ (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource;\n\n+ (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource;\n\n+ (BOOL)audioQueueStarted:(SCCaptureResource *)resource;\n\n+ (BOOL)isLensApplied:(SCCaptureResource *)resource;\n\n+ (BOOL)isVideoMirrored:(SCCaptureResource *)resource;\n\n+ (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource;\n\n+ 
(void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                   completionHandler:(dispatch_block_t)completionHandler\n                                            resource:(SCCaptureResource *)resource;\n\n+ (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration\n                                         resource:(SCCaptureResource *)resource;\n\n+ (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource;\n\n+ (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource;\n\n@end\n"
  },
  {
    "path": "Worker/SCCaptureWorker.m",
    "content": "//\n//  SCCaptureWorker.m\n//  Snapchat\n//\n//  Created by Lin Jia on 10/19/17.\n//\n//\n\n#import \"SCCaptureWorker.h\"\n\n#import \"ARConfiguration+SCConfiguration.h\"\n#import \"SCBlackCameraDetector.h\"\n#import \"SCBlackCameraNoOutputDetector.h\"\n#import \"SCCameraTweaks.h\"\n#import \"SCCaptureCoreImageFaceDetector.h\"\n#import \"SCCaptureFaceDetector.h\"\n#import \"SCCaptureMetadataOutputDetector.h\"\n#import \"SCCaptureSessionFixer.h\"\n#import \"SCManagedCaptureDevice+SCManagedCapturer.h\"\n#import \"SCManagedCaptureDeviceDefaultZoomHandler.h\"\n#import \"SCManagedCaptureDeviceHandler.h\"\n#import \"SCManagedCaptureDeviceLinearInterpolationZoomHandler.h\"\n#import \"SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h\"\n#import \"SCManagedCaptureDeviceSubjectAreaHandler.h\"\n#import \"SCManagedCapturePreviewLayerController.h\"\n#import \"SCManagedCaptureSession.h\"\n#import \"SCManagedCapturer.h\"\n#import \"SCManagedCapturerARImageCaptureProvider.h\"\n#import \"SCManagedCapturerARSessionHandler.h\"\n#import \"SCManagedCapturerGLViewManagerAPI.h\"\n#import \"SCManagedCapturerLensAPIProvider.h\"\n#import \"SCManagedCapturerLogging.h\"\n#import \"SCManagedCapturerState.h\"\n#import \"SCManagedCapturerStateBuilder.h\"\n#import \"SCManagedCapturerV1.h\"\n#import \"SCManagedDeviceCapacityAnalyzer.h\"\n#import \"SCManagedDeviceCapacityAnalyzerHandler.h\"\n#import \"SCManagedDroppedFramesReporter.h\"\n#import \"SCManagedFrontFlashController.h\"\n#import \"SCManagedStillImageCapturerHandler.h\"\n#import \"SCManagedVideoARDataSource.h\"\n#import \"SCManagedVideoCapturer.h\"\n#import \"SCManagedVideoCapturerHandler.h\"\n#import \"SCManagedVideoFileStreamer.h\"\n#import \"SCManagedVideoScanner.h\"\n#import \"SCManagedVideoStreamReporter.h\"\n#import \"SCManagedVideoStreamer.h\"\n#import \"SCMetalUtils.h\"\n#import \"SCProcessingPipelineBuilder.h\"\n#import \"SCVideoCaptureSessionInfo.h\"\n\n#import <SCBatteryLogger/SCBatteryLogger.h>\n#import 
<SCFoundation/SCDeviceName.h>\n#import <SCFoundation/SCQueuePerformer.h>\n#import <SCFoundation/SCThreadHelpers.h>\n#import <SCFoundation/SCTraceODPCompatible.h>\n#import <SCFoundation/SCZeroDependencyExperiments.h>\n#import <SCGhostToSnappable/SCGhostToSnappableSignal.h>\n#import <SCImageProcess/SCImageProcessVideoPlaybackSession.h>\n#import <SCLogger/SCCameraMetrics.h>\n#import <SCLogger/SCLogger+Performance.h>\n\n@import ARKit;\n\nstatic const char *kSCManagedCapturerQueueLabel = \"com.snapchat.managed_capturer\";\nstatic NSTimeInterval const kMaxDefaultScanFrameDuration = 1. / 15; // Restrict scanning to max 15 frames per second\nstatic NSTimeInterval const kMaxPassiveScanFrameDuration = 1.;      // Restrict scanning to max 1 frame per second\nstatic float const kScanTargetCPUUtilization = 0.5;                 // 50% utilization\n\nstatic NSString *const kSCManagedCapturerErrorDomain = @\"kSCManagedCapturerErrorDomain\";\nstatic NSInteger const kSCManagedCapturerRecordVideoBusy = 3001;\nstatic NSInteger const kSCManagedCapturerCaptureStillImageBusy = 3002;\n\nstatic UIImageOrientation SCMirroredImageOrientation(UIImageOrientation orientation)\n{\n    switch (orientation) {\n    case UIImageOrientationRight:\n        return UIImageOrientationLeftMirrored;\n    case UIImageOrientationLeftMirrored:\n        return UIImageOrientationRight;\n    case UIImageOrientationUp:\n        return UIImageOrientationUpMirrored;\n    case UIImageOrientationUpMirrored:\n        return UIImageOrientationUp;\n    case UIImageOrientationDown:\n        return UIImageOrientationDownMirrored;\n    case UIImageOrientationDownMirrored:\n        return UIImageOrientationDown;\n    case UIImageOrientationLeft:\n        return UIImageOrientationRightMirrored;\n    case UIImageOrientationRightMirrored:\n        return UIImageOrientationLeft;\n    }\n}\n\n@implementation SCCaptureWorker\n\n+ (SCCaptureResource *)generateCaptureResource\n{\n    SCCaptureResource *captureResource = 
[[SCCaptureResource alloc] init];\n\n    captureResource.queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedCapturerQueueLabel\n                                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE\n                                                                   queueType:DISPATCH_QUEUE_SERIAL\n                                                                     context:SCQueuePerformerContextCamera];\n\n    captureResource.announcer = [[SCManagedCapturerListenerAnnouncer alloc] init];\n    captureResource.videoCapturerHandler =\n        [[SCManagedVideoCapturerHandler alloc] initWithCaptureResource:captureResource];\n    captureResource.stillImageCapturerHandler =\n        [[SCManagedStillImageCapturerHandler alloc] initWithCaptureResource:captureResource];\n    captureResource.deviceCapacityAnalyzerHandler =\n        [[SCManagedDeviceCapacityAnalyzerHandler alloc] initWithCaptureResource:captureResource];\n    captureResource.deviceZoomHandler = ({\n        SCManagedCaptureDeviceDefaultZoomHandler *handler = nil;\n        switch (SCCameraTweaksDeviceZoomHandlerStrategy()) {\n        case SCManagedCaptureDeviceDefaultZoom:\n            handler = [[SCManagedCaptureDeviceDefaultZoomHandler alloc] initWithCaptureResource:captureResource];\n            break;\n        case SCManagedCaptureDeviceSavitzkyGolayFilter:\n            handler = [[SCManagedCaptureDeviceSavitzkyGolayZoomHandler alloc] initWithCaptureResource:captureResource];\n            break;\n        case SCManagedCaptureDeviceLinearInterpolation:\n            handler =\n                [[SCManagedCaptureDeviceLinearInterpolationZoomHandler alloc] initWithCaptureResource:captureResource];\n            break;\n        }\n        handler;\n    });\n    captureResource.captureDeviceHandler =\n        [[SCManagedCaptureDeviceHandler alloc] initWithCaptureResource:captureResource];\n    captureResource.arSessionHandler =\n        
[[SCManagedCapturerARSessionHandler alloc] initWithCaptureResource:captureResource];\n\n    captureResource.tokenSet = [NSMutableSet new];\n    captureResource.allowsZoom = YES;\n    captureResource.debugInfoDict = [[NSMutableDictionary alloc] init];\n    captureResource.notificationRegistered = NO;\n    return captureResource;\n}\n\n+ (void)setupWithCaptureResource:(SCCaptureResource *)captureResource\n                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert(captureResource.status == SCManagedCapturerStatusUnknown, @\"The status should be unknown\");\n    captureResource.device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];\n    if (!captureResource.device) {\n        // Always prefer front camera over back camera\n        if ([SCManagedCaptureDevice front]) {\n            captureResource.device = [SCManagedCaptureDevice front];\n            devicePosition = SCManagedCaptureDevicePositionFront;\n        } else {\n            captureResource.device = [SCManagedCaptureDevice back];\n            devicePosition = SCManagedCaptureDevicePositionBack;\n        }\n    }\n    // Initial state\n    SCLogCapturerInfo(@\"Init state with devicePosition:%lu, zoomFactor:%f, flashSupported:%d, \"\n                      @\"torchSupported:%d, flashActive:%d, torchActive:%d\",\n                      (unsigned long)devicePosition, captureResource.device.zoomFactor,\n                      captureResource.device.isFlashSupported, captureResource.device.isTorchSupported,\n                      captureResource.device.flashActive, captureResource.device.torchActive);\n    captureResource.state = [[SCManagedCapturerState alloc] initWithIsRunning:NO\n                                                            isNightModeActive:NO\n                                                         isPortraitModeActive:NO\n                                                            lowLightCondition:NO\n           
                                                 adjustingExposure:NO\n                                                               devicePosition:devicePosition\n                                                                   zoomFactor:captureResource.device.zoomFactor\n                                                               flashSupported:captureResource.device.isFlashSupported\n                                                               torchSupported:captureResource.device.isTorchSupported\n                                                                  flashActive:captureResource.device.flashActive\n                                                                  torchActive:captureResource.device.torchActive\n                                                                 lensesActive:NO\n                                                              arSessionActive:NO\n                                                           liveVideoStreaming:NO\n                                                           lensProcessorReady:NO];\n\n    [self configLensesProcessorWithCaptureResource:captureResource];\n    [self configARSessionWithCaptureResource:captureResource];\n    [self configCaptureDeviceHandlerWithCaptureResource:captureResource];\n    [self configAVCaptureSessionWithCaptureResource:captureResource];\n    [self configImageCapturerWithCaptureResource:captureResource];\n    [self configDeviceCapacityAnalyzerWithCaptureResource:captureResource];\n    [self configVideoDataSourceWithCaptureResource:captureResource devicePosition:devicePosition];\n    [self configVideoScannerWithCaptureResource:captureResource];\n    [self configVideoCapturerWithCaptureResource:captureResource];\n\n    if (!SCIsSimulator()) {\n        // We don't want it enabled for simulator\n        [self configBlackCameraDetectorWithCaptureResource:captureResource];\n    }\n\n    if (SCCameraTweaksEnableFaceDetectionFocus(captureResource.state.devicePosition)) {\n      
  [self configureCaptureFaceDetectorWithCaptureResource:captureResource];
    }
}

// Sets up the preview layer on the shared preview-layer controller.
// Main-thread only (asserted below).
+ (void)setupCapturePreviewLayerController
{
    SCAssert([[SCQueuePerformer mainQueuePerformer] isCurrentPerformer], @"");
    [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];
}

// Marks the lens processor as ready in the capturer state, then creates the
// lens processing core from the resource's lens API provider.
+ (void)configLensesProcessorWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCManagedCapturerStateBuilder *stateBuilder =
        [SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state];
    [stateBuilder setLensProcessorReady:YES];
    captureResource.state = [stateBuilder build];

    captureResource.lensProcessingCore = [captureResource.lensAPIProvider lensAPIForCaptureResource:captureResource];
}

// Creates the ARSession and the AR image capturer. ARKit requires iOS 11+,
// so this is a no-op on earlier OS versions.
+ (void)configARSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (@available(iOS 11.0, *)) {
        captureResource.arSession = [[ARSession alloc] init];

        captureResource.arImageCapturer =
            [captureResource.arImageCaptureProvider arImageCapturerWith:captureResource.queuePerformer
                                                     lensProcessingCore:captureResource.lensProcessingCore];
    }
}

// (Re)creates the managed AVCaptureSession, rewires the KVO observation of its
// `running` key, and attaches the capture device as the session input.
// The session/KVO portion is compiled out on the simulator.
+ (void)configAVCaptureSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
#if !TARGET_IPHONE_SIMULATOR
    captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
    // lazily initialize _captureResource.kvoController on background thread
    if (!captureResource.kvoController) {
        captureResource.kvoController = [[FBKVOController alloc] initWithObserver:[SCManagedCapturerV1 sharedInstance]];
    }
    // Unobserve the old session before replacing it, so the observe below
    // doesn't leave a stale registration on the discarded session.
    [captureResource.kvoController unobserve:captureResource.managedSession.avSession];
    captureResource.managedSession =
        [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:captureResource.blackCameraDetector];
    [captureResource.kvoController observe:captureResource.managedSession.avSession
                                   keyPath:@keypath(captureResource.managedSession.avSession, running)
                                   options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                                    action:captureResource.handleAVSessionStatusChange];
#endif

    // The audio session is managed elsewhere; stop AVFoundation from
    // reconfiguring the application audio session automatically.
    [captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];
    [captureResource.device setDeviceAsInput:captureResource.managedSession.avSession];
}

// Creates the device capacity analyzer on the video data source's performer
// and wires up its listeners (handler, still image capturer, focus listener).
+ (void)configDeviceCapacityAnalyzerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.deviceCapacityAnalyzer =
        [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:captureResource.videoDataSource.performer];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.deviceCapacityAnalyzerHandler];
    [captureResource.deviceCapacityAnalyzer setLowLightConditionEnabled:[SCManagedCaptureDevice isNightModeSupported]];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.stillImageCapturer];
    [captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:captureResource.device];
}

// Creates the video data source: a file-backed streamer when the file input
// decider says to process file input (debug/replay path — presumably; confirm
// with SCFileInputDecider), otherwise a live SCManagedVideoStreamer. On
// iOS 11+ the live streamer is AR-capable and portrait mode adds a depth
// processing pipeline. Finally registers the common stream listeners.
+ (void)configVideoDataSourceWithCaptureResource:(SCCaptureResource *)captureResource
                                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    if (captureResource.fileInputDecider.shouldProcessFileInput) {
        captureResource.videoDataSource =
            [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:captureResource.fileInputDecider.fileURL];
        [captureResource.lensProcessingCore setLensesActive:YES
                                           videoOrientation:captureResource.videoDataSource.videoOrientation
                                              filterFactory:nil];

        runOnMainThreadAsynchronously(^{
            [captureResource.videoPreviewGLViewManager prepareViewIfNecessary];
        });
    } else {
        if (@available(iOS 11.0, *)) {
            captureResource.videoDataSource =
                [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
                                                      arSession:captureResource.arSession
                                                 devicePosition:devicePosition];
            [captureResource.videoDataSource addListener:captureResource.arImageCapturer];
            if (captureResource.state.isPortraitModeActive) {
                [captureResource.videoDataSource setDepthCaptureEnabled:YES];

                SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
                processingPipelineBuilder.portraitModeEnabled = YES;
                SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
                [captureResource.videoDataSource addProcessingPipeline:pipeline];
            }
        } else {
            captureResource.videoDataSource =
                [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
                                                 devicePosition:devicePosition];
        }
    }

    [captureResource.videoDataSource addListener:captureResource.lensProcessingCore.capturerListener];
    [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
    [captureResource.videoDataSource addListener:captureResource.stillImageCapturer];

    if (SCIsMasterBuild()) {
        captureResource.videoStreamReporter = [[SCManagedVideoStreamReporter alloc] init];
        [captureResource.videoDataSource addListener:captureResource.videoStreamReporter];
    }
}

+ (void)configVideoScannerWithCaptureResource:(SCCaptureResource *)captureResource
{
    // When initializing video scanner:
    // Restrict default scanning to max 15 frames per second.
    // Restrict passive scanning to max 1 frame per second.
    // Give CPU time to 
rest.
    captureResource.videoScanner =
        [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:kMaxDefaultScanFrameDuration
                                               maxFramePassiveDuration:kMaxPassiveScanFrameDuration
                                                             restCycle:1 - kScanTargetCPUUtilization];
    [captureResource.videoDataSource addListener:captureResource.videoScanner];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.videoScanner];
}

// Creates the video capturer — sharing the capture queue performer when the
// tweak is enabled — and wires its lens listener and delegate.
+ (void)configVideoCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCCameraTweaksEnableCaptureSharePerformer()) {
        captureResource.videoCapturer =
            [[SCManagedVideoCapturer alloc] initWithQueuePerformer:captureResource.queuePerformer];
    } else {
        captureResource.videoCapturer = [[SCManagedVideoCapturer alloc] init];
    }

    [captureResource.videoCapturer addListener:captureResource.lensProcessingCore.capturerListener];
    captureResource.videoCapturer.delegate = captureResource.videoCapturerHandler;
}

// Creates the still image capturer appropriate for this capture resource.
+ (void)configImageCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:captureResource];
}

// Starts (or joins) the running capture session for `token`. Tokens are
// reference-counted in `tokenSet`; the session only transitions
// Ready -> Running once, on the first caller. Also starts streaming,
// registers the runtime-error notification, schedules the liveness timer,
// and announces state/didStartRunning on the main thread. `completionHandler`
// is invoked asynchronously on the main thread.
+ (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource
                                  token:(SCCapturerToken *)token
                      completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
                                     uniqueId:@""
                                     stepName:@"startOpenCameraOnManagedCaptureQueue"];
    SCTraceSignal(@"Add token %@ to set %@", token, captureResource.tokenSet);
    [captureResource.tokenSet addObject:token];
    // Trace only: the actual skip happens below via the !appInBackground checks.
    if (captureResource.appInBackground) {
        SCTraceSignal(@"Will skip startRunning on AVCaptureSession because we are in background");
    }
    SCTraceStartSection("start session")
    {
        if (!SCDeviceSupportsMetal()) {
            SCCAssert(captureResource.videoPreviewLayer, @"videoPreviewLayer should be created already");
            if (captureResource.status == SCManagedCapturerStatusReady) {
                // Need to wrap this into a CATransaction because startRunning will change
                // AVCaptureVideoPreviewLayer,
                // therefore,
                // without atomic update, will cause layer inconsistency.
                [CATransaction begin];
                [CATransaction setDisableActions:YES];
                captureResource.videoPreviewLayer.session = captureResource.managedSession.avSession;
                if (!captureResource.appInBackground) {
                    SCGhostToSnappableSignalCameraStart();
                    [captureResource.managedSession startRunning];
                }
                [self setupVideoPreviewLayer:captureResource];
                [CATransaction commit];
                SCLogCapturerInfo(@"[_captureResource.avSession startRunning] finished. token: %@", token);
            }
            // In case we don't use sample buffer, then we need to fake that we know when the first frame receieved.
            SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
        } else {
            if (captureResource.status == SCManagedCapturerStatusReady) {
                if (!captureResource.appInBackground) {
                    SCGhostToSnappableSignalCameraStart();
                    [captureResource.managedSession startRunning];
                    SCLogCapturerInfo(
                        @"[_captureResource.avSession startRunning] finished using sample buffer. token: %@", token);
                }
            }
        }
    }
    SCTraceEndSection();
    SCTraceStartSection("start streaming")
    {
        // Do the start streaming after start running, but make sure we start it
        // regardless if the status is ready or
        // not.
        [self startStreaming:captureResource];
    }
    SCTraceEndSection();

    // Paired with the removeObserver/stopObserving in stopRunning.
    if (!captureResource.notificationRegistered) {
        captureResource.notificationRegistered = YES;

        [captureResource.deviceSubjectAreaHandler startObserving];

        [[NSNotificationCenter defaultCenter] addObserver:[SCManagedCapturerV1 sharedInstance]
                                                 selector:captureResource.sessionRuntimeError
                                                     name:AVCaptureSessionRuntimeErrorNotification
                                                   object:nil];
    }

    if (captureResource.status == SCManagedCapturerStatusReady) {
        // Schedule a timer to check the running state and fix any inconsistency.
        runOnMainThreadAsynchronously(^{
            [self setupLivenessConsistencyTimerIfForeground:captureResource];
        });
        SCLogCapturerInfo(@"Setting isRunning to YES. token: %@", token);
        captureResource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:YES] build];
        captureResource.status = SCManagedCapturerStatusRunning;
    }
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
                                     uniqueId:@""
                                     stepName:@"endOpenCameraOnManagedCaptureQueue"];
    [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsOpen uniqueId:@"" parameters:nil];

    // Snapshot the state before hopping to the main thread so announcements
    // see a consistent value.
    SCManagedCapturerState *state = [captureResource.state copy];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    runOnMainThreadAsynchronously(^{
        SCTraceResume(resumeToken);
        [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
        [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStartRunning:state];
        [[SCBatteryLogger shared] logManagedCapturerDidStartRunning];
        if (completionHandler) {
            completionHandler();
        }
        if (!SCDeviceSupportsMetal()) {
            // To approximate this did render timer, it is not accurate.
            SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
        }
    });
}

// Releases `token`; when the last token is gone and the session is running,
// tears the session down (pause AR, stop session, stop/pause streaming,
// unregister notifications, kill the liveness timer) and announces
// didStopRunning. Returns YES only when this call removed the last token.
+ (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource
                                 token:(SCCapturerToken *)token
                     completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
{
    SCTraceODPCompatibleStart(2);
    SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
    BOOL videoPreviewLayerChanged = NO;
    SCAssert([captureResource.tokenSet containsObject:token],
             @"It should be a valid token that is issued by startRunning method.");
    SCTraceSignal(@"Remove token %@, from set %@", token, 
captureResource.tokenSet);
    SCLogCapturerInfo(@"Stop running. token:%@ tokenSet:%@", token, captureResource.tokenSet);
    [captureResource.tokenSet removeObject:token];
    // Only the last outstanding token actually stops the session.
    BOOL succeed = (captureResource.tokenSet.count == 0);
    if (succeed && captureResource.status == SCManagedCapturerStatusRunning) {
        captureResource.status = SCManagedCapturerStatusReady;
        if (@available(iOS 11.0, *)) {
            [captureResource.arSession pause];
        }
        [captureResource.managedSession stopRunning];
        if (!SCDeviceSupportsMetal()) {
            [captureResource.videoDataSource stopStreaming];
            [self redoVideoPreviewLayer:captureResource];
            videoPreviewLayerChanged = YES;
        } else {
            [captureResource.videoDataSource pauseStreaming];
        }

        // Dual camera cannot stay selected across a stop; fall back to back camera.
        if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera) {
            [[SCManagedCapturerV1 sharedInstance] setDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
                                                                completionHandler:nil
                                                                          context:SCCapturerContext];
        }

        // We always disable lenses and hide _captureResource.videoPreviewGLView when app goes into
        // the background
        // thus there is no need to clean up anything.
        // _captureResource.videoPreviewGLView will be shown again to the user only when the frame
        // will be processed by the lenses
        // processor

        // Remove the liveness timer which checks the health of the running state
        runOnMainThreadAsynchronously(^{
            [self destroyLivenessConsistencyTimer:captureResource];
        });
        SCLogCapturerInfo(@"Setting isRunning to NO. removed token: %@", token);
        captureResource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:NO] build];

        // Undo the registrations made in startRunning.
        captureResource.notificationRegistered = NO;

        [captureResource.deviceSubjectAreaHandler stopObserving];

        [[NSNotificationCenter defaultCenter] removeObserver:[SCManagedCapturerV1 sharedInstance]
                                                        name:AVCaptureSessionRuntimeErrorNotification
                                                      object:nil];

        [captureResource.arSessionHandler stopObserving];
    }

    SCManagedCapturerState *state = [captureResource.state copy];
    AVCaptureVideoPreviewLayer *videoPreviewLayer = videoPreviewLayerChanged ? captureResource.videoPreviewLayer : nil;
    runOnMainThreadAsynchronously(^{
        if (succeed) {
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStopRunning:state];
            [[SCBatteryLogger shared] logManagedCapturerDidStopRunning];
            if (videoPreviewLayerChanged) {
                [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                didChangeVideoPreviewLayer:videoPreviewLayer];
            }
        }
        if (completionHandler) {
            completionHandler(succeed);
        }
    });

    return succeed;
}

// Configures orientation, gravity and visibility on the existing preview
// layer. The layer is hidden whenever the managed session is not running.
// Callable from the capture queue or the main queue (asserted).
+ (void)setupVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
    if ([resource.videoPreviewLayer.connection isVideoOrientationSupported]) {
        resource.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    resource.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    resource.videoPreviewLayer.hidden = !resource.managedSession.isRunning;

    SCLogCapturerInfo(@"Setup video preview layer with connect.enabled:%d, hidden:%d",
                      resource.videoPreviewLayer.connection.enabled, resource.videoPreviewLayer.hidden);
}

// Creates a fresh AVCaptureVideoPreviewLayer inside a CATransaction (this can
// run off the main thread; without the transaction the layer would not lay
// out correctly), configures it, and re-applies software zoom if active.
+ (void)makeVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    // This can be called either from current queue or from main queue.
    SCAssert([resource.queuePerformer isCurrentPerformer] || [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
#if !TARGET_IPHONE_SIMULATOR
    SCAssert(resource.managedSession.avSession, @"session shouldn't be nil")
;
#endif
    // Need to wrap this to a transcation otherwise this is happening off the main
    // thread, and the layer
    // won't be lay out correctly.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    // Since _captureResource.avSession is always created / recreated on this private queue, and
    // videoPreviewLayer.session,
    // if not touched by anyone else, is also set on this private queue, it should
    // be safe to do this
    // If-clause check.
    resource.videoPreviewLayer = [AVCaptureVideoPreviewLayer new];
    SCAssert(resource.videoPreviewLayer, @"_captureResource.videoPreviewLayer shouldn't be nil");
    [self setupVideoPreviewLayer:resource];
    if (resource.device.softwareZoom && resource.device.zoomFactor != 1) {
        [self softwareZoomWithDevice:resource.device resource:resource];
    }
    [CATransaction commit];
    SCLogCapturerInfo(@"Created AVCaptureVideoPreviewLayer:%@", resource.videoPreviewLayer);
}

// Tears down the current preview layer on the main thread and immediately
// builds a replacement so the next startRunning doesn't pay the setup cost.
+ (void)redoVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"redo video preview layer");
    AVCaptureVideoPreviewLayer *videoPreviewLayer = 
resource.videoPreviewLayer;
    resource.videoPreviewLayer = nil;
    // This will do dispatch_sync on the main thread, since mainQueuePerformer
    // is reentrant, it should be fine
    // on iOS 7.
    [[SCQueuePerformer mainQueuePerformer] performAndWait:^{
        // Hide and remove the session when stop the video preview layer at main
        // thread.
        // It seems that when we nil out the session, it will cause some relayout
        // on iOS 9
        // and trigger an assertion.
        videoPreviewLayer.hidden = YES;
        videoPreviewLayer.session = nil;
        // We setup the video preview layer immediately after destroy it so
        // that when we start running again, we don't need to pay the setup
        // cost.
        [self makeVideoPreviewLayer:resource];
    }];
}

// Bumps the streaming sequence counter and starts the video data source.
+ (void)startStreaming:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    ++resource.streamingSequence;
    SCLogCapturerInfo(@"Start streaming. streamingSequence:%lu", (unsigned long)resource.streamingSequence);
    [resource.videoDataSource startStreaming];
}

// Installs the 1-second repeating liveness timer (main thread only).
// No-op when the timer already exists or the app is backgrounded.
// NOTE(review): the scheduled NSTimer retains its target
// (SCManagedCapturerV1 singleton); the singleton lives for the app's
// lifetime, so no leak — but invalidation relies on
// destroyLivenessConsistencyTimer being called.
+ (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    if (resource.livenessTimer) {
        // If we have the liveness timer already, don't need to set it up.
        return;
    }
    // Check if the application state is in background now, if so, we don't need
    // to setup liveness timer
    if ([UIApplication sharedApplication].applicationState != UIApplicationStateBackground) {
        resource.livenessTimer = [NSTimer scheduledTimerWithTimeInterval:1
                                                                  target:[SCManagedCapturerV1 sharedInstance]
                                                                selector:resource.livenessConsistency
                                                                userInfo:nil
                                                                 repeats:YES];
    }
}

// Invalidates and clears the liveness timer. Main thread only.
+ (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    [resource.livenessTimer invalidate];
    resource.livenessTimer = nil;
}

// Delegates software (digital) zoom to the resource's zoom handler.
+ (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource
{
    [resource.deviceZoomHandler softwareZoomWithDevice:device];
}

// Public entry point for still capture. If a capture is already in flight,
// fails fast with kSCManagedCapturerCaptureStillImageBusy on the main thread;
// otherwise flags the in-flight state and forwards to the private worker.
+ (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource
                                 aspectRatio:(CGFloat)aspectRatio
                            captureSessionID:(NSString *)captureSessionID
                      shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                           completionHandler:
                               (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                     context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    if (captureResource.stillImageCapturing) {
        SCLogCapturerWarning(@"Another still image is capturing. aspectRatio:%f", aspectRatio);
        if (completionHandler) {
            SCManagedCapturerState *state = [captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedCapturerErrorDomain
                                                                code:kSCManagedCapturerCaptureStillImageBusy
                                                            userInfo:nil],
                                  state);
            });
        }
    } else {
        captureResource.stillImageCapturing = YES;
        [SCCaptureWorker _captureStillImageAsynchronouslyWithCaptureResource:captureResource
                                                                 aspectRatio:aspectRatio
                                                            captureSessionID:captureSessionID
                                                      shouldCaptureFromVideo:shouldCaptureFromVideo
                                                           completionHandler:completionHandler];
    }
}

// Private worker for still capture. Must run on the capture queue; the
// completion handler is required. Uses the AR image capturer instead of the
// regular one when an ARSession is active (iOS 11+).
+ (void)_captureStillImageAsynchronouslyWithCaptureResource:(SCCaptureResource *)captureResource
                                                aspectRatio:(CGFloat)aspectRatio
                                           captureSessionID:(NSString *)captureSessionID
                                     shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                                          completionHandler:
                                              (sc_managed_capturer_capture_still_image_completion_handler_t)
                                                  completionHandler
{
    SCTraceODPCompatibleStart(2);
    SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(completionHandler, @"completionHandler cannot be nil");

    SCManagedCapturerState *state = [captureResource.state copy];
    SCLogCapturerInfo(@"Capturing still 
image. aspectRatio:%f state:%@", aspectRatio, state);
    // If when we start capturing, the video streamer is not running yet, start
    // running it.
    [SCCaptureWorker startStreaming:captureResource];
    SCManagedStillImageCapturer *stillImageCapturer = captureResource.stillImageCapturer;
    if (@available(iOS 11.0, *)) {
        if (state.arSessionActive) {
            stillImageCapturer = captureResource.arImageCapturer;
        }
    }
    dispatch_block_t stillImageCaptureHandler = ^{
        SCCAssert(captureResource.stillImageCapturer, @"stillImageCapturer should be available");
        float zoomFactor = captureResource.device.softwareZoom ? captureResource.device.zoomFactor : 1;
        [stillImageCapturer
            captureStillImageWithAspectRatio:aspectRatio
                                atZoomFactor:zoomFactor
                                 fieldOfView:captureResource.device.fieldOfView
                                       state:state
                            captureSessionID:captureSessionID
                      shouldCaptureFromVideo:shouldCaptureFromVideo
                           completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
                               SCTraceStart();
                               // We are done here, turn off front flash if needed,
                               // this is dispatched in
                               // SCManagedCapturer's private queue
                               // NOTE(review): this check reads the *live*
                               // captureResource.state while the outer check
                               // below uses the `state` snapshot — confirm the
                               // mix is intentional.
                               if (captureResource.state.flashActive && !captureResource.state.flashSupported &&
                                   captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                                   captureResource.frontFlashController.flashActive = NO;
                               }
                               // Mirror front-camera captures to match the preview.
                               if (state.devicePosition == SCManagedCaptureDevicePositionFront) {
                                   fullScreenImage = [UIImage
                                       imageWithCGImage:fullScreenImage.CGImage
                                                  scale:1.0
                                            orientation:SCMirroredImageOrientation(fullScreenImage.imageOrientation)];
                               }
                               captureResource.stillImageCapturing = NO;

                               runOnMainThreadAsynchronously(^{
                                   completionHandler(fullScreenImage, metadata, error, state);
                               });
                           }];
    };
    if (state.flashActive && !captureResource.state.flashSupported &&
        state.devicePosition == SCManagedCaptureDevicePositionFront) {
        captureResource.frontFlashController.flashActive = YES;
        // Do the first capture only after 0.175 seconds so that the front flash is
        // already available
        [captureResource.queuePerformer perform:stillImageCaptureHandler after:0.175];
    } else {
        stillImageCaptureHandler();
    }
}

// Starts a video recording. Fails fast with kSCManagedCapturerRecordVideoBusy
// when a recording is already active. Otherwise: stops any playback session,
// enables torch for flash recordings, resets software zoom, locks focus
// (non-AR only), reconfigures the stream listeners, and kicks off the
// asynchronous capturer. `completionHandler` receives the session info on the
// main thread.
+ (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource
                           outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       audioConfiguration:(SCAudioConfiguration *)configuration
                              maxDuration:(NSTimeInterval)maxDuration
                                  fileURL:(NSURL *)fileURL
                         captureSessionID:(NSString *)captureSessionID
                        completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
{
    SCTraceODPCompatibleStart(2);
    if (captureResource.videoRecording) {
        if (completionHandler) {
            runOnMainThreadAsynchronously(^{
                completionHandler(SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0),
                                  [NSError errorWithDomain:kSCManagedCapturerErrorDomain
                                                      code:kSCManagedCapturerRecordVideoBusy
                                                  userInfo:nil]);
            });
        }
        // Don't start recording session
        SCLogCapturerInfo(@"*** Tries to start multiple video recording session ***");
        return;
    }

    // Fix: https://jira.sc-corp.net/browse/CCAM-12322
    // Fire this notification in recording state to let PlaybackSession stop
    runOnMainThreadAsynchronously(^{
        [[NSNotificationCenter defaultCenter] postNotificationName:kSCImageProcessVideoPlaybackStopNotification
                                                            object:[SCManagedCapturer sharedInstance]
                                                          userInfo:nil];
    });

    SCLogCapturerInfo(@"Start recording. OutputSettigns:%@, maxDuration:%f, fileURL:%@", outputSettings, maxDuration,
                      fileURL);
    // Turns on torch temporarily if we have Flash active
    if (!captureResource.state.torchActive) {
        if (captureResource.state.flashActive) {
            [captureResource.device setTorchActive:YES];

            if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                captureResource.frontFlashController.torchActive = YES;
            }
        }
    }

    if (captureResource.device.softwareZoom) {
        captureResource.device.zoomFactor = 1;
        [SCCaptureWorker softwareZoomWithDevice:captureResource.device resource:captureResource];
    }

    // Lock focus on both front and back camera if not using ARKit
    if (!captureResource.state.arSessionActive) {
        SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
        SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
        [front setRecording:YES];
        [back setRecording:YES];
    }
    // Start 
streaming if we haven't already
    [self startStreaming:captureResource];
    // Remove other listeners from video streamer
    [captureResource.videoDataSource removeListener:captureResource.deviceCapacityAnalyzer];
    // If lenses is not actually applied, we should open sticky video tweak

    BOOL isLensApplied = [SCCaptureWorker isLensApplied:captureResource];
    [captureResource.videoDataSource setKeepLateFrames:!isLensApplied];
    SCLogCapturerInfo(@"Start recording. isLensApplied:%d", isLensApplied);

    [captureResource.videoDataSource addListener:captureResource.videoCapturer];
    captureResource.videoRecording = YES;
    if (captureResource.state.lensesActive) {
        BOOL modifySource = captureResource.videoRecording || captureResource.state.liveVideoStreaming;
        [captureResource.lensProcessingCore setModifySource:modifySource];
    }

    if (captureResource.fileInputDecider.shouldProcessFileInput) {
        [captureResource.videoDataSource stopStreaming];
    }
    // The max video duration, we will stop process sample buffer if the current
    // time is larger than max video duration.
    // 0.5 so that we have a bit of lean way on video recording initialization, and
    // when NSTimer stucked in normal
    // recording sessions, we don't suck too much as breaking expections on how long
    // it is recorded.
    SCVideoCaptureSessionInfo sessionInfo = [captureResource.videoCapturer
        startRecordingAsynchronouslyWithOutputSettings:outputSettings
                                    audioConfiguration:configuration
                                           maxDuration:maxDuration + 0.5
                                                 toURL:fileURL
                                          deviceFormat:captureResource.device.activeFormat
                                           orientation:AVCaptureVideoOrientationLandscapeLeft
                                      captureSessionID:captureSessionID];

    if (completionHandler) {
        runOnMainThreadAsynchronously(^{
            completionHandler(sessionInfo, nil);
        });
    }

    // Track dropped frames for the duration of this recording.
    captureResource.droppedFramesReporter = [SCManagedDroppedFramesReporter new];
    [captureResource.videoDataSource addListener:captureResource.droppedFramesReporter];
    [[SCManagedCapturerV1 sharedInstance] addListener:captureResource.droppedFramesReporter];
}

// Stops the active recording asynchronously, detaches the dropped-frames
// reporter, and flushes its report on the video data source's performer
// (the same performer that delivers sample buffers).
+ (void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Stop recording asynchronously");
    [captureResource.videoCapturer stopRecordingAsynchronously];

    [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
    // Hold a local strong reference: the resource's property is nil'ed below
    // but the block still needs the reporter.
    SCManagedDroppedFramesReporter *droppedFramesReporter = captureResource.droppedFramesReporter;
    [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
    captureResource.droppedFramesReporter = nil;

    [captureResource.videoDataSource.performer perform:^{
        // call on the same performer as that of managedVideoDataSource: didOutputSampleBuffer: devicePosition:
        BOOL keepLateFrames = [captureResource.videoDataSource getKeepLateFrames];
        [droppedFramesReporter reportWithKeepLateFrames:keepLateFrames
                                          lensesApplied:[SCCaptureWorker isLensApplied:captureResource]];
        // Disable keepLateFrames once stop recording to make sure the recentness of preview
        [captureResource.videoDataSource setKeepLateFrames:NO];
    }];
}

// Cancels the in-flight recording: drops the dropped-frames reporter without
// reporting, detaches the video capturer, and restores the capacity analyzer
// listener removed by startRecording.
+ (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Cancel recording asynchronously");
    [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
    [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
    captureResource.droppedFramesReporter = nil;

    [captureResource.videoDataSource removeListener:captureResource.videoCapturer];
    // Add back other listeners to video streamer
    [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
    [captureResource.videoCapturer cancelRecordingAsynchronously];

    // NOTE(review): redundant — already nil'ed above; harmless.
    captureResource.droppedFramesReporter = nil;
}

// Returns the capturer's active session info, or an invalid-time placeholder
// when no video capturer exists yet.
+ (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource
{
    if (resource.videoCapturer == nil) {
        SCLogCapturerWarning(
            @"Trying to retrieve SCVideoCaptureSessionInfo while _captureResource.videoCapturer is nil.");
        return SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0);
    } else {
        return resource.videoCapturer.activeSession;
    }
}

// An ARSession can run only on iOS 11+, with lenses active, and when ARKit
// supports the current device position.
+ (BOOL)canRunARSession:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    if (@available(iOS 11.0, *)) {
        return resource.state.lensesActive &&
               [ARConfiguration sc_supportedForDevicePosition:resource.state.devicePosition];
    }
    return NO;
}

// Tears down the ARSession and hands control back to the plain AVCaptureSession.
// Capture-queue only (asserted); no-op unless an ARSession is active.
+ (void)turnARSessionOff:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer], @"");
    if (@available(iOS 11.0, *)) {
        SC_GUARD_ELSE_RETURN(resource.state.arSessionActive);
        SCLogCapturerInfo(@"Stopping ARSession");

        [resource.arSessionHandler stopARSessionRunning];
        [resource.managedSession performConfiguration:^{
            [resource.device updateActiveFormatWithSession:resource.managedSession.avSession];
        }];
        [resource.managedSession startRunning];
        resource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:NO] build];
        [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
        [self clearARKitData:resource];
       
 [self updateLensesFieldOfViewTracking:resource];\n        runOnMainThreadAsynchronously(^{\n            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];\n            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                       didChangeARSessionActive:resource.state];\n            [[SCManagedCapturerV1 sharedInstance] unlockZoomWithContext:SCCapturerContext];\n        });\n    };\n}\n\n+ (void)clearARKitData:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    if (@available(iOS 11.0, *)) {\n        if ([resource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {\n            id<SCManagedVideoARDataSource> dataSource = (id<SCManagedVideoARDataSource>)resource.videoDataSource;\n            dataSource.currentFrame = nil;\n#ifdef SC_USE_ARKIT_FACE\n            dataSource.lastDepthData = nil;\n#endif\n        }\n    }\n}\n\n+ (void)turnARSessionOn:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    SCAssert([resource.queuePerformer isCurrentPerformer], @\"\");\n    if (@available(iOS 11.0, *)) {\n        SC_GUARD_ELSE_RETURN(!resource.state.arSessionActive);\n        SC_GUARD_ELSE_RETURN([self canRunARSession:resource]);\n        SCLogCapturerInfo(@\"Starting ARSession\");\n        resource.state =\n            [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:YES] build];\n        // Make sure we commit any configurations that may be in flight.\n        [resource.videoDataSource commitConfiguration];\n\n        runOnMainThreadAsynchronously(^{\n            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];\n            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]\n                       didChangeARSessionActive:resource.state];\n            // Zooming on an ARSession breaks stuff in super weird 
ways.\n            [[SCManagedCapturerV1 sharedInstance] lockZoomWithContext:SCCapturerContext];\n        });\n        [self clearARKitData:resource];\n        [resource.managedSession stopRunning];\n        [resource.arSession\n            runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:resource.state.devicePosition]\n                         options:(ARSessionRunOptionResetTracking | ARSessionRunOptionRemoveExistingAnchors)];\n\n        [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];\n        [self updateLensesFieldOfViewTracking:resource];\n    }\n}\n\n+ (void)configBlackCameraDetectorWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    captureResource.captureSessionFixer = [[SCCaptureSessionFixer alloc] init];\n    captureResource.blackCameraDetector.blackCameraNoOutputDetector.delegate = captureResource.captureSessionFixer;\n    [captureResource.videoDataSource addListener:captureResource.blackCameraDetector.blackCameraNoOutputDetector];\n}\n\n+ (void)configureCaptureFaceDetectorWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    if (SCCameraFaceFocusDetectionMethod() == SCCameraFaceFocusDetectionMethodTypeCIDetector) {\n        SCCaptureCoreImageFaceDetector *detector =\n            [[SCCaptureCoreImageFaceDetector alloc] initWithCaptureResource:captureResource];\n        captureResource.captureFaceDetector = detector;\n        [captureResource.videoDataSource addListener:detector];\n    } else {\n        captureResource.captureFaceDetector =\n            [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];\n    }\n}\n\n+ (void)configCaptureDeviceHandlerWithCaptureResource:(SCCaptureResource *)captureResource\n{\n    captureResource.device.delegate = captureResource.captureDeviceHandler;\n}\n\n+ (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource\n{\n    // 1. 
reset observers\n    [captureResource.lensProcessingCore removeFieldOfViewListener];\n\n    if (@available(iOS 11.0, *)) {\n        if (captureResource.state.arSessionActive &&\n            [captureResource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {\n            // 2. handle ARKit case\n            id<SCManagedVideoARDataSource> arDataSource =\n                (id<SCManagedVideoARDataSource>)captureResource.videoDataSource;\n            float fieldOfView = arDataSource.fieldOfView;\n            if (fieldOfView > 0) {\n                // 2.5 there will be no field of view\n                [captureResource.lensProcessingCore setFieldOfView:fieldOfView];\n            }\n            [captureResource.lensProcessingCore setAsFieldOfViewListenerForARDataSource:arDataSource];\n            return;\n        }\n    }\n    // 3. fallback to regular device field of view\n    float fieldOfView = captureResource.device.fieldOfView;\n    [captureResource.lensProcessingCore setFieldOfView:fieldOfView];\n    [captureResource.lensProcessingCore setAsFieldOfViewListenerForDevice:captureResource.device];\n}\n\n+ (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource\n{\n    return resource.videoCapturer.firstWrittenAudioBufferDelay;\n}\n\n+ (BOOL)audioQueueStarted:(SCCaptureResource *)resource\n{\n    return resource.videoCapturer.audioQueueStarted;\n}\n\n+ (BOOL)isLensApplied:(SCCaptureResource *)resource\n{\n    return resource.state.lensesActive && resource.lensProcessingCore.isLensApplied;\n}\n\n+ (BOOL)isVideoMirrored:(SCCaptureResource *)resource\n{\n    if ([resource.videoDataSource respondsToSelector:@selector(isVideoMirrored)]) {\n        return [resource.videoDataSource isVideoMirrored];\n    } else {\n        // Default is NO.\n        return NO;\n    }\n}\n\n+ (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    BOOL isIphone5Series = [SCDeviceName 
isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];\n    return isIphone5Series && !resource.state.flashActive && ![SCCaptureWorker isLensApplied:resource];\n}\n\n+ (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest\n                                   completionHandler:(dispatch_block_t)completionHandler\n                                            resource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    if (resource.state.isPortraitModeActive) {\n        SCTraceODPCompatibleStart(2);\n        [resource.queuePerformer perform:^{\n            SCTraceStart();\n            if (resource.device.isConnected) {\n                if (resource.device.softwareZoom) {\n                    CGPoint adjustedPoint = CGPointMake((pointOfInterest.x - 0.5) / resource.device.softwareZoom + 0.5,\n                                                        (pointOfInterest.y - 0.5) / resource.device.softwareZoom + 0.5);\n                    // Fix for the zooming factor\n                    [resource.videoDataSource setPortraitModePointOfInterest:adjustedPoint];\n                    if (resource.state.arSessionActive) {\n                        if (@available(ios 11.0, *)) {\n                            [resource.arImageCapturer setPortraitModePointOfInterest:adjustedPoint];\n                        }\n                    } else {\n                        [resource.stillImageCapturer setPortraitModePointOfInterest:adjustedPoint];\n                    }\n                } else {\n                    [resource.videoDataSource setPortraitModePointOfInterest:pointOfInterest];\n                    if (resource.state.arSessionActive) {\n                        if (@available(ios 11.0, *)) {\n                            [resource.arImageCapturer setPortraitModePointOfInterest:pointOfInterest];\n                        }\n                    } else {\n                        [resource.stillImageCapturer 
setPortraitModePointOfInterest:pointOfInterest];\n                    }\n                }\n            }\n            if (completionHandler) {\n                runOnMainThreadAsynchronously(completionHandler);\n            }\n        }];\n    }\n}\n\n+ (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration\n                                         resource:(SCCaptureResource *)resource\n{\n    SCAssertPerformer(resource.queuePerformer);\n    [resource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration];\n}\n\n+ (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Stop scan\");\n    [resource.videoScanner stopScanAsynchronously];\n    if (completionHandler) {\n        runOnMainThreadAsynchronously(completionHandler);\n    }\n}\n\n+ (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource\n{\n    SCTraceODPCompatibleStart(2);\n    SCLogCapturerInfo(@\"Start scan. ScanConfiguration:%@\", configuration);\n    [SCCaptureWorker startStreaming:resource];\n    [resource.videoScanner startScanAsynchronouslyWithScanConfiguration:configuration];\n}\n@end\n"
  }
]