Repository: JonnyBanana/Snapchat-Source-Code-Leak
Branch: master
Commit: f031dd9e0f08
Files: 311
Total size: 1.1 MB

Directory structure:
gitextract_jfvnmt3e/
├── BlackCamera/
│   ├── SCBlackCameraDetector.h
│   ├── SCBlackCameraDetector.m
│   ├── SCBlackCameraNoOutputDetector.h
│   ├── SCBlackCameraNoOutputDetector.m
│   ├── SCBlackCameraPreviewDetector.h
│   ├── SCBlackCameraPreviewDetector.m
│   ├── SCBlackCameraReporter.h
│   ├── SCBlackCameraReporter.m
│   ├── SCBlackCameraRunningDetector.h
│   ├── SCBlackCameraRunningDetector.m
│   ├── SCBlackCameraSessionBlockDetector.h
│   ├── SCBlackCameraSessionBlockDetector.m
│   ├── SCBlackCameraViewDetector.h
│   ├── SCBlackCameraViewDetector.m
│   ├── SCCaptureSessionFixer.h
│   └── SCCaptureSessionFixer.m
├── ContextAwareTaskManagement/
│   ├── OWNERS
│   ├── Requests/
│   │   ├── SCContextAwareSnapCreationThrottleRequest.h
│   │   └── SCContextAwareSnapCreationThrottleRequest.m
│   └── Triggers/
│       ├── SCSnapCreationTriggers.h
│       └── SCSnapCreationTriggers.m
├── Features/
│   ├── Core/
│   │   ├── SCFeature.h
│   │   ├── SCFeatureContainerView.h
│   │   ├── SCFeatureCoordinator.h
│   │   ├── SCFeatureCoordinator.m
│   │   └── SCFeatureProvider.h
│   ├── Flash/
│   │   ├── SCFeatureFlash.h
│   │   ├── SCFeatureFlashImpl.h
│   │   ├── SCFeatureFlashImpl.m
│   │   ├── SCFlashButton.h
│   │   └── SCFlashButton.m
│   ├── HandsFree/
│   │   └── SCFeatureHandsFree.h
│   ├── ImageCapture/
│   │   ├── SCFeatureImageCapture.h
│   │   ├── SCFeatureImageCaptureImpl.h
│   │   └── SCFeatureImageCaptureImpl.m
│   ├── NightMode/
│   │   ├── SCFeatureNightMode.h
│   │   ├── SCNightModeButton.h
│   │   └── SCNightModeButton.m
│   ├── Scanning/
│   │   └── SCFeatureScanning.h
│   ├── Shazam/
│   │   └── SCFeatureShazam.h
│   ├── SnapKit/
│   │   └── SCFeatureSnapKit.h
│   ├── TapToFocus/
│   │   ├── SCFeatureTapToFocusAndExposure.h
│   │   ├── SCFeatureTapToFocusAndExposureImpl.h
│   │   ├── SCFeatureTapToFocusAndExposureImpl.m
│   │   ├── SCTapAnimationView.h
│   │   └── SCTapAnimationView.m
│   ├── ToggleCamera/
│   │   └── SCFeatureToggleCamera.h
│   └── Zooming/
│       └── SCFeatureZooming.h
├── Lens/
│   ├── SCManagedCapturerARImageCaptureProvider.h
│   ├── SCManagedCapturerGLViewManagerAPI.h
│   ├── SCManagedCapturerLSAComponentTrackerAPI.h
│   ├── SCManagedCapturerLensAPI.h
│   └── SCManagedCapturerLensAPIProvider.h
├── Logging/
│   ├── SCCoreCameraLogger.h
│   ├── SCCoreCameraLogger.m
│   ├── SCLogger+Camera.h
│   ├── SCLogger+Camera.m
│   └── SCManiphestTicketCreator.h
├── ManagedCapturer/
│   ├── ARConfiguration+SCConfiguration.h
│   ├── ARConfiguration+SCConfiguration.m
│   ├── AVCaptureConnection+InputDevice.h
│   ├── AVCaptureConnection+InputDevice.m
│   ├── AVCaptureDevice+ConfigurationLock.h
│   ├── AVCaptureDevice+ConfigurationLock.m
│   ├── CapturerV2/
│   │   ├── Configuration/
│   │   │   ├── SCCaptureConfiguration.h
│   │   │   ├── SCCaptureConfiguration.m
│   │   │   ├── SCCaptureConfigurationAnnouncer.h
│   │   │   ├── SCCaptureConfigurationAnnouncer.m
│   │   │   ├── SCCaptureConfigurationAnnouncer_Private.h
│   │   │   ├── SCCaptureConfigurationListener.h
│   │   │   ├── SCCaptureConfiguration_Private.h
│   │   │   ├── SCCaptureConfigurator.h
│   │   │   └── SCCaptureConfigurator.m
│   │   └── Core/
│   │       ├── SCCaptureCore.h
│   │       └── SCCaptureCore.m
│   ├── ImageProcessing/
│   │   ├── SCDepthBlurMetalModule.metal
│   │   ├── SCDepthBlurMetalRenderCommand.h
│   │   ├── SCDepthBlurMetalRenderCommand.m
│   │   ├── SCDepthToGrayscaleMetalModule.metal
│   │   ├── SCDepthToGrayscaleMetalRenderCommand.h
│   │   ├── SCDepthToGrayscaleMetalRenderCommand.m
│   │   ├── SCDigitalExposureHandler.h
│   │   ├── SCDigitalExposureHandler.m
│   │   ├── SCExposureAdjustMetalModule.metal
│   │   ├── SCExposureAdjustMetalRenderCommand.h
│   │   ├── SCExposureAdjustMetalRenderCommand.m
│   │   ├── SCExposureAdjustProcessingModule.h
│   │   ├── SCExposureAdjustProcessingModule.m
│   │   ├── SCMetalModule.h
│   │   ├── SCMetalModule.m
│   │   ├── SCMetalTextureResource.h
│   │   ├── SCMetalTextureResource.m
│   │   ├── SCNightModeEnhancementMetalModule.metal
│   │   ├── SCNightModeEnhancementMetalRenderCommand.h
│   │   ├── SCNightModeEnhancementMetalRenderCommand.m
│   │   ├── SCProcessingModule.h
│   │   ├── SCProcessingModuleUtils.h
│   │   ├── SCProcessingModuleUtils.m
│   │   ├── SCProcessingPipeline.h
│   │   ├── SCProcessingPipeline.m
│   │   ├── SCProcessingPipelineBuilder.h
│   │   ├── SCProcessingPipelineBuilder.m
│   │   ├── SCStillImageDepthBlurFilter.h
│   │   └── SCStillImageDepthBlurFilter.m
│   ├── NSURL+Asset.h
│   ├── NSURL+Asset.m
│   ├── OWNERS
│   ├── SCAudioCaptureSession.h
│   ├── SCAudioCaptureSession.m
│   ├── SCCameraSettingUtils.h
│   ├── SCCameraSettingUtils.m
│   ├── SCCaptureCommon.h
│   ├── SCCaptureCommon.m
│   ├── SCCaptureCoreImageFaceDetector.h
│   ├── SCCaptureCoreImageFaceDetector.m
│   ├── SCCaptureDeviceAuthorization.h
│   ├── SCCaptureDeviceAuthorization.m
│   ├── SCCaptureDeviceAuthorizationChecker.h
│   ├── SCCaptureDeviceAuthorizationChecker.m
│   ├── SCCaptureDeviceResolver.h
│   ├── SCCaptureDeviceResolver.m
│   ├── SCCaptureFaceDetectionParser.h
│   ├── SCCaptureFaceDetectionParser.m
│   ├── SCCaptureFaceDetector.h
│   ├── SCCaptureFaceDetectorTrigger.h
│   ├── SCCaptureFaceDetectorTrigger.m
│   ├── SCCaptureMetadataObjectParser.h
│   ├── SCCaptureMetadataObjectParser.m
│   ├── SCCaptureMetadataOutputDetector.h
│   ├── SCCaptureMetadataOutputDetector.m
│   ├── SCCapturer.h
│   ├── SCCapturerBufferedVideoWriter.h
│   ├── SCCapturerBufferedVideoWriter.m
│   ├── SCCapturerDefines.h
│   ├── SCCapturerToken.h
│   ├── SCCapturerToken.m
│   ├── SCCapturerTokenProvider.h
│   ├── SCCapturerTokenProvider.m
│   ├── SCExposureState.h
│   ├── SCExposureState.m
│   ├── SCFileAudioCaptureSession.h
│   ├── SCFileAudioCaptureSession.m
│   ├── SCManagedAudioStreamer.h
│   ├── SCManagedAudioStreamer.m
│   ├── SCManagedCaptureDevice+SCManagedCapturer.h
│   ├── SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h
│   ├── SCManagedCaptureDevice.h
│   ├── SCManagedCaptureDevice.m
│   ├── SCManagedCaptureDeviceAutoExposureHandler.h
│   ├── SCManagedCaptureDeviceAutoExposureHandler.m
│   ├── SCManagedCaptureDeviceAutoFocusHandler.h
│   ├── SCManagedCaptureDeviceAutoFocusHandler.m
│   ├── SCManagedCaptureDeviceDefaultZoomHandler.h
│   ├── SCManagedCaptureDeviceDefaultZoomHandler.m
│   ├── SCManagedCaptureDeviceDefaultZoomHandler_Private.h
│   ├── SCManagedCaptureDeviceExposureHandler.h
│   ├── SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h
│   ├── SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m
│   ├── SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h
│   ├── SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m
│   ├── SCManagedCaptureDeviceFocusHandler.h
│   ├── SCManagedCaptureDeviceHandler.h
│   ├── SCManagedCaptureDeviceHandler.m
│   ├── SCManagedCaptureDeviceLinearInterpolationZoomHandler.h
│   ├── SCManagedCaptureDeviceLinearInterpolationZoomHandler.m
│   ├── SCManagedCaptureDeviceLockOnRecordExposureHandler.h
│   ├── SCManagedCaptureDeviceLockOnRecordExposureHandler.m
│   ├── SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h
│   ├── SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m
│   ├── SCManagedCaptureDeviceSubjectAreaHandler.h
│   ├── SCManagedCaptureDeviceSubjectAreaHandler.m
│   ├── SCManagedCaptureDeviceThresholdExposureHandler.h
│   ├── SCManagedCaptureDeviceThresholdExposureHandler.m
│   ├── SCManagedCaptureFaceDetectionAdjustingPOIResource.h
│   ├── SCManagedCaptureFaceDetectionAdjustingPOIResource.m
│   ├── SCManagedCapturePreviewLayerController.h
│   ├── SCManagedCapturePreviewLayerController.m
│   ├── SCManagedCapturePreviewView.h
│   ├── SCManagedCapturePreviewView.m
│   ├── SCManagedCapturePreviewViewDebugView.h
│   ├── SCManagedCapturePreviewViewDebugView.m
│   ├── SCManagedCaptureSession.h
│   ├── SCManagedCaptureSession.m
│   ├── SCManagedCapturer.h
│   ├── SCManagedCapturer.m
│   ├── SCManagedCapturerARSessionHandler.h
│   ├── SCManagedCapturerARSessionHandler.m
│   ├── SCManagedCapturerListener.h
│   ├── SCManagedCapturerListenerAnnouncer.h
│   ├── SCManagedCapturerListenerAnnouncer.mm
│   ├── SCManagedCapturerSampleMetadata.h
│   ├── SCManagedCapturerSampleMetadata.m
│   ├── SCManagedCapturerState.h
│   ├── SCManagedCapturerState.m
│   ├── SCManagedCapturerState.value
│   ├── SCManagedCapturerStateBuilder.h
│   ├── SCManagedCapturerStateBuilder.m
│   ├── SCManagedCapturerUtils.h
│   ├── SCManagedCapturerUtils.m
│   ├── SCManagedCapturerV1.h
│   ├── SCManagedCapturerV1.m
│   ├── SCManagedCapturerV1_Private.h
│   ├── SCManagedDeviceCapacityAnalyzer.h
│   ├── SCManagedDeviceCapacityAnalyzer.m
│   ├── SCManagedDeviceCapacityAnalyzerHandler.h
│   ├── SCManagedDeviceCapacityAnalyzerHandler.m
│   ├── SCManagedDeviceCapacityAnalyzerListener.h
│   ├── SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h
│   ├── SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm
│   ├── SCManagedDroppedFramesReporter.h
│   ├── SCManagedDroppedFramesReporter.m
│   ├── SCManagedFrameHealthChecker.h
│   ├── SCManagedFrameHealthChecker.m
│   ├── SCManagedFrontFlashController.h
│   ├── SCManagedFrontFlashController.m
│   ├── SCManagedLegacyStillImageCapturer.h
│   ├── SCManagedLegacyStillImageCapturer.m
│   ├── SCManagedPhotoCapturer.h
│   ├── SCManagedPhotoCapturer.m
│   ├── SCManagedRecordedVideo.h
│   ├── SCManagedRecordedVideo.m
│   ├── SCManagedRecordedVideo.value
│   ├── SCManagedStillImageCapturer.h
│   ├── SCManagedStillImageCapturer.mm
│   ├── SCManagedStillImageCapturerHandler.h
│   ├── SCManagedStillImageCapturerHandler.m
│   ├── SCManagedStillImageCapturer_Protected.h
│   ├── SCManagedVideoARDataSource.h
│   ├── SCManagedVideoCapturer.h
│   ├── SCManagedVideoCapturer.m
│   ├── SCManagedVideoCapturerHandler.h
│   ├── SCManagedVideoCapturerHandler.m
│   ├── SCManagedVideoCapturerLogger.h
│   ├── SCManagedVideoCapturerLogger.m
│   ├── SCManagedVideoCapturerOutputSettings.h
│   ├── SCManagedVideoCapturerOutputSettings.m
│   ├── SCManagedVideoCapturerOutputSettings.value
│   ├── SCManagedVideoCapturerOutputType.h
│   ├── SCManagedVideoCapturerTimeObserver.h
│   ├── SCManagedVideoCapturerTimeObserver.m
│   ├── SCManagedVideoFileStreamer.h
│   ├── SCManagedVideoFileStreamer.m
│   ├── SCManagedVideoFrameSampler.h
│   ├── SCManagedVideoFrameSampler.m
│   ├── SCManagedVideoNoSoundLogger.h
│   ├── SCManagedVideoNoSoundLogger.m
│   ├── SCManagedVideoScanner.h
│   ├── SCManagedVideoScanner.m
│   ├── SCManagedVideoStreamReporter.h
│   ├── SCManagedVideoStreamReporter.m
│   ├── SCManagedVideoStreamer.h
│   ├── SCManagedVideoStreamer.m
│   ├── SCMetalUtils.h
│   ├── SCMetalUtils.m
│   ├── SCScanConfiguration.h
│   ├── SCScanConfiguration.m
│   ├── SCSingleFrameStreamCapturer.h
│   ├── SCSingleFrameStreamCapturer.m
│   ├── SCStillImageCaptureVideoInputMethod.h
│   ├── SCStillImageCaptureVideoInputMethod.m
│   ├── SCTimedTask.h
│   ├── SCTimedTask.m
│   ├── SCVideoCaptureSessionInfo.h
│   ├── StateMachine/
│   │   ├── SCCaptureBaseState.h
│   │   ├── SCCaptureBaseState.m
│   │   ├── SCCaptureStateDelegate.h
│   │   ├── SCCaptureStateMachineBookKeeper.h
│   │   ├── SCCaptureStateMachineBookKeeper.m
│   │   ├── SCCaptureStateMachineContext.h
│   │   ├── SCCaptureStateMachineContext.m
│   │   ├── SCCaptureStateUtil.h
│   │   ├── SCCaptureStateUtil.m
│   │   ├── SCManagedCapturerLogging.h
│   │   └── States/
│   │       ├── SCCaptureImageState.h
│   │       ├── SCCaptureImageState.m
│   │       ├── SCCaptureImageStateTransitionPayload.h
│   │       ├── SCCaptureImageStateTransitionPayload.m
│   │       ├── SCCaptureImageWhileRecordingState.h
│   │       ├── SCCaptureImageWhileRecordingState.m
│   │       ├── SCCaptureImageWhileRecordingStateTransitionPayload.h
│   │       ├── SCCaptureImageWhileRecordingStateTransitionPayload.m
│   │       ├── SCCaptureInitializedState.h
│   │       ├── SCCaptureInitializedState.m
│   │       ├── SCCaptureRecordingState.h
│   │       ├── SCCaptureRecordingState.m
│   │       ├── SCCaptureRecordingStateTransitionPayload.h
│   │       ├── SCCaptureRecordingStateTransitionPayload.m
│   │       ├── SCCaptureRunningState.h
│   │       ├── SCCaptureRunningState.m
│   │       ├── SCCaptureScanningState.h
│   │       ├── SCCaptureScanningState.m
│   │       ├── SCCaptureUninitializedState.h
│   │       ├── SCCaptureUninitializedState.m
│   │       ├── SCStateTransitionPayload.h
│   │       └── SCStateTransitionPayload.m
│   ├── UIScreen+Debug.h
│   └── UIScreen+Debug.m
├── README.md
├── Resource/
│   ├── SCCaptureResource.h
│   └── SCCaptureResource.m
├── Tweaks/
│   ├── SCCameraTweaks.h
│   └── SCCameraTweaks.m
├── UI/
│   ├── AVCameraViewEnums.h
│   ├── Lenses/
│   │   ├── LensButtonZ/
│   │   │   └── SCFeatureLensButtonZ.h
│   │   └── LensSideButton/
│   │       └── SCFeatureLensSideButton.h
│   ├── SCLongPressGestureRecognizer.h
│   └── SCLongPressGestureRecognizer.m
├── VolumeButton/
│   ├── SCCameraVolumeButtonHandler.h
│   └── SCCameraVolumeButtonHandler.m
└── Worker/
    ├── SCCaptureWorker.h
    └── SCCaptureWorker.m

================================================
FILE CONTENTS
================================================

================================================
FILE: BlackCamera/SCBlackCameraDetector.h
================================================
//
//  SCBlackCameraDetector.h
//  Snapchat
//
//  Created by Derek Wang on 24/01/2018.
//

#import "SCBlackCameraReporter.h"

#import
#import

@class SCBlackCameraNoOutputDetector;

@interface SCBlackCameraDetector : NSObject

@property (nonatomic, strong) SCBlackCameraNoOutputDetector *blackCameraNoOutputDetector;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;

// CameraView visible/invisible
- (void)onCameraViewVisible:(BOOL)visible;
- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)touch;

// Call this when [AVCaptureSession startRunning] is called
- (void)sessionWillCallStartRunning;
- (void)sessionDidCallStartRunning;

// Call this when [AVCaptureSession stopRunning] is called
- (void)sessionWillCallStopRunning;
- (void)sessionDidCallStopRunning;

// Call this when [AVCaptureSession commitConfiguration] is called
- (void)sessionWillCommitConfiguration;
- (void)sessionDidCommitConfiguration;

- (void)sessionDidChangeIsRunning:(BOOL)running;

// For CapturePreview visibility detector
- (void)capturePreviewDidBecomeVisible:(BOOL)visible;

/**
 Mark the start of creating a new session.
 When we fix black camera by creating a new session, some detector may report black camera because we called
 [AVCaptureSession stopRunning] on the old AVCaptureSession. We therefore tell the detector that the session is being
 recreated, so that calling [AVCaptureSession stopRunning] on the old AVCaptureSession is fine.
 */
- (void)sessionWillRecreate;

/**
 Mark the end of creating a new session.
 */
- (void)sessionDidRecreate;

@end
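Usage note: the `sessionWillRecreate`/`sessionDidRecreate` pair above brackets a session swap so the other detectors do not misread the intentional `stopRunning` as a black camera. A minimal sketch of a call site (hypothetical; `_detector` and `_session` are assumed ivars of whatever object owns the AVCaptureSession, and the input/output reattachment is elided):

- (void)recreateAVCaptureSessionSketch
{
    [_detector sessionWillRecreate];

    // Tearing down the old session would normally trip the detectors.
    [_detector sessionWillCallStopRunning];
    [_session stopRunning];
    [_detector sessionDidCallStopRunning];

    _session = [[AVCaptureSession alloc] init];
    // ... reattach inputs/outputs ...

    [_detector sessionWillCallStartRunning];
    [_session startRunning];
    [_detector sessionDidCallStartRunning];

    [_detector sessionDidRecreate];
}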
#pragma mark - Capture preview visibility detector

- (void)capturePreviewDidBecomeVisible:(BOOL)visible
{
    SC_GUARD_ELSE_RETURN(visible != _previewIsVisible);
    _previewIsVisible = visible;
    [_previewDetector capturePreviewDidBecomeVisible:visible];
}

#pragma mark - AVCaptureSession block detector

- (void)sessionWillCommitConfiguration
{
    [_sessionBlockDetector sessionWillCommitConfiguration];
}

- (void)sessionDidCommitConfiguration
{
    [_sessionBlockDetector sessionDidCommitConfiguration];
}

- (void)sessionWillRecreate
{
    [_cameraViewDetector sessionWillRecreate];
}

- (void)sessionDidRecreate
{
    [_cameraViewDetector sessionDidRecreate];
}

@end

================================================
FILE: BlackCamera/SCBlackCameraNoOutputDetector.h
================================================
//
//  SCBlackCameraNoOutputDetector.h
//  Snapchat
//
//  Created by Derek Wang on 05/12/2017.
//

#import "SCManagedCapturerListener.h"

#import
#import

@class SCBlackCameraNoOutputDetector, SCBlackCameraReporter;
@protocol SCManiphestTicketCreator;

@protocol SCBlackCameraDetectorDelegate <NSObject>
- (void)detector:(SCBlackCameraNoOutputDetector *)detector didDetectBlackCamera:(id<SCCapturer>)capture;
@end

@interface SCBlackCameraNoOutputDetector : NSObject <SCManagedCapturerListener>

@property (nonatomic, weak) id<SCBlackCameraDetectorDelegate> delegate;

- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter;

@end

================================================
FILE: BlackCamera/SCBlackCameraNoOutputDetector.m
================================================
//
//  SCBlackCameraDetectorNoOutput.m
//  Snapchat
//
//  Created by Derek Wang on 05/12/2017.
//
// This detector is used to detect the case where the session is running but there is no sample buffer output.

#import "SCBlackCameraNoOutputDetector.h"

#import "SCBlackCameraReporter.h"

#import
#import
#import
#import
#import
#import
#import

static CGFloat const kShortCheckingDelay = 0.5f;
static CGFloat const kLongCheckingDelay = 3.0f;

static char *const kSCBlackCameraDetectorQueueLabel = "com.snapchat.black-camera-detector";

@interface SCBlackCameraNoOutputDetector () {
    BOOL _sampleBufferReceived;
    BOOL _blackCameraDetected;
    // Whether we received the first frame after we detected black camera. That may be because the checking delay is
    // too short, so we will switch to kLongCheckingDelay the next time we do the checking.
    BOOL _blackCameraRecovered;
    // Whether checking is scheduled, to avoid duplicated checking
    BOOL _checkingScheduled;
    // Whether AVCaptureSession is stopped. If stopped, we don't need to check black camera any more.
    // It is set on the main thread, read on a background queue.
    BOOL _sessionStoppedRunning;
}
@property (nonatomic) SCQueuePerformer *queuePerformer;
@property (nonatomic) SCBlackCameraReporter *reporter;
@end

@implementation SCBlackCameraNoOutputDetector

- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter
{
    self = [super init];
    if (self) {
        _queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCBlackCameraDetectorQueueLabel
                                                 qualityOfService:QOS_CLASS_BACKGROUND
                                                        queueType:DISPATCH_QUEUE_SERIAL
                                                          context:SCQueuePerformerContextCamera];
        _reporter = reporter;
    }
    return self;
}

- (void)managedVideoDataSource:(id)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    // The block is very light-weight
    [self.queuePerformer perform:^{
        if (_blackCameraDetected) {
            // Detected a black camera case
            _blackCameraDetected = NO;
            _blackCameraRecovered = YES;
            SCLogCoreCameraInfo(@"[BlackCamera] Black camera recovered");
            if (SCExperimentWithBlackCameraReporting()) {
                [[SCLogger sharedInstance] logUnsampledEvent:KSCCameraBlackCamera
                                                  parameters:@{
                                                      @"type" : @"RECOVERED"
                                                  }
                                            secretParameters:nil
                                                     metrics:nil];
            }
        }
        // Received buffer!
        _sampleBufferReceived = YES;
    }];
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStartRunning:(SCManagedCapturerState *)state
{
    SCAssertMainThread();
    if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
        SCLogCoreCameraInfo(@"[BlackCamera] In background, skip checking");
        return;
    }
    _sessionStoppedRunning = NO;
    [self.queuePerformer perform:^{
        SCTraceODPCompatibleStart(2);
        if (_checkingScheduled) {
            SCLogCoreCameraInfo(@"[BlackCamera] Checking is scheduled, skip");
            return;
        }
        if (_sessionStoppedRunning) {
            SCLogCoreCameraInfo(@"[BlackCamera] AVCaptureSession stopped, should not check");
            return;
        }
        _sampleBufferReceived = NO;
        if (_blackCameraRecovered) {
            SCLogCoreCameraInfo(@"[BlackCamera] Last black camera recovered, let's wait longer to check this time");
        }
        SCLogCoreCameraInfo(@"[BlackCamera] Schedule black camera checking");
        [self.queuePerformer perform:^{
            SCTraceODPCompatibleStart(2);
            if (!_sessionStoppedRunning) {
                if (!_sampleBufferReceived) {
                    _blackCameraDetected = YES;
                    [_reporter reportBlackCameraWithCause:SCBlackCameraNoOutputData];
                    [self.delegate detector:self didDetectBlackCamera:managedCapturer];
                } else {
                    SCLogCoreCameraInfo(@"[BlackCamera] No black camera");
                    _blackCameraDetected = NO;
                }
            } else {
                SCLogCoreCameraInfo(@"[BlackCamera] AVCaptureSession stopped");
                _blackCameraDetected = NO;
            }
            _blackCameraRecovered = NO;
            _checkingScheduled = NO;
        }
                            after:_blackCameraRecovered ? kLongCheckingDelay : kShortCheckingDelay];
        _checkingScheduled = YES;
    }];
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didStopRunning:(SCManagedCapturerState *)state
{
    SCAssertMainThread();
    _sessionStoppedRunning = YES;
    [self.queuePerformer perform:^{
        SCTraceODPCompatibleStart(2);
        _sampleBufferReceived = NO;
    }];
}

@end
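Wiring note: this detector only works if it hears both the session lifecycle and the per-frame callbacks. A sketch of the expected hookup (hypothetical call site; `videoDataSource` stands in for whatever object announces `managedVideoDataSource:didOutputSampleBuffer:devicePosition:`, and its `addListener:` is assumed):

SCBlackCameraDetector *detector = [[SCBlackCameraDetector alloc] initWithTicketCreator:ticketCreator];

// Frame callbacks drive -managedVideoDataSource:didOutputSampleBuffer:devicePosition:
[videoDataSource addListener:detector.blackCameraNoOutputDetector];

// Lifecycle callbacks drive -managedCapturer:didStartRunning: / didStopRunning:
[managedCapturer addListener:detector.blackCameraNoOutputDetector];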
================================================
FILE: BlackCamera/SCBlackCameraPreviewDetector.h
================================================
//
//  SCBlackCameraPreviewDetector.h
//  Snapchat
//
//  Created by Derek Wang on 25/01/2018.
//

#import

@class SCQueuePerformer, SCBlackCameraReporter;
@protocol SCManiphestTicketCreator;

@interface SCBlackCameraPreviewDetector : NSObject

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;

- (void)sessionDidChangeIsRunning:(BOOL)running;

- (void)capturePreviewDidBecomeVisible:(BOOL)visible;

@end

================================================
FILE: BlackCamera/SCBlackCameraPreviewDetector.m
================================================
//
//  SCBlackCameraPreviewDetector.m
//  Snapchat
//
//  Created by Derek Wang on 25/01/2018.
//

#import "SCBlackCameraPreviewDetector.h"

#import "SCBlackCameraReporter.h"
#import "SCMetalUtils.h"

#import
#import
#import
#import

// Check whether preview is visible when AVCaptureSession is running
static CGFloat const kSCBlackCameraCheckingDelay = 0.5;

@interface SCBlackCameraPreviewDetector () {
    BOOL _previewVisible;
    dispatch_block_t _checkingBlock;
}
@property (nonatomic) SCQueuePerformer *queuePerformer;
@property (nonatomic) SCBlackCameraReporter *reporter;
@end

@implementation SCBlackCameraPreviewDetector

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter
{
    self = [super init];
    if (self) {
        _queuePerformer = performer;
        _reporter = reporter;
    }
    return self;
}

- (void)capturePreviewDidBecomeVisible:(BOOL)visible
{
    [_queuePerformer perform:^{
        _previewVisible = visible;
    }];
}

- (void)sessionDidChangeIsRunning:(BOOL)running
{
    if (running) {
        [self _scheduleCheck];
    } else {
        [_queuePerformer perform:^{
            if (_checkingBlock) {
                dispatch_block_cancel(_checkingBlock);
                _checkingBlock = nil;
            }
        }];
    }
}

- (void)_scheduleCheck
{
    [_queuePerformer perform:^{
        @weakify(self);
        _checkingBlock = dispatch_block_create(0, ^{
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            self->_checkingBlock = nil;
            [self _checkPreviewState];
        });
        [_queuePerformer perform:_checkingBlock after:kSCBlackCameraCheckingDelay];
    }];
}

- (void)_checkPreviewState
{
    if (!_previewVisible) {
        runOnMainThreadAsynchronously(^{
            // Make sure the app is in foreground
            SC_GUARD_ELSE_RETURN([UIApplication sharedApplication].applicationState == UIApplicationStateActive);
            SCBlackCameraCause cause =
                SCDeviceSupportsMetal() ? SCBlackCameraRenderingPaused : SCBlackCameraPreviewIsHidden;
            [_reporter reportBlackCameraWithCause:cause];
            [_reporter fileShakeTicketWithCause:cause];
        });
    }
}

@end
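The scheduling idiom used here (and in the running/view detectors below) is a cancellable delayed check on a serial queue. Stripped of SCQueuePerformer, the same pattern in plain GCD looks roughly like this (a sketch, not project code; names are hypothetical):

#import <Foundation/Foundation.h>

@interface SCDelayedCheckSketch : NSObject
- (void)scheduleCheckAfter:(NSTimeInterval)delay;
- (void)cancelPendingCheck;
@end

@implementation SCDelayedCheckSketch {
    dispatch_queue_t _queue;   // serial queue that owns all detector state
    dispatch_block_t _pending; // currently scheduled check, if any
}

- (instancetype)init
{
    if (self = [super init]) {
        _queue = dispatch_queue_create("com.example.delayed-check", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}

- (void)scheduleCheckAfter:(NSTimeInterval)delay
{
    dispatch_async(_queue, ^{
        // dispatch_block_create wraps the body so it can be cancelled individually.
        self->_pending = dispatch_block_create(0, ^{
            self->_pending = nil;
            // ... inspect state here; everything is serialized on _queue ...
        });
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delay * NSEC_PER_SEC)), self->_queue,
                       self->_pending);
    });
}

- (void)cancelPendingCheck
{
    dispatch_async(_queue, ^{
        if (self->_pending) {
            dispatch_block_cancel(self->_pending); // a no-op if the block already started
            self->_pending = nil;
        }
    });
}

@end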
================================================
FILE: BlackCamera/SCBlackCameraReporter.h
================================================
//
//  SCBlackCameraReporter.h
//  Snapchat
//
//  Created by Derek Wang on 09/01/2018.
//

#import
#import

typedef NS_ENUM(NSInteger, SCBlackCameraCause) {
    SCBlackCameraStartRunningNotCalled,      // 1. View is visible, but session startRunning is not called
    SCBlackCameraSessionNotRunning,          // 2. Session startRunning is called, but isRunning is still false
    SCBlackCameraRenderingPaused,            // 3.1 View is visible, but capture preview rendering is paused
    SCBlackCameraPreviewIsHidden,            // 3.2 For non-metal devices, capture preview is hidden
    SCBlackCameraSessionStartRunningBlocked, // 4.1 AVCaptureSession is blocked at startRunning
    SCBlackCameraSessionConfigurationBlocked, // 4.2 AVCaptureSession is blocked at commitConfiguration
    SCBlackCameraNoOutputData,               // 5. Session is running, but no data output
};

@protocol SCManiphestTicketCreator;

@interface SCBlackCameraReporter : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;

- (NSString *)causeNameFor:(SCBlackCameraCause)cause;

- (void)reportBlackCameraWithCause:(SCBlackCameraCause)cause;

- (void)fileShakeTicketWithCause:(SCBlackCameraCause)cause;

@end

================================================
FILE: BlackCamera/SCBlackCameraReporter.m
================================================
//
//  SCBlackCameraReporter.m
//  Snapchat
//
//  Created by Derek Wang on 09/01/2018.
//

#import "SCBlackCameraReporter.h"

#import "SCManiphestTicketCreator.h"

#import
#import
#import
#import
#import
#import

@interface SCBlackCameraReporter ()
@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;
@end

@implementation SCBlackCameraReporter

- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator
{
    if (self = [super init]) {
        _ticketCreator = ticketCreator;
    }
    return self;
}

- (NSString *)causeNameFor:(SCBlackCameraCause)cause
{
    switch (cause) {
        case SCBlackCameraStartRunningNotCalled:
            return @"StartRunningNotCalled";
        case SCBlackCameraSessionNotRunning:
            return @"SessionNotRunning";
        case SCBlackCameraRenderingPaused:
            return @"RenderingPause";
        case SCBlackCameraPreviewIsHidden:
            return @"PreviewIsHidden";
        case SCBlackCameraSessionStartRunningBlocked:
            return @"SessionStartRunningBlocked";
        case SCBlackCameraSessionConfigurationBlocked:
            return @"SessionConfigurationBlocked";
        case SCBlackCameraNoOutputData:
            return @"NoOutputData";
        default:
            SCAssert(NO, @"illegal cause");
            break;
    }
    return nil;
}

- (void)reportBlackCameraWithCause:(SCBlackCameraCause)cause
{
    NSString *causeStr = [self causeNameFor:cause];
    SCLogCoreCameraError(@"[BlackCamera] Detected black camera, cause: %@", causeStr);

    NSDictionary *parameters = @{ @"type" : @"DETECTED", @"cause" : causeStr };
    [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
    if (SCExperimentWithBlackCameraReporting()) {
        [[SCLogger sharedInstance] logUnsampledEvent:KSCCameraBlackCamera
                                          parameters:parameters
                                    secretParameters:nil
                                             metrics:nil];
    }
}

- (void)fileShakeTicketWithCause:(SCBlackCameraCause)cause
{
    if (SCExperimentWithBlackCameraExceptionLogging()) {
        // Log exception with auto S2R
        NSString *errMsg = [NSString
            sc_stringWithFormat:@"[BlackCamera] Detected black camera, cause: %@", [self causeNameFor:cause]];
        [_ticketCreator createAndFile:nil
                         creationTime:0
                          description:errMsg
                                email:nil
                              project:@"Camera"
                           subproject:nil];
    }
}

@end
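For reference, each detector funnels into this reporter in one or two calls; a typical detection site looks like the following (sketch; the reporter instance is shared by all detectors in SCBlackCameraDetector):

// Metrics event plus beta report, keyed by the cause name above.
[reporter reportBlackCameraWithCause:SCBlackCameraNoOutputData];
// Optionally also an auto Shake2Report ticket, gated by an experiment flag.
[reporter fileShakeTicketWithCause:SCBlackCameraNoOutputData];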
================================================
FILE: BlackCamera/SCBlackCameraRunningDetector.h
================================================
//
//  SCBlackCameraRunningDetector.h
//  Snapchat
//
//  Created by Derek Wang on 30/01/2018.
//

#import
#import

@class SCQueuePerformer, SCBlackCameraReporter;
@protocol SCManiphestTicketCreator;

@interface SCBlackCameraRunningDetector : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;

// When session isRunning changed
- (void)sessionDidChangeIsRunning:(BOOL)running;

// Call this after [AVCaptureSession startRunning] is called
- (void)sessionDidCallStartRunning;

// Call this before [AVCaptureSession stopRunning] is called
- (void)sessionWillCallStopRunning;

@end

================================================
FILE: BlackCamera/SCBlackCameraRunningDetector.m
================================================
//
//  SCBlackCameraRunningDetector.m
//  Snapchat
//
//  Created by Derek Wang on 30/01/2018.
//

#import "SCBlackCameraRunningDetector.h"

#import "SCBlackCameraReporter.h"

#import
#import
#import
#import

// Check whether the session's isRunning became true within this period after startRunning was called
static CGFloat const kSCBlackCameraCheckingDelay = 5;

@interface SCBlackCameraRunningDetector () {
    BOOL _isSessionRunning;
    dispatch_block_t _checkSessionBlock;
}
@property (nonatomic) SCQueuePerformer *queuePerformer;
@property (nonatomic) SCBlackCameraReporter *reporter;
@end

@implementation SCBlackCameraRunningDetector

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter
{
    self = [super init];
    if (self) {
        _queuePerformer = performer;
        _reporter = reporter;
    }
    return self;
}

- (void)sessionDidChangeIsRunning:(BOOL)running
{
    [_queuePerformer perform:^{
        _isSessionRunning = running;
    }];
}

- (void)sessionDidCallStartRunning
{
    [self _scheduleCheck];
}

- (void)sessionWillCallStopRunning
{
    [_queuePerformer perform:^{
        if (_checkSessionBlock) {
            dispatch_block_cancel(_checkSessionBlock);
            _checkSessionBlock = nil;
        }
    }];
}

- (void)_scheduleCheck
{
    [_queuePerformer perform:^{
        @weakify(self);
        _checkSessionBlock = dispatch_block_create(0, ^{
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            self->_checkSessionBlock = nil;
            [self _checkSessionState];
        });
        [_queuePerformer perform:_checkSessionBlock after:kSCBlackCameraCheckingDelay];
    }];
}

- (void)_checkSessionState
{
    if (!_isSessionRunning) {
        [_reporter reportBlackCameraWithCause:SCBlackCameraSessionNotRunning];
    }
}

@end
================================================
FILE: BlackCamera/SCBlackCameraSessionBlockDetector.h
================================================
//
//  SCBlackCameraSessionBlockDetector.h
//  Snapchat
//
//  Created by Derek Wang on 25/01/2018.
//

#import "SCBlackCameraReporter.h"

#import

@interface SCBlackCameraSessionBlockDetector : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter;

- (void)sessionWillCallStartRunning;
- (void)sessionDidCallStartRunning;

- (void)sessionWillCommitConfiguration;
- (void)sessionDidCommitConfiguration;

@end

================================================
FILE: BlackCamera/SCBlackCameraSessionBlockDetector.m
================================================
//
//  SCBlackCameraSessionBlockDetector.m
//  Snapchat
//
//  Created by Derek Wang on 25/01/2018.
//

#import "SCBlackCameraSessionBlockDetector.h"

#import "SCBlackCameraReporter.h"

#import
#import

@import CoreGraphics;

// Blocking longer than 5 seconds is considered black camera
static CGFloat const kSCBlackCameraBlockingThreshold = 5;
// Will report if the session blocks longer than 1 second
static CGFloat const kSCSessionBlockingLogThreshold = 1;

@interface SCBlackCameraSessionBlockDetector () {
    NSTimeInterval _startTime;
}
@property (nonatomic) SCBlackCameraReporter *reporter;
@end

@implementation SCBlackCameraSessionBlockDetector

- (instancetype)initWithReporter:(SCBlackCameraReporter *)reporter
{
    if (self = [super init]) {
        _reporter = reporter;
    }
    return self;
}

- (void)sessionWillCallStartRunning
{
    _startTime = [NSDate timeIntervalSinceReferenceDate];
}

- (void)sessionDidCallStartRunning
{
    [self _reportBlackCameraIfNeededWithCause:SCBlackCameraSessionStartRunningBlocked];
    [self _reportBlockingIfNeededWithCause:SCBlackCameraSessionStartRunningBlocked];
}

- (void)sessionWillCommitConfiguration
{
    _startTime = [NSDate timeIntervalSinceReferenceDate];
}

- (void)sessionDidCommitConfiguration
{
    [self _reportBlackCameraIfNeededWithCause:SCBlackCameraSessionConfigurationBlocked];
    [self _reportBlockingIfNeededWithCause:SCBlackCameraSessionConfigurationBlocked];
}

- (void)_reportBlockingIfNeededWithCause:(SCBlackCameraCause)cause
{
    NSTimeInterval duration = [NSDate timeIntervalSinceReferenceDate] - _startTime;
    if (duration >= kSCSessionBlockingLogThreshold) {
        NSString *causeStr = [_reporter causeNameFor:cause];
        [[SCLogger sharedInstance] logEvent:KSCCameraCaptureSessionBlocked
                                 parameters:@{
                                     @"cause" : causeStr,
                                     @"duration" : @(duration)
                                 }];
    }
}

- (void)_reportBlackCameraIfNeededWithCause:(SCBlackCameraCause)cause
{
    NSTimeInterval endTime = [NSDate timeIntervalSinceReferenceDate];
    if (endTime - _startTime >= kSCBlackCameraBlockingThreshold) {
        [_reporter reportBlackCameraWithCause:cause];
    }
}

@end
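This detector is purely a stopwatch around the synchronous AVCaptureSession calls; the will/did pairs must bracket the call directly. A sketch of a call site (hypothetical locals `blockDetector` and `session`; `beginConfiguration` is assumed to have been called earlier in the mutation):

[blockDetector sessionWillCommitConfiguration];
[session commitConfiguration]; // synchronous; can stall when the capture stack is unhealthy
[blockDetector sessionDidCommitConfiguration]; // logs if >= 1s, reports black camera if >= 5s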
================================================
FILE: BlackCamera/SCBlackCameraViewDetector.h
================================================
//
//  SCBlackCameraDetectorCameraView.h
//  Snapchat
//
//  Created by Derek Wang on 24/01/2018.
//

#import
#import

@class SCQueuePerformer, SCBlackCameraReporter;
@protocol SCManiphestTicketCreator;

@interface SCBlackCameraViewDetector : NSObject

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter;

// CameraView visible/invisible
- (void)onCameraViewVisible:(BOOL)visible;
- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)gesture;

// Call this when [AVCaptureSession startRunning] is called
- (void)sessionWillCallStartRunning;

// Call this when [AVCaptureSession stopRunning] is called
- (void)sessionWillCallStopRunning;

- (void)sessionWillRecreate;
- (void)sessionDidRecreate;

@end

================================================
FILE: BlackCamera/SCBlackCameraViewDetector.m
================================================
//
//  SCBlackCameraDetectorCameraView.m
//  Snapchat
//
//  Created by Derek Wang on 24/01/2018.
//

#import "SCBlackCameraViewDetector.h"

#import "SCBlackCameraReporter.h"
#import "SCCaptureDeviceAuthorization.h"

#import
#import
#import
#import
#import

// Check whether we called [AVCaptureSession startRunning] within this period
static CGFloat const kSCBlackCameraCheckingDelay = 0.5;

@interface SCBlackCameraViewDetector () {
    BOOL _startRunningCalled;
    BOOL _sessionIsRecreating;
    dispatch_block_t _checkSessionBlock;
}
@property (nonatomic) SCQueuePerformer *queuePerformer;
@property (nonatomic) SCBlackCameraReporter *reporter;
@property (nonatomic, weak) UIGestureRecognizer *cameraViewGesture;
@end

@implementation SCBlackCameraViewDetector

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer reporter:(SCBlackCameraReporter *)reporter
{
    self = [super init];
    if (self) {
        _queuePerformer = performer;
        _reporter = reporter;
    }
    return self;
}

#pragma mark - Camera view visibility change trigger

- (void)onCameraViewVisible:(BOOL)visible
{
    SCTraceODPCompatibleStart(2);
    SCLogCoreCameraInfo(@"[BlackCamera] onCameraViewVisible: %d", visible);
    BOOL firstTimeAccess = [SCCaptureDeviceAuthorization notDeterminedForVideoCapture];
    if (firstTimeAccess) {
        // We don't want to check black camera for firstTimeAccess
        return;
    }
    // Visible and application is active
    if (visible && [UIApplication sharedApplication].applicationState == UIApplicationStateActive) {
        // Since this method is usually called before the view is actually visible, leave some margin to check
        [self _scheduleCheckDelayed:YES];
    } else {
        [_queuePerformer perform:^{
            if (_checkSessionBlock) {
                dispatch_block_cancel(_checkSessionBlock);
                _checkSessionBlock = nil;
            }
        }];
    }
}

// Call this when [AVCaptureSession startRunning] is called
- (void)sessionWillCallStartRunning
{
    [_queuePerformer perform:^{
        _startRunningCalled = YES;
    }];
}

- (void)sessionWillCallStopRunning
{
    [_queuePerformer perform:^{
        _startRunningCalled = NO;
    }];
}

- (void)_scheduleCheckDelayed:(BOOL)delay
{
    [_queuePerformer perform:^{
        SC_GUARD_ELSE_RETURN(!_checkSessionBlock);
        @weakify(self);
        _checkSessionBlock = dispatch_block_create(0, ^{
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            self->_checkSessionBlock = nil;
            [self _checkSessionState];
        });
        if (delay) {
            [_queuePerformer perform:_checkSessionBlock after:kSCBlackCameraCheckingDelay];
        } else {
            [_queuePerformer perform:_checkSessionBlock];
        }
    }];
}

- (void)_checkSessionState
{
    SCLogCoreCameraInfo(@"[BlackCamera] checkSessionState startRunning: %d, sessionIsRecreating: %d",
                        _startRunningCalled, _sessionIsRecreating);
    if (!_startRunningCalled && !_sessionIsRecreating) {
        [_reporter reportBlackCameraWithCause:SCBlackCameraStartRunningNotCalled];
        [_reporter fileShakeTicketWithCause:SCBlackCameraStartRunningNotCalled];
    }
}

- (void)sessionWillRecreate
{
    [_queuePerformer perform:^{
        _sessionIsRecreating = YES;
    }];
}

- (void)sessionDidRecreate
{
    [_queuePerformer perform:^{
        _sessionIsRecreating = NO;
    }];
}

- (void)onCameraViewVisibleWithTouch:(UIGestureRecognizer *)gesture
{
    if (gesture != _cameraViewGesture) {
        // Skip repeating gesture
        self.cameraViewGesture = gesture;
        [self _scheduleCheckDelayed:NO];
    }
}

@end
================================================
FILE: BlackCamera/SCCaptureSessionFixer.h
================================================
//
//  SCCaptureSessionFixer.h
//  Snapchat
//
//  Created by Derek Wang on 05/12/2017.
//

#import "SCBlackCameraNoOutputDetector.h"

#import

@interface SCCaptureSessionFixer : NSObject <SCBlackCameraDetectorDelegate>

@end

================================================
FILE: BlackCamera/SCCaptureSessionFixer.m
================================================
//
//  SCCaptureSessionFixer.m
//  Snapchat
//
//  Created by Derek Wang on 05/12/2017.
//

#import "SCCaptureSessionFixer.h"

#import "SCCameraTweaks.h"

@implementation SCCaptureSessionFixer

- (void)detector:(SCBlackCameraNoOutputDetector *)detector didDetectBlackCamera:(id<SCCapturer>)capture
{
    if (SCCameraTweaksBlackCameraRecoveryEnabled()) {
        [capture recreateAVCaptureSession];
    }
}

@end
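The fixer closes the loop from detection back to recovery: it is installed as the no-output detector's delegate, and on detection it asks the capturer to rebuild its AVCaptureSession (which, per the header at the top of this directory, should be bracketed with sessionWillRecreate/sessionDidRecreate). A wiring sketch (hypothetical call site; note the delegate is weak, so the owner must retain the fixer):

SCCaptureSessionFixer *fixer = [[SCCaptureSessionFixer alloc] init]; // keep a strong reference
detector.blackCameraNoOutputDetector.delegate = fixer;
// On detection, recovery runs only if SCCameraTweaksBlackCameraRecoveryEnabled() is on.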
================================================
FILE: ContextAwareTaskManagement/OWNERS
================================================
--- !OWNERS
version: 2
default:
  jira_project: CCAM
  owners:
    num_required_reviewers: 1
    teams:
      - Snapchat/core-camera-ios
    users:
      - cjiang
      - ljia

================================================
FILE: ContextAwareTaskManagement/Requests/SCContextAwareSnapCreationThrottleRequest.h
================================================
//
//  SCContextAwareSnapCreationThrottleRequest.h
//  SCCamera
//
//  Created by Cheng Jiang on 4/24/18.
//

#import
#import

@interface SCContextAwareSnapCreationThrottleRequest : NSObject

- (instancetype)init;

@end

================================================
FILE: ContextAwareTaskManagement/Requests/SCContextAwareSnapCreationThrottleRequest.m
================================================
//
//  SCContextAwareSnapCreationThrottleRequest.m
//  SCCamera
//
//  Created by Cheng Jiang on 4/24/18.
//

#import "SCContextAwareSnapCreationThrottleRequest.h"

#import
#import
#import
#import

BOOL SCCATMSnapCreationEnabled(void)
{
    static dispatch_once_t capturingOnceToken;
    static BOOL capturingImprovementEnabled;
    dispatch_once(&capturingOnceToken, ^{
        BOOL enabledWithAB = SCExperimentWithContextAwareTaskManagementCapturingImprovementEnabled();
        NSInteger tweakOption = [FBTweakValue(@"CATM", @"Performance Improvement", @"Capturing", (id) @0, (@{
                                     @0 : @"Respect A/B",
                                     @1 : @"YES",
                                     @2 : @"NO"
                                 })) integerValue];
        switch (tweakOption) {
            case 0:
                capturingImprovementEnabled = enabledWithAB;
                break;
            case 1:
                capturingImprovementEnabled = YES;
                break;
            case 2:
                capturingImprovementEnabled = NO;
                break;
            default:
                SCCAssertFail(@"Illegal option");
        }
    });
    return capturingImprovementEnabled;
}

@implementation SCContextAwareSnapCreationThrottleRequest {
    NSString *_requestID;
}

- (instancetype)init
{
    if (self = [super init]) {
        _requestID = @"SCContextAwareSnapCreationThrottleRequest";
    }
    return self;
}

- (BOOL)shouldThrottle:(SCApplicationContextState)context
{
    return SCCATMSnapCreationEnabled() && context != SCApplicationContextStateCamera;
}

- (NSString *)requestID
{
    return _requestID;
}

- (BOOL)isEqual:(id)object
{
    return [[object requestID] isEqualToString:_requestID];
}

@end

================================================
FILE: ContextAwareTaskManagement/Triggers/SCSnapCreationTriggers.h
================================================
//
//  SCSnapCreationTriggers.h
//  Snapchat
//
//  Created by Cheng Jiang on 4/1/18.
//

#import

@interface SCSnapCreationTriggers : NSObject

- (void)markSnapCreationStart;

- (void)markSnapCreationPreviewAnimationFinish;

- (void)markSnapCreationPreviewImageSetupFinish;

- (void)markSnapCreationPreviewVideoFirstFrameRenderFinish;

- (void)markSnapCreationEndWithContext:(NSString *)context;

@end
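How the marks above are intended to pair up: `markSnapCreationStart` submits a suspend request to the context-aware throttler, and the throttle is lifted once the preview animation plus the first displayable frame (image setup or first video frame) have both landed. A sketch of a call site (hypothetical; the real call sites live in the capture/preview flow, which is not part of this dump):

SCSnapCreationTriggers *triggers = [[SCSnapCreationTriggers alloc] init];

// Capture button pressed: suspend throttleable background work while the snap is created.
[triggers markSnapCreationStart];

// Later, from the preview flow; order of the two finish marks does not matter.
[triggers markSnapCreationPreviewAnimationFinish];
[triggers markSnapCreationPreviewImageSetupFinish]; // or ...PreviewVideoFirstFrameRenderFinish for video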
================================================
FILE: ContextAwareTaskManagement/Triggers/SCSnapCreationTriggers.m
================================================
//
//  SCSnapCreationTriggers.m
//  Snapchat
//
//  Created by Cheng Jiang on 3/30/18.
//

#import "SCSnapCreationTriggers.h"

#import "SCContextAwareSnapCreationThrottleRequest.h"

#import
#import
#import
#import

@implementation SCSnapCreationTriggers {
    BOOL _snapCreationStarted;
    BOOL _previewAnimationFinished;
    BOOL _previewImageSetupFinished;
    BOOL _previewVideoFirstFrameRendered;
}

- (void)markSnapCreationStart
{
    SC_GUARD_ELSE_RUN_AND_RETURN(
        !_snapCreationStarted,
        SCLogCoreCameraWarning(@"markSnapCreationStart skipped because previous SnapCreation session is not complete"));
    @synchronized(self) {
        _snapCreationStarted = YES;
    }
    [[SCContextAwareThrottleRequester shared] submitSuspendRequest:[SCContextAwareSnapCreationThrottleRequest new]];
}

- (void)markSnapCreationPreviewAnimationFinish
{
    @synchronized(self) {
        _previewAnimationFinished = YES;
        if (_previewImageSetupFinished || _previewVideoFirstFrameRendered) {
            [self markSnapCreationEndWithContext:@"markSnapCreationPreviewAnimationFinish"];
        }
    }
}

- (void)markSnapCreationPreviewImageSetupFinish
{
    @synchronized(self) {
        _previewImageSetupFinished = YES;
        if (_previewAnimationFinished) {
            [self markSnapCreationEndWithContext:@"markSnapCreationPreviewImageSetupFinish"];
        }
    }
}

- (void)markSnapCreationPreviewVideoFirstFrameRenderFinish
{
    @synchronized(self) {
        _previewVideoFirstFrameRendered = YES;
        if (_previewAnimationFinished) {
            [self markSnapCreationEndWithContext:@"markSnapCreationPreviewVideoFirstFrameRenderFinish"];
        }
    }
}

- (void)markSnapCreationEndWithContext:(NSString *)context
{
    SC_GUARD_ELSE_RETURN(_snapCreationStarted);
    SCLogCoreCameraInfo(@"markSnapCreationEnd triggered with context: %@", context);
    @synchronized(self) {
        _snapCreationStarted = NO;
        _previewAnimationFinished = NO;
        _previewImageSetupFinished = NO;
        _previewVideoFirstFrameRendered = NO;
    }
    [[SCContextAwareThrottleRequester shared] submitResumeRequest:[SCContextAwareSnapCreationThrottleRequest new]];
}

@end

================================================
FILE: Features/Core/SCFeature.h
================================================
//
//  SCFeature.h
//  SCCamera
//
//  Created by Kristian Bauer on 1/4/18.
//

#import

/**
 * Top level protocol for UI features
 */
#define SCLogCameraFeatureInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCFeature] " fmt, ##__VA_ARGS__)

@protocol SCFeatureContainerView;

@protocol SCFeature <NSObject>
@optional
- (void)configureWithView:(UIView *)view;
- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)gestureRecognizer;
- (void)forwardPanGesture:(UIPanGestureRecognizer *)gestureRecognizer;
- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point;
@end

================================================
FILE: Features/Core/SCFeatureContainerView.h
================================================
//
//  SCFeatureContainerView.h
//  SCCamera
//
//  Created by Kristian Bauer on 4/17/18.
//

#import

@protocol SCFeatureContainerView

- (BOOL)isTapGestureRecognizer:(UIGestureRecognizer *)gestureRecognizer;

- (CGRect)initialCameraTimerFrame;

@end
================================================
FILE: Features/Core/SCFeatureCoordinator.h
================================================
//
//  SCFeatureCoordinator.h
//  SCCamera
//
//  Created by Kristian Bauer on 1/4/18.
//

#import "SCFeature.h"

#import

@protocol SCFeatureProvider;
@class SCCameraOverlayView;

/**
 * Handles creation of SCFeatures and communication between owner and features.
 */
@interface SCFeatureCoordinator : NSObject

SC_INIT_AND_NEW_UNAVAILABLE;

- (instancetype)initWithFeatureContainerView:(SCCameraOverlayView *)containerView
                                    provider:(id<SCFeatureProvider>)provider;

/**
 * Asks provider for features with given featureTypes specified in initializer.
 */
- (void)reloadFeatures;

/**
 * Eventually won't need this, but in order to use the new framework with the existing architecture, we need a way to
 * forward gestures to individual features.
 */
- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer;
- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)recognizer;
- (void)forwardPanGesture:(UIPanGestureRecognizer *)recognizer;

/**
 * To prevent gestures on AVCameraViewController from triggering at the same time as feature controls, we need to
 * provide a way for features to indicate that they will block a touch at a given point.
 */
- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point;

@end
================================================
FILE: Features/Core/SCFeatureCoordinator.m
================================================
//
//  SCFeatureCoordinator.m
//  SCCamera
//
//  Created by Kristian Bauer on 1/4/18.
//

#import "SCFeatureCoordinator.h"

#import "SCFeature.h"
#import "SCFeatureProvider.h"

#import
#import

typedef NSString SCFeatureDictionaryKey;

@interface SCFeatureCoordinator ()
@property (nonatomic, weak) UIView *containerView;
@property (nonatomic, strong) id<SCFeatureProvider> provider;
@end

@implementation SCFeatureCoordinator

- (instancetype)initWithFeatureContainerView:(UIView *)containerView provider:(id<SCFeatureProvider>)provider
{
    SCTraceODPCompatibleStart(2);
    SCAssert(containerView, @"SCFeatureCoordinator containerView must be non-nil");
    SCAssert(provider, @"SCFeatureCoordinator provider must be non-nil");
    self = [super init];
    if (self) {
        _containerView = containerView;
        _provider = provider;
        [self reloadFeatures];
    }
    return self;
}

- (void)reloadFeatures
{
    SCTraceODPCompatibleStart(2);
    [_provider resetInstances];
    NSMutableArray *features = [NSMutableArray array];
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(configureWithView:)]) {
            [feature configureWithView:_containerView];
        }
        if (feature) {
            [features addObject:feature];
        }
    }
}

- (void)forwardCameraTimerGesture:(UIGestureRecognizer *)gestureRecognizer
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(forwardCameraTimerGesture:)]) {
            [feature forwardCameraTimerGesture:gestureRecognizer];
        }
    }
}

- (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(forwardCameraOverlayTapGesture:)]) {
            [feature forwardCameraOverlayTapGesture:gestureRecognizer];
        }
    }
}

- (void)forwardLongPressGesture:(UIGestureRecognizer *)gestureRecognizer
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(forwardLongPressGesture:)]) {
            [feature forwardLongPressGesture:gestureRecognizer];
        }
    }
}

- (void)forwardPinchGesture:(UIPinchGestureRecognizer *)gestureRecognizer
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(forwardPinchGesture:)]) {
            [feature forwardPinchGesture:gestureRecognizer];
        }
    }
}

- (void)forwardPanGesture:(UIPanGestureRecognizer *)gestureRecognizer
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(forwardPanGesture:)]) {
            [feature forwardPanGesture:gestureRecognizer];
        }
    }
}

- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point
{
    SCTraceODPCompatibleStart(2);
    for (id<SCFeature> feature in _provider.supportedFeatures) {
        if ([feature respondsToSelector:@selector(shouldBlockTouchAtPoint:)] &&
            [feature shouldBlockTouchAtPoint:point]) {
            return YES;
        }
    }
    return NO;
}

@end

================================================
FILE: Features/Core/SCFeatureProvider.h
================================================
//
//  SCFeatureProvider.h
//  SCCamera
//
//  Created by Kristian Bauer on 1/4/18.
//

#import
#import

@class SCFeatureSettingsManager, SCCapturerToken, SCUserSession;
@protocol SCFeature, SCCapturer, SCFeatureFlash, SCFeatureHandsFree, SCFeatureLensSideButton, SCFeatureLensButtonZ,
    SCFeatureMemories, SCFeatureNightMode, SCFeatureSnapKit, SCFeatureTapToFocusAndExposure, SCFeatureToggleCamera,
    SCFeatureShazam, SCFeatureImageCapture, SCFeatureScanning, SCFeatureZooming;

/**
 * Provides a single location for creating and configuring SCFeatures.
 */
@protocol SCFeatureProvider

@property (nonatomic) AVCameraViewType cameraViewType;
@property (nonatomic, readonly) id<SCCapturer> capturer;
@property (nonatomic, strong, readwrite) SCCapturerToken *token;
@property (nonatomic, readonly) SCUserSession *userSession;

// TODO: We should not be reusing AVCameraViewController so eventually the
// context should be removed.
@property (nonatomic, readonly) AVCameraViewControllerContext context;

@property (nonatomic) id<SCFeatureHandsFree> handsFreeRecording;
@property (nonatomic) id<SCFeatureSnapKit> snapKit;
@property (nonatomic) id<SCFeatureTapToFocusAndExposure> tapToFocusAndExposure;
@property (nonatomic) id<SCFeatureMemories> memories;
@property (nonatomic) id<SCFeatureFlash> flash;
@property (nonatomic) id<SCFeatureLensSideButton> lensSideButton;
@property (nonatomic) id<SCFeatureLensButtonZ> lensZButton;
@property (nonatomic) id<SCFeatureNightMode> nightMode;
@property (nonatomic) id<SCFeatureToggleCamera> toggleCamera;
@property (nonatomic) id<SCFeatureShazam> shazam;
@property (nonatomic) id<SCFeatureScanning> scanning;
@property (nonatomic) id<SCFeatureImageCapture> imageCapture;
@property (nonatomic) id<SCFeatureZooming> zooming;

@property (nonatomic, readonly) NSArray<id<SCFeature>> *supportedFeatures;

- (void)resetInstances;

@end
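Everything the coordinator touches goes through the SCFeature protocol, so a new feature only adopts the optional hooks it cares about and the coordinator's respondsToSelector: checks skip the rest. A minimal sketch of a conformer (hypothetical feature, not part of the dump):

@interface SCFeatureExample : NSObject <SCFeature>
@end

@implementation SCFeatureExample {
    UIView *_button; // some control this feature owns
}

- (void)configureWithView:(UIView *)view
{
    // Called once by SCFeatureCoordinator with the overlay container view.
    [view addSubview:_button];
}

- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point
{
    // Claim touches that land on this feature's control so the camera
    // view's own gestures do not fire underneath it.
    return CGRectContainsPoint(_button.frame, point);
}

@end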
================================================
FILE: Features/Flash/SCFeatureFlash.h
================================================
//
//  SCFeatureFlash.h
//  SCCamera
//
//  Created by Kristian Bauer on 3/27/18.
//

#import "SCFeature.h"

@class SCNavigationBarButtonItem;

/**
 * Public interface for interacting with camera flash feature.
 */
@protocol SCFeatureFlash <SCFeature>

@property (nonatomic, readonly) SCNavigationBarButtonItem *navigationBarButtonItem;

- (void)interruptGestures;

@end

================================================
FILE: Features/Flash/SCFeatureFlashImpl.h
================================================
//
//  SCFeatureFlashImpl.h
//  SCCamera
//
//  Created by Kristian Bauer on 3/27/18.
//

#import "SCFeatureFlash.h"

#import

@class SCLogger;
@protocol SCCapturer;

/**
 * Interface for camera flash feature. Handles enabling/disabling of camera flash via SCCapturer and UI for displaying
 * flash button.
 * Should only expose initializer. All other vars and methods should be declared in SCFeatureFlash protocol.
 */
@interface SCFeatureFlashImpl : NSObject <SCFeatureFlash>

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithCapturer:(id<SCCapturer>)capturer logger:(SCLogger *)logger NS_DESIGNATED_INITIALIZER;

@end
================================================
FILE: Features/Flash/SCFeatureFlashImpl.m
================================================
//
//  SCFeatureFlashImpl.m
//  SCCamera
//
//  Created by Kristian Bauer on 3/27/18.
//

#import "SCFeatureFlashImpl.h"

#import "SCCapturer.h"
#import "SCFlashButton.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerState.h"

#import
#import
#import
#import
#import

static CGFloat const kSCFlashButtonInsets = -2.f;
static CGRect const kSCFlashButtonFrame = {0, 0, 36, 44};
static NSString *const kSCFlashEventName = @"TOGGLE_CAMERA_FLASH_BUTTON";
static NSString *const kSCFlashEventParameterFlashName = @"flash_on";
static NSString *const kSCFlashEventParameterCameraName = @"front_facing_camera_on";

@interface SCFeatureFlashImpl ()
@property (nonatomic, strong, readwrite) id<SCCapturer> capturer;
@property (nonatomic, strong, readwrite) SCLogger *logger;
@property (nonatomic, strong, readwrite) SCFlashButton *flashButton;
@property (nonatomic, weak, readwrite) UIView *containerView;
@property (nonatomic, strong, readwrite) SCManagedCapturerState *managedCapturerState;
@property (nonatomic, assign, readwrite) BOOL canEnable;
@end

@interface SCFeatureFlashImpl (SCManagedCapturerListener)
@end

@implementation SCFeatureFlashImpl

@synthesize navigationBarButtonItem = _navigationBarButtonItem;

- (instancetype)initWithCapturer:(id<SCCapturer>)capturer logger:(SCLogger *)logger
{
    SCTraceODPCompatibleStart(2);
    self = [super init];
    if (self) {
        _capturer = capturer;
        [_capturer addListener:self];
        _logger = logger;
    }
    return self;
}

- (void)dealloc
{
    SCTraceODPCompatibleStart(2);
    [_capturer removeListener:self];
}

#pragma mark - SCFeature

- (void)configureWithView:(UIView *)view
{
    SCTraceODPCompatibleStart(2);
    _containerView = view;
}

- (BOOL)shouldBlockTouchAtPoint:(CGPoint)point
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN_VALUE(_flashButton.userInteractionEnabled && !_flashButton.hidden, NO);
    CGPoint convertedPoint = [_flashButton convertPoint:point fromView:_containerView];
    return [_flashButton pointInside:convertedPoint withEvent:nil];
}

#pragma mark - SCFeatureFlash

- (void)interruptGestures
{
    SCTraceODPCompatibleStart(2);
    [_flashButton interruptGestures];
}

- (SCNavigationBarButtonItem *)navigationBarButtonItem
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN_VALUE(!_navigationBarButtonItem, _navigationBarButtonItem);
    _navigationBarButtonItem = [[SCNavigationBarButtonItem alloc] initWithCustomView:self.flashButton];
    return _navigationBarButtonItem;
}

#pragma mark - Getters

- (SCFlashButton *)flashButton
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN_VALUE(!_flashButton, _flashButton);
    _flashButton = [[SCFlashButton alloc] initWithFrame:kSCFlashButtonFrame];
    _flashButton.layer.sublayerTransform = CATransform3DMakeTranslation(kSCFlashButtonInsets, 0, 0);
    _flashButton.buttonState = SCFlashButtonStateOff;
    _flashButton.maximumScale = 1.1111f;
    [_flashButton addTarget:self action:@selector(_flashTapped)];
    _flashButton.accessibilityIdentifier = @"flash";
    _flashButton.accessibilityLabel = SCLocalizedString(@"flash", 0);
    return _flashButton;
}

#pragma mark - Setters

- (void)setCanEnable:(BOOL)canEnable
{
    SCTraceODPCompatibleStart(2);
    SCLogCameraFeatureInfo(@"[%@] setCanEnable new: %@ old: %@", NSStringFromClass([self class]),
                           canEnable ? @"YES" : @"NO", _canEnable ? @"YES" : @"NO");
    // Keep the ivar in sync so the old/new values logged above stay meaningful.
    _canEnable = canEnable;
    self.flashButton.userInteractionEnabled = canEnable;
}

#pragma mark - Internal Helpers

- (void)_flashTapped
{
    SCTraceODPCompatibleStart(2);
    BOOL flashActive = !_managedCapturerState.flashActive;
    SCLogCameraFeatureInfo(@"[%@] _flashTapped flashActive new: %@ old: %@", NSStringFromClass([self class]),
                           flashActive ? @"YES" : @"NO", !flashActive ? @"YES" : @"NO");
    _containerView.userInteractionEnabled = NO;
    @weakify(self);
    [_capturer setFlashActive:flashActive
            completionHandler:^{
                @strongify(self);
                SCLogCameraFeatureInfo(@"[%@] _flashTapped setFlashActive completion",
                                       NSStringFromClass([self class]));
                self.containerView.userInteractionEnabled = YES;
            }
                      context:SCCapturerContext];

    NSDictionary *loggingParameters = @{
        kSCFlashEventParameterFlashName : @(flashActive),
        kSCFlashEventParameterCameraName : @(_managedCapturerState.devicePosition == SCManagedCaptureDevicePositionFront)
    };
    [_logger logEvent:kSCFlashEventName parameters:loggingParameters];
}

- (BOOL)_shouldHideForState:(SCManagedCapturerState *)state
{
    SCTraceODPCompatibleStart(2);
    return (!state.flashSupported && !state.torchSupported &&
            state.devicePosition != SCManagedCaptureDevicePositionFront) ||
           state.arSessionActive;
}

@end

@implementation SCFeatureFlashImpl (SCManagedCapturerListener)

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state
{
    SCTraceODPCompatibleStart(2);
    SCLogCameraFeatureInfo(@"[%@] didChangeFlashActive flashActive: %@", NSStringFromClass([self class]),
                           state.flashActive ? @"YES" : @"NO");
    self.flashButton.buttonState = state.flashActive ? SCFlashButtonStateOn : SCFlashButtonStateOff;
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state
{
    SCTraceODPCompatibleStart(2);
    SCLogCameraFeatureInfo(
        @"[%@] didChangeFlashSupportedAndTorchSupported flashSupported: %@ torchSupported: %@ devicePosition: %@",
        NSStringFromClass([self class]), state.flashSupported ? @"YES" : @"NO", state.torchSupported ? @"YES" : @"NO",
        state.devicePosition == SCManagedCaptureDevicePositionFront ? @"front" : @"back");
    self.flashButton.hidden = [self _shouldHideForState:state];
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeState:(SCManagedCapturerState *)state
{
    SCTraceODPCompatibleStart(2);
    _managedCapturerState = [state copy];
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state
{
    SCTraceODPCompatibleStart(2);
    SCLogCameraFeatureInfo(@"[%@] didChangeARSessionActive: %@", NSStringFromClass([self class]),
                           state.arSessionActive ? @"YES" : @"NO");
    self.flashButton.hidden = [self _shouldHideForState:state];
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didBeginVideoRecording:(SCManagedCapturerState *)state
                   session:(SCVideoCaptureSessionInfo)session
{
    SCTraceODPCompatibleStart(2);
    self.canEnable = NO;
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didFinishRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
          recordedVideo:(SCManagedRecordedVideo *)recordedVideo
{
    SCTraceODPCompatibleStart(2);
    self.canEnable = YES;
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
       didFailRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
                  error:(NSError *)error
{
    SCTraceODPCompatibleStart(2);
    self.canEnable = YES;
}

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
     didCancelRecording:(SCManagedCapturerState *)state
                session:(SCVideoCaptureSessionInfo)session
{
    SCTraceODPCompatibleStart(2);
    self.canEnable = YES;
}

@end

================================================
FILE: Features/Flash/SCFlashButton.h
================================================
//
//  SCFlashButton.h
//  SCCamera
//
//  Created by Will Wu on 2/13/14.
//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.
//
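Aside on the getters above: `SC_GUARD_ELSE_RETURN_VALUE(!_flashButton, _flashButton)` is used as a lazy-initialization guard. Assuming the macro returns its second argument when the condition is false (its exact definition is not in this dump), the expanded form of such a getter reads:

- (SCFlashButton *)flashButton
{
    if (_flashButton) {
        // Already created: return the cached instance.
        return _flashButton;
    }
    _flashButton = [[SCFlashButton alloc] initWithFrame:kSCFlashButtonFrame];
    // ... configure as above ...
    return _flashButton;
}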
// #import typedef NS_ENUM(NSInteger, SCFlashButtonState) { SCFlashButtonStateOn = 0, SCFlashButtonStateOff = 1 }; @interface SCFlashButton : SCGrowingButton @property (nonatomic, assign) SCFlashButtonState buttonState; @end ================================================ FILE: Features/Flash/SCFlashButton.m ================================================ // // SCFlashButton.m // SCCamera // // Created by Will Wu on 2/13/14. // Copyright (c) 2014 Snapchat, Inc. All rights reserved. // #import "SCFlashButton.h" #import @implementation SCFlashButton - (void)setButtonState:(SCFlashButtonState)buttonState { // Don't reset flash button state if it doesn't change. if (_buttonState == buttonState) { return; } _buttonState = buttonState; if (buttonState == SCFlashButtonStateOn) { self.image = [UIImage imageNamed:@"camera_flash_on_v10"]; self.accessibilityValue = @"on"; } else { self.image = [UIImage imageNamed:@"camera_flash_off_v10"]; self.accessibilityValue = @"off"; } self.imageInset = SCRoundSizeToPixels(CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2, (CGRectGetHeight(self.bounds) - self.image.size.height) / 2)); } @end ================================================ FILE: Features/HandsFree/SCFeatureHandsFree.h ================================================ // // SCFeatureHandsFree.h // SCCamera // // Created by Kristian Bauer on 2/26/18. // #import "SCFeature.h" #import @class SCLongPressGestureRecognizer, SCPreviewPresenter; @protocol SCFeatureHandsFree @property (nonatomic, weak) SCPreviewPresenter *previewPresenter; @property (nonatomic, strong, readonly) SCLongPressGestureRecognizer *longPressGestureRecognizer; /** * Whether the feature is enabled or not. */ @property (nonatomic) BOOL enabled; - (void)setupRecordLifecycleEventsWithMethod:(SCCameraRecordingMethod)method; - (BOOL)shouldDisplayMultiSnapTooltip; /** * Block called when user cancels hands-free recording via X button. */ - (void)setCancelBlock:(dispatch_block_t)cancelBlock; @end ================================================ FILE: Features/ImageCapture/SCFeatureImageCapture.h ================================================ // // SCFeatureImageCapture.h // SCCamera // // Created by Kristian Bauer on 4/18/18. // #import "SCFeature.h" #import @protocol SCFeatureImageCapture; @protocol SCFeatureImageCaptureDelegate - (void)featureImageCapture:(id)featureImageCapture willCompleteWithImage:(UIImage *)image; - (void)featureImageCapture:(id)featureImageCapture didCompleteWithError:(NSError *)error; - (void)featureImageCapturedDidComplete:(id)featureImageCapture; @end /** SCFeature protocol for capturing an image. */ @protocol SCFeatureImageCapture @property (nonatomic, weak, readwrite) id delegate; @property (nonatomic, strong, readonly) SCPromise *imagePromise; - (void)captureImage:(NSString *)captureSessionID; @end ================================================ FILE: Features/ImageCapture/SCFeatureImageCaptureImpl.h ================================================ // // SCFeatureImageCaptureImpl.h // SCCamera // // Created by Kristian Bauer on 4/18/18. 
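// A sketch (not from the leak) of conforming to the SCFeatureImageCaptureDelegate
// protocol above: on success -featureImageCapture:willCompleteWithImage: fires,
// on failure -featureImageCapture:didCompleteWithError: fires, and
// -featureImageCapturedDidComplete: always follows as the terminal callback.
@interface SCExampleImageCaptureObserver : NSObject <SCFeatureImageCaptureDelegate>
@end

@implementation SCExampleImageCaptureObserver
- (void)featureImageCapture:(id<SCFeatureImageCapture>)featureImageCapture willCompleteWithImage:(UIImage *)image
{
    // Hand the captured frame off to preview presentation.
}
- (void)featureImageCapture:(id<SCFeatureImageCapture>)featureImageCapture didCompleteWithError:(NSError *)error
{
    // Surface the capture failure.
}
- (void)featureImageCapturedDidComplete:(id<SCFeatureImageCapture>)featureImageCapture
{
    // Tear down any capture-in-flight UI state; reached on both paths.
}
@end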
// #import "AVCameraViewEnums.h" #import "SCFeatureImageCapture.h" #import @protocol SCCapturer; @class SCLogger; @interface SCFeatureImageCaptureImpl : NSObject SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithCapturer:(id)capturer logger:(SCLogger *)logger cameraViewType:(AVCameraViewType)cameraViewType NS_DESIGNATED_INITIALIZER; @end ================================================ FILE: Features/ImageCapture/SCFeatureImageCaptureImpl.m ================================================ // // SCFeatureImageCaptureImpl.m // SCCamera // // Created by Kristian Bauer on 4/18/18. // #import "SCFeatureImageCaptureImpl.h" #import "SCLogger+Camera.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCManagedCapturerLensAPI.h" #import "SCManagedCapturerListener.h" #import "SCManagedCapturerUtils.h" #import "SCManagedStillImageCapturer.h" #import #import #import #import #import #import #import @interface SCFeatureImageCaptureImpl () @property (nonatomic, strong, readwrite) id capturer; @property (nonatomic, strong, readwrite) SCLogger *logger; @property (nonatomic, assign) AVCameraViewType cameraViewType; @property (nonatomic, strong, readwrite) SCManagedCapturerState *managedCapturerState; /** * Whether user has attempted image capture in current session. Reset on foreground of app. */ @property (nonatomic, assign) BOOL hasTriedCapturing; @end @interface SCFeatureImageCaptureImpl (SCManagedCapturerListener) @end @implementation SCFeatureImageCaptureImpl @synthesize delegate = _delegate; @synthesize imagePromise = _imagePromise; - (instancetype)initWithCapturer:(id)capturer logger:(SCLogger *)logger cameraViewType:(AVCameraViewType)cameraViewType { SCTraceODPCompatibleStart(2); self = [super init]; if (self) { _capturer = capturer; [_capturer addListener:self]; _logger = logger; _cameraViewType = cameraViewType; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_viewWillEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil]; } return self; } - (void)dealloc { [_capturer removeListener:self]; } #pragma mark - SCFeatureImageCapture - (void)captureImage:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); [_logger logTimedEventStart:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" isUniqueEvent:NO]; BOOL asyncCaptureEnabled = [self _asynchronousCaptureEnabled:_managedCapturerState]; SCLogCameraFeatureInfo(@"[%@] takeImage begin async: %@", NSStringFromClass([self class]), asyncCaptureEnabled ? 
@"YES" : @"NO"); if (asyncCaptureEnabled) { SCQueuePerformer *performer = [[SCQueuePerformer alloc] initWithLabel:"com.snapchat.image-capture-promise" qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCoreCamera]; _imagePromise = [[SCPromise alloc] initWithPerformer:performer]; } @weakify(self); [_capturer captureStillImageAsynchronouslyWithAspectRatio:SCManagedCapturedImageAndVideoAspectRatio() captureSessionID:captureSessionID completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) { @strongify(self); SC_GUARD_ELSE_RETURN(self); [self _takeImageCallback:fullScreenImage metadata:metadata error:error state:state]; } context:SCCapturerContext]; [_logger logCameraCaptureFinishedWithDuration:0]; } #pragma mark - Private - (void)_viewWillEnterForeground { SCTraceODPCompatibleStart(2); _hasTriedCapturing = NO; } - (void)_takeImageCallback:(UIImage *)image metadata:(NSDictionary *)metadata error:(NSError *)error state:(SCManagedCapturerState *)state { SCTraceODPCompatibleStart(2); [self _logCaptureComplete:state]; if (image) { [_delegate featureImageCapture:self willCompleteWithImage:image]; if (_imagePromise) { [_imagePromise completeWithValue:image]; } } else { if (_imagePromise) { [_imagePromise completeWithError:[NSError errorWithDomain:@"" code:-1 userInfo:nil]]; } [_delegate featureImageCapture:self didCompleteWithError:error]; } _imagePromise = nil; [_delegate featureImageCapturedDidComplete:self]; } - (BOOL)_asynchronousCaptureEnabled:(SCManagedCapturerState *)state { SCTraceODPCompatibleStart(2); BOOL shouldCaptureImageFromVideoBuffer = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer]; // Fast image capture is disabled in following cases // (1) flash is on; // (2) lenses are active; // (3) SCPhotoCapturer is not supported; // (4) not main camera for iPhoneX; return !state.flashActive && !state.lensesActive && !_capturer.lensProcessingCore.appliedLens && (SCPhotoCapturerIsEnabled() || shouldCaptureImageFromVideoBuffer) && (![SCDeviceName isIphoneX] || (_cameraViewType == AVCameraViewNoReply)); } - (void)_logCaptureComplete:(SCManagedCapturerState *)state { SCTraceODPCompatibleStart(2); NSDictionary *params = @{ @"type" : @"image", @"lenses_active" : @(state.lensesActive), @"is_back_camera" : @(state.devicePosition != SCManagedCaptureDevicePositionFront), @"is_main_camera" : @(_cameraViewType == AVCameraViewNoReply), @"is_first_attempt_after_app_startup" : @(!_hasTriedCapturing), @"app_startup_type" : SCLaunchType(), @"app_startup_time" : @(SCAppStartupTimeMicros() / 1000.0), @"time_elapse_after_app_startup" : @(SCTimeElapseAfterAppStartupMicros() / 1000.0), }; [_logger logTimedEventEnd:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" parameters:params]; _hasTriedCapturing = YES; } @end @implementation SCFeatureImageCaptureImpl (SCManagedCapturerListener) - (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state { SCTraceODPCompatibleStart(2); _managedCapturerState = [state copy]; } - (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state { SCTraceODPCompatibleStart(2); if (_imagePromise) { [[SCManagedCapturePreviewLayerController sharedInstance] pause]; } } @end ================================================ FILE: Features/NightMode/SCFeatureNightMode.h ================================================ // // SCFeatureNightMode.h // SCCamera // // Created by Kristian 
Bauer on 4/9/18.
//

#import "SCFeature.h"

@class SCNavigationBarButtonItem, SCPreviewPresenter;

/**
 * Public interface for interacting with the camera night mode feature.
 * User spec: https://snapchat.quip.com/w4h4ArzcmXCS
 */
@protocol SCFeatureNightMode <SCFeature>
@property (nonatomic, weak, readwrite) SCPreviewPresenter *previewPresenter;
@property (nonatomic, readonly) SCNavigationBarButtonItem *navigationBarButtonItem;
- (void)interruptGestures;
- (void)hideWithDelayIfNeeded;
@end

================================================
FILE: Features/NightMode/SCNightModeButton.h
================================================
//
// SCNightModeButton.h
// SCCamera
//
// Created by Liu Liu on 3/19/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import
#import

@interface SCNightModeButton : SCGrowingButton

@property (nonatomic, assign, getter=isSelected) BOOL selected;

SC_INIT_AND_NEW_UNAVAILABLE

- (void)show;
- (void)hideWithDelay:(BOOL)delay;
- (BOOL)willHideAfterDelay;

@end

================================================
FILE: Features/NightMode/SCNightModeButton.m
================================================
//
// SCNightModeButton.m
// SCCamera
//
// Created by Liu Liu on 3/19/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCNightModeButton.h"

#import

static NSTimeInterval const kSCNightModeButtonHiddenDelay = 2.5;

@implementation SCNightModeButton {
    dispatch_block_t _delayedHideBlock;
}

- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        self.image = [UIImage imageNamed:@"camera_nightmode_off_v10"];
        self.imageInset = CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2,
                                     (CGRectGetHeight(self.bounds) - self.image.size.height) / 2);
    }
    return self;
}

- (void)setSelected:(BOOL)selected
{
    SC_GUARD_ELSE_RETURN(_selected != selected);
    if (selected) {
        [self _cancelDelayedHideAnimation];
        self.image = [UIImage imageNamed:@"camera_nightmode_on_v10"];
    } else {
        self.image = [UIImage imageNamed:@"camera_nightmode_off_v10"];
    }
    self.imageInset = CGSizeMake((CGRectGetWidth(self.bounds) - self.image.size.width) / 2,
                                 (CGRectGetHeight(self.bounds) - self.image.size.height) / 2);
    _selected = selected;
}

- (void)show
{
    SC_GUARD_ELSE_RETURN(self.hidden);
    SCAssertMainThread();
    [self _cancelDelayedHideAnimation];
    self.hidden = NO;
    [self animate];
}

- (void)hideWithDelay:(BOOL)delay
{
    SC_GUARD_ELSE_RETURN(!self.hidden);
    SCAssertMainThread();
    [self _cancelDelayedHideAnimation];
    if (delay) {
        @weakify(self);
        _delayedHideBlock = dispatch_block_create(0, ^{
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            [UIView animateWithDuration:0.3
                animations:^{
                    self.alpha = 0;
                }
                completion:^(BOOL finished) {
                    self.alpha = 1;
                    self.hidden = YES;
                    _delayedHideBlock = nil;
                }];
        });
        dispatch_time_t delayTime =
            dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCNightModeButtonHiddenDelay * NSEC_PER_SEC));
        dispatch_after(delayTime, dispatch_get_main_queue(), _delayedHideBlock);
    } else {
        self.hidden = YES;
    }
}

- (BOOL)willHideAfterDelay
{
    return _delayedHideBlock != nil;
}

#pragma mark - Private

- (void)_cancelDelayedHideAnimation
{
    SC_GUARD_ELSE_RETURN(_delayedHideBlock);
    // dispatch_block_cancel is the matching cancellation API for blocks created
    // with dispatch_block_create; the pending dispatch_after becomes a no-op.
    dispatch_block_cancel(_delayedHideBlock);
    _delayedHideBlock = nil;
}

@end

================================================
FILE: Features/Scanning/SCFeatureScanning.h
================================================
//
// SCFeatureScanning.h
// Snapchat
//
// Created by Xiaokang Liu on 2018/4/19.
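// The delayed hide above uses the cancellable-block pattern: create the block
// with dispatch_block_create so a pending dispatch_after can be abandoned.
// A minimal standalone sketch of the same pattern (names illustrative):
dispatch_block_t delayedWork = dispatch_block_create(0, ^{
    // Runs 2.5s later unless cancelled first.
});
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.5 * NSEC_PER_SEC)), dispatch_get_main_queue(),
               delayedWork);
// Later, before the timer fires:
dispatch_block_cancel(delayedWork); // the queued invocation becomes a no-op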
// #import "SCFeature.h" @protocol SCFeatureScanning; @protocol SCFeatureScanningDelegate - (void)featureScanning:(id)featureScanning didFinishWithResult:(NSObject *)resultObject; @end /** This SCFeature allows the user to long press on the screen to scan a snapcode. */ @protocol SCFeatureScanning @property (nonatomic, weak) id delegate; @property (nonatomic, assign) NSTimeInterval lastSuccessfulScanTime; - (void)startScanning; - (void)stopScanning; - (void)stopSearch; @end ================================================ FILE: Features/Shazam/SCFeatureShazam.h ================================================ // // SCFeatureShazam.h // SCCamera // // Created by Xiaokang Liu on 2018/4/18. // #import "SCFeature.h" @class SCLens; @protocol SCFeatureShazam; @protocol SCFeatureShazamDelegate - (void)featureShazam:(id)featureShazam didFinishWithResult:(NSObject *)result; - (void)featureShazamDidSubmitSearchRequest:(id)featureShazam; - (SCLens *)filterLensForFeatureShazam:(id)featureShazam; @end @protocol SCFeatureShazam @property (nonatomic, weak) id delegate; - (void)stopAudioRecordingAsynchronously; - (void)resetInfo; @end ================================================ FILE: Features/SnapKit/SCFeatureSnapKit.h ================================================ // // SCFeatureSnapKit.h // SCCamera // // Created by Michel Loenngren on 3/19/18. // #import "SCFeature.h" @class SCCameraDeepLinkMetadata; @protocol SCFeatureSnapKit - (void)setDeepLinkMetadata:(SCCameraDeepLinkMetadata *)metadata; @end ================================================ FILE: Features/TapToFocus/SCFeatureTapToFocusAndExposure.h ================================================ // // SCFeatureTapToFocusAndExposure.h // SCCamera // // Created by Michel Loenngren on 4/5/18. // #import "SCFeature.h" /** This SCFeature allows the user to tap on the screen to adjust focus and exposure. */ @protocol SCFeatureTapToFocusAndExposure - (void)reset; @end ================================================ FILE: Features/TapToFocus/SCFeatureTapToFocusAndExposureImpl.h ================================================ // // SCFeatureTapToFocusAndExposureImpl.h // SCCamera // // Created by Michel Loenngren on 4/5/18. // #import "SCFeatureTapToFocusAndExposure.h" #import #import @protocol SCCapturer; /** Protocol describing unique camera commands to run when the user taps on screen. These could be focus, exposure or tap to portrait mode. */ @protocol SCFeatureCameraTapCommand - (void)execute:(CGPoint)pointOfInterest capturer:(id)capturer; @end /** This is the default implementation of SCFeatureTapToFocusAndExposure allowing the user to tap on the camera overlay view in order to adjust focus and exposure. */ @interface SCFeatureTapToFocusAndExposureImpl : NSObject SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithCapturer:(id)capturer commands:(NSArray> *)commands; @end /** Adjust focus on tap. */ @interface SCFeatureCameraFocusTapCommand : NSObject @end /** Adjust exposure on tap. */ @interface SCFeatureCameraExposureTapCommand : NSObject @end /** Adjust portrait mode point of interest on tap. */ @interface SCFeatureCameraPortraitTapCommand : NSObject @end ================================================ FILE: Features/TapToFocus/SCFeatureTapToFocusAndExposureImpl.m ================================================ // // SCFeatureTapToFocusImpl.m // SCCamera // // Created by Michel Loenngren on 4/5/18. 
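// Sketch (illustrative): composing the command list that
// SCFeatureTapToFocusAndExposureImpl's initializer above expects. The
// `portraitModeSupported` gate is a stand-in, not the leak's actual check.
NSArray<id<SCFeatureCameraTapCommand>> *commands = portraitModeSupported
    ? @[
          [SCFeatureCameraFocusTapCommand new],
          [SCFeatureCameraExposureTapCommand new],
          [SCFeatureCameraPortraitTapCommand new],
      ]
    : @[ [SCFeatureCameraFocusTapCommand new], [SCFeatureCameraExposureTapCommand new] ];
SCFeatureTapToFocusAndExposureImpl *tapFeature =
    [[SCFeatureTapToFocusAndExposureImpl alloc] initWithCapturer:capturer commands:commands];
// Each tap then fans out to every command with the same converted point of interest.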
// #import "SCFeatureTapToFocusAndExposureImpl.h" #import "SCCameraTweaks.h" #import "SCCapturer.h" #import "SCFeatureContainerView.h" #import "SCTapAnimationView.h" #import #import @interface SCFeatureTapToFocusAndExposureImpl () @property (nonatomic, weak) id capturer; @property (nonatomic, weak) UIView *containerView; @property (nonatomic) BOOL userTappedToFocusAndExposure; @property (nonatomic) NSArray> *commands; @end @implementation SCFeatureTapToFocusAndExposureImpl - (instancetype)initWithCapturer:(id)capturer commands:(NSArray> *)commands { if (self = [super init]) { _capturer = capturer; _commands = commands; } return self; } - (void)reset { SC_GUARD_ELSE_RETURN(_userTappedToFocusAndExposure); _userTappedToFocusAndExposure = NO; [_capturer continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:nil context:SCCapturerContext]; } #pragma mark - SCFeature - (void)configureWithView:(UIView *)view { SCTraceODPCompatibleStart(2); _containerView = view; } - (void)forwardCameraOverlayTapGesture:(UIGestureRecognizer *)gestureRecognizer { SCTraceODPCompatibleStart(2); CGPoint point = [gestureRecognizer locationInView:gestureRecognizer.view]; @weakify(self); [_capturer convertViewCoordinates:[gestureRecognizer locationInView:_containerView] completionHandler:^(CGPoint pointOfInterest) { @strongify(self); SC_GUARD_ELSE_RETURN(self); SCLogCameraFeatureInfo(@"Tapped to focus: %@", NSStringFromCGPoint(pointOfInterest)); [self _applyTapCommands:pointOfInterest]; [self _showTapAnimationAtPoint:point forGesture:gestureRecognizer]; } context:SCCapturerContext]; } #pragma mark - Private helpers - (void)_applyTapCommands:(CGPoint)pointOfInterest { SCTraceODPCompatibleStart(2); for (id command in _commands) { [command execute:pointOfInterest capturer:_capturer]; } self.userTappedToFocusAndExposure = YES; } - (void)_showTapAnimationAtPoint:(CGPoint)point forGesture:(UIGestureRecognizer *)gestureRecognizer { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN([self.containerView isTapGestureRecognizer:gestureRecognizer]) SCTapAnimationView *tapAnimationView = [SCTapAnimationView tapAnimationView]; [_containerView addSubview:tapAnimationView]; tapAnimationView.center = point; [tapAnimationView showWithCompletion:^(SCTapAnimationView *view) { [view removeFromSuperview]; }]; } @end @implementation SCFeatureCameraFocusTapCommand - (void)execute:(CGPoint)pointOfInterest capturer:(id)capturer { [capturer setAutofocusPointOfInterestAsynchronously:pointOfInterest completionHandler:nil context:SCCapturerContext]; } @end @implementation SCFeatureCameraExposureTapCommand - (void)execute:(CGPoint)pointOfInterest capturer:(id)capturer { [capturer setExposurePointOfInterestAsynchronously:pointOfInterest fromUser:YES completionHandler:nil context:SCCapturerContext]; } @end @implementation SCFeatureCameraPortraitTapCommand - (void)execute:(CGPoint)pointOfInterest capturer:(id)capturer { [capturer setPortraitModePointOfInterestAsynchronously:pointOfInterest completionHandler:nil context:SCCapturerContext]; } @end ================================================ FILE: Features/TapToFocus/SCTapAnimationView.h ================================================ // // SCTapAnimationView.h // SCCamera // // Created by Alexander Grytsiuk on 8/26/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
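// Usage sketch for the tap animation flow above: the view is created per tap,
// centered on the touch, and removes itself when the CATransaction completion
// fires, so no external bookkeeping is needed. `overlayView` and `tapLocation`
// stand in for the camera overlay and the converted touch point.
SCTapAnimationView *tapView = [SCTapAnimationView tapAnimationView];
[overlayView addSubview:tapView];
tapView.center = tapLocation;
[tapView showWithCompletion:^(SCTapAnimationView *view) {
    [view removeFromSuperview];
}];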
// #import @class SCTapAnimationView; typedef void (^SCTapAnimationViewCompletion)(SCTapAnimationView *); @interface SCTapAnimationView : UIView + (instancetype)tapAnimationView; - (void)showWithCompletion:(SCTapAnimationViewCompletion)completion; @end ================================================ FILE: Features/TapToFocus/SCTapAnimationView.m ================================================ // // SCTapAnimationView.m // SCCamera // // Created by Alexander Grytsiuk on 8/26/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "SCTapAnimationView.h" #import @import QuartzCore; static const CGFloat kSCAnimationStep = 0.167; static const CGFloat kSCInnerCirclePadding = 2.5; static const CGFloat kSCTapAnimationViewWidth = 55; static const CGFloat kSCOuterRingBorderWidth = 1; static NSString *const kSCOpacityAnimationKey = @"opacity"; static NSString *const kSCScaleAnimationKey = @"scale"; @implementation SCTapAnimationView { CALayer *_outerRing; CALayer *_innerCircle; } #pragma mark Class Methods + (instancetype)tapAnimationView { return [[self alloc] initWithFrame:CGRectMake(0, 0, kSCTapAnimationViewWidth, kSCTapAnimationViewWidth)]; } #pragma mark Life Cycle - (instancetype)initWithFrame:(CGRect)frame { self = [super initWithFrame:frame]; if (self) { self.userInteractionEnabled = NO; _outerRing = [CALayer layer]; _outerRing.backgroundColor = [UIColor clearColor].CGColor; _outerRing.borderColor = [UIColor whiteColor].CGColor; _outerRing.borderWidth = kSCOuterRingBorderWidth; _outerRing.shadowColor = [UIColor blackColor].CGColor; _outerRing.shadowOpacity = 0.4; _outerRing.shadowOffset = CGSizeMake(0.5, 0.5); _outerRing.opacity = 0.0; _outerRing.frame = self.bounds; _outerRing.cornerRadius = CGRectGetMidX(_outerRing.bounds); [self.layer addSublayer:_outerRing]; _innerCircle = [CALayer layer]; _innerCircle.backgroundColor = [UIColor whiteColor].CGColor; _innerCircle.opacity = 0.0; _innerCircle.frame = CGRectInset(self.bounds, kSCInnerCirclePadding, kSCInnerCirclePadding); _innerCircle.cornerRadius = CGRectGetMidX(_innerCircle.bounds); [self.layer addSublayer:_innerCircle]; } return self; } #pragma mark Public - (void)showWithCompletion:(SCTapAnimationViewCompletion)completion { [_outerRing removeAllAnimations]; [_innerCircle removeAllAnimations]; [CATransaction begin]; [CATransaction setCompletionBlock:^{ if (completion) { completion(self); } }]; [self addOuterRingOpacityAnimation]; [self addOuterRingScaleAnimation]; [self addInnerCircleOpacityAnimation]; [self addInnerCircleScaleAnimation]; [CATransaction commit]; } #pragma mark Private - (CAKeyframeAnimation *)keyFrameAnimationWithKeyPath:(NSString *)keyPath duration:(CGFloat)duration values:(NSArray *)values keyTimes:(NSArray *)keyTimes timingFunctions:(NSArray *)timingFunctions { CAKeyframeAnimation *keyframeAnimation = [CAKeyframeAnimation animationWithKeyPath:keyPath]; keyframeAnimation.duration = duration; keyframeAnimation.values = values; keyframeAnimation.keyTimes = keyTimes; keyframeAnimation.timingFunctions = timingFunctions; keyframeAnimation.fillMode = kCAFillModeForwards; keyframeAnimation.removedOnCompletion = NO; return keyframeAnimation; } - (CABasicAnimation *)animationWithKeyPath:(NSString *)keyPath duration:(CGFloat)duration fromValue:(NSValue *)fromValue toValue:(NSValue *)toValue timingFunction:(CAMediaTimingFunction *)timingFunction { CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:keyPath]; animation.duration = duration; animation.fromValue = fromValue; animation.toValue = 
toValue; animation.timingFunction = timingFunction; animation.fillMode = kCAFillModeForwards; animation.removedOnCompletion = NO; return animation; } - (void)addOuterRingOpacityAnimation { CAKeyframeAnimation *animation = [self keyFrameAnimationWithKeyPath:@keypath(_outerRing, opacity) duration:kSCAnimationStep * 5 values:@[ @0.0, @1.0, @1.0, @0.0 ] keyTimes:@[ @0.0, @0.2, @0.8, @1.0 ] timingFunctions:@[ [CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0], [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear], [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut], ]]; [_outerRing addAnimation:animation forKey:kSCOpacityAnimationKey]; } - (void)addOuterRingScaleAnimation { CAKeyframeAnimation *animation = [self keyFrameAnimationWithKeyPath:@keypath(_innerCircle, transform) duration:kSCAnimationStep * 3 values:@[ [NSValue valueWithCATransform3D:CATransform3DMakeScale(0.50, 0.50, 1.0)], [NSValue valueWithCATransform3D:CATransform3DIdentity], [NSValue valueWithCATransform3D:CATransform3DMakeScale(0.83, 0.83, 1.0)], ] keyTimes:@[ @0.0, @0.66, @1.0 ] timingFunctions:@[ [CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0], [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut], ]]; [_outerRing addAnimation:animation forKey:kSCScaleAnimationKey]; } - (void)addInnerCircleOpacityAnimation { CAKeyframeAnimation *animation = [self keyFrameAnimationWithKeyPath:@keypath(_innerCircle, opacity) duration:kSCAnimationStep * 3 values:@[ @0.0, @0.40, @0.0 ] keyTimes:@[ @0.0, @0.33, @1.0 ] timingFunctions:@[ [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseIn], [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut], ]]; [_innerCircle addAnimation:animation forKey:kSCOpacityAnimationKey]; } - (void)addInnerCircleScaleAnimation { CABasicAnimation *animation = [self animationWithKeyPath:@keypath(_innerCircle, transform) duration:kSCAnimationStep * 2 fromValue:[NSValue valueWithCATransform3D:CATransform3DMakeScale(0.0, 0.0, 1.0)] toValue:[NSValue valueWithCATransform3D:CATransform3DIdentity] timingFunction:[CAMediaTimingFunction functionWithControlPoints:0.0:0.0:0.0:1.0]]; [_innerCircle addAnimation:animation forKey:kSCScaleAnimationKey]; } @end ================================================ FILE: Features/ToggleCamera/SCFeatureToggleCamera.h ================================================ // // SCFeatureToggleCamera.h // SCCamera // // Created by Michel Loenngren on 4/17/18. // #import #import @protocol SCCapturer , SCFeatureToggleCamera, SCLensCameraScreenDataProviderProtocol; @protocol SCFeatureToggleCameraDelegate - (void)featureToggleCamera:(id)feature willToggleToDevicePosition:(SCManagedCaptureDevicePosition)devicePosition; - (void)featureToggleCamera:(id)feature didToggleToDevicePosition:(SCManagedCaptureDevicePosition)devicePosition; @end /** SCFeature protocol for toggling the camera. */ @protocol SCFeatureToggleCamera @property (nonatomic, weak) id delegate; - (void)toggleCameraWithRecording:(BOOL)isRecording takingPicture:(BOOL)isTakingPicture lensDataProvider:(id)lensDataProvider completion:(void (^)(BOOL success))completion; - (void)reset; @end ================================================ FILE: Features/Zooming/SCFeatureZooming.h ================================================ // // SCFeatureZooming.h // SCCamera // // Created by Xiaokang Liu on 2018/4/19. 
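// Usage sketch for SCFeatureToggleCamera above (arguments illustrative): the
// feature performs the flip and reports back on completion, while the
// delegate's will/did callbacks bracket the device-position change.
[toggleCameraFeature toggleCameraWithRecording:NO
                                 takingPicture:NO
                              lensDataProvider:lensDataProvider
                                    completion:^(BOOL success) {
                                        // Re-enable the flip control; on failure the
                                        // current device position is left untouched.
                                    }];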
// #import "SCFeature.h" #import #import @class SCPreviewPresenter; @protocol SCFeatureZooming; @protocol SCFeatureZoomingDelegate - (void)featureZoomingForceTouchedWhileRecording:(id)featureZooming; - (BOOL)featureZoomingIsInitiatedRecording:(id)featureZooming; @end @protocol SCFeatureZooming @property (nonatomic, weak) id delegate; @property (nonatomic, weak) SCPreviewPresenter *previewPresenter; - (void)resetOffset; - (void)resetScale; - (void)cancelPreview; - (void)flipOffset; - (void)resetBeginningScale; - (void)toggleCameraForReset:(SCManagedCaptureDevicePosition)devicePosition; - (void)recordCurrentZoomStateForReset; @end ================================================ FILE: Lens/SCManagedCapturerARImageCaptureProvider.h ================================================ // // SCManagedCapturerARImageCaptureProvider.h // SCCamera // // Created by Michel Loenngren on 4/11/18. // #import @class SCManagedStillImageCapturer; @protocol SCManagedCapturerLensAPI , SCPerforming; /** Bridging protocol providing the ARImageCapturer subclass of SCManagedStillImageCapturer to capture core. */ @protocol SCManagedCapturerARImageCaptureProvider - (SCManagedStillImageCapturer *)arImageCapturerWith:(id)performer lensProcessingCore:(id)lensProcessingCore; @end ================================================ FILE: Lens/SCManagedCapturerGLViewManagerAPI.h ================================================ // // SCManagedCapturerGLViewManagerAPI.h // SCCamera // // Created by Michel Loenngren on 4/11/18. // #import #import @class SCCaptureResource; /** Bridging protocol for providing a glViewManager to capture core. */ @protocol SCManagedCapturerGLViewManagerAPI @property (nonatomic, readonly, strong) LSAGLView *view; - (void)configureWithCaptureResource:(SCCaptureResource *)captureResource; - (void)setLensesActive:(BOOL)active; - (void)prepareViewIfNecessary; @end ================================================ FILE: Lens/SCManagedCapturerLSAComponentTrackerAPI.h ================================================ // // SCManagedCapturerLSAComponentTrackerAPI.h // SCCamera // // Created by Michel Loenngren on 4/11/18. // #import @class SCCaptureResource; /** SCCamera protocol providing LSA tracking logic. */ @protocol SCManagedCapturerLSAComponentTrackerAPI - (void)configureWithCaptureResource:(SCCaptureResource *)captureResource; @end ================================================ FILE: Lens/SCManagedCapturerLensAPI.h ================================================ // // SCManagedCapturerLensAPI.h // SCCamera // // Created by Michel Loenngren on 4/11/18. // #import "SCManagedCapturerListener.h" #import "SCManagedVideoARDataSource.h" #import #import #import @protocol SCManagedAudioDataSourceListener , SCManagedVideoARDataSource; @class LSAComponentManager; /** Encapsulation of LensesProcessingCore for use in SCCamera. 
*/ @protocol SCManagedCapturerLensAPI @property (nonatomic, strong, readonly) LSAComponentManager *componentManager; @property (nonatomic, strong) NSString *activeLensId; @property (nonatomic, readonly) BOOL isLensApplied; @property (nonatomic, strong, readonly) id capturerListener; typedef void (^SCManagedCapturerLensAPIPointOfInterestCompletion)(SCLensCategory *category, NSInteger categoriesCount); - (void)setAspectRatio:(BOOL)isLiveStreaming; - (SCLens *)appliedLens; - (void)setFieldOfView:(float)fieldOfView; - (void)setAsFieldOfViewListenerForDevice:(SCManagedCaptureDevice *)captureDevice; - (void)setAsFieldOfViewListenerForARDataSource:(id)arDataSource NS_AVAILABLE_IOS(11_0); - (void)removeFieldOfViewListener; - (void)setModifySource:(BOOL)modifySource; - (void)setLensesActive:(BOOL)lensesActive videoOrientation:(AVCaptureVideoOrientation)videoOrientation filterFactory:(SCLookseryFilterFactory *)filterFactory; - (void)detectLensCategoryOnNextFrame:(CGPoint)point videoOrientation:(AVCaptureVideoOrientation)videoOrientation lenses:(NSArray *)lenses completion:(SCManagedCapturerLensAPIPointOfInterestCompletion)completion; - (void)setShouldMuteAllSounds:(BOOL)shouldMuteAllSounds; - (UIImage *)processImage:(UIImage *)image maxPixelSize:(NSInteger)maxPixelSize devicePosition:(SCManagedCaptureDevicePosition)position fieldOfView:(float)fieldOfView; - (void)setShouldProcessARFrames:(BOOL)shouldProcessARFrames; - (NSInteger)maxPixelSize; @end ================================================ FILE: Lens/SCManagedCapturerLensAPIProvider.h ================================================ // // SCManagedCapturerLensAPIProvider.h // SCCamera // // Created by Michel Loenngren on 4/12/18. // #import @protocol SCManagedCapturerLensAPI; @class SCCaptureResource; /** Provider for creating new instances of SCManagedCapturerLensAPI within SCCamera. */ @protocol SCManagedCapturerLensAPIProvider - (id)lensAPIForCaptureResource:(SCCaptureResource *)captureResouce; @end ================================================ FILE: Logging/SCCoreCameraLogger.h ================================================ // // SCCoreCameraLogger.h // Snapchat // // Created by Chao Pang on 3/6/18. 
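// Sketch (not the leak's implementation) of the provider indirection declared
// above: capture core asks the provider for a lens API per SCCaptureResource,
// so it never constructs the lens stack directly. SCExampleLensAPI and its
// initializer are hypothetical names.
@interface SCExampleLensAPIProvider : NSObject <SCManagedCapturerLensAPIProvider>
@end

@implementation SCExampleLensAPIProvider
- (id<SCManagedCapturerLensAPI>)lensAPIForCaptureResource:(SCCaptureResource *)captureResource
{
    // A real provider would return a LensesProcessingCore-backed object here.
    return [[SCExampleLensAPI alloc] initWithCaptureResource:captureResource];
}
@end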
// #import /** * CAMERA_CREATION_DELAY event */ extern NSString *const kSCCameraCreationDelayEventStartTimeKey; extern NSString *const kSCCameraCreationDelayEventStartTimeAdjustmentKey; extern NSString *const kSCCameraCreationDelayEventEndTimeKey; extern NSString *const kSCCameraCreationDelayEventCaptureSessionIdKey; extern NSString *const kSCCameraCreationDelayEventFilterLensIdKey; extern NSString *const kSCCameraCreationDelayEventNightModeDetectedKey; extern NSString *const kSCCameraCreationDelayEventNightModeActiveKey; extern NSString *const kSCCameraCreationDelayEventCameraApiKey; extern NSString *const kSCCameraCreationDelayEventCameraLevelKey; extern NSString *const kSCCameraCreationDelayEventCameraPositionKey; extern NSString *const kSCCameraCreationDelayEventCameraOpenSourceKey; extern NSString *const kSCCameraCreationDelayEventContentDurationKey; extern NSString *const kSCCameraCreationDelayEventMediaTypeKey; extern NSString *const kSCCameraCreationDelayEventStartTypeKey; extern NSString *const kSCCameraCreationDelayEventStartSubTypeKey; extern NSString *const kSCCameraCreationDelayEventAnalyticsVersion; @interface SCCoreCameraLogger : NSObject + (instancetype)sharedInstance; /** * CAMERA_CREATION_DELAY event */ - (void)logCameraCreationDelayEventStartWithCaptureSessionId:(NSString *)captureSessionId filterLensId:(NSString *)filterLensId underLowLightCondition:(BOOL)underLowLightCondition isNightModeActive:(BOOL)isNightModeActive isBackCamera:(BOOL)isBackCamera isMainCamera:(BOOL)isMainCamera; - (void)logCameraCreationDelaySplitPointRecordingGestureFinished; - (void)logCameraCreationDelaySplitPointStillImageCaptureApi:(NSString *)api; - (void)logCameraCreationDelaySplitPointPreCaptureOperationRequested; - (void)logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:(CFTimeInterval)time; - (void)updatedCameraCreationDelayWithContentDuration:(CFTimeInterval)duration; - (void)logCameraCreationDelaySplitPointCameraCaptureContentReady; - (void)logCameraCreationDelaySplitPointPreviewFinishedPreparation; - (void)logCameraCreationDelaySplitPointPreviewDisplayedForImage:(BOOL)isImage; - (void)logCameraCreationDelaySplitPointPreviewAnimationComplete:(BOOL)isImage; - (void)logCameraCreationDelaySplitPointPreviewFirstFramePlayed:(BOOL)isImage; - (void)cancelCameraCreationDelayEvent; @end ================================================ FILE: Logging/SCCoreCameraLogger.m ================================================ // // SCCoreCameraLogger.m // Snapchat // // Created by Chao Pang on 3/6/18. 
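// Illustrative call sequence for the CAMERA_CREATION_DELAY API declared above,
// for a still image capture (session ID and API string are placeholders):
SCCoreCameraLogger *logger = [SCCoreCameraLogger sharedInstance];
[logger logCameraCreationDelayEventStartWithCaptureSessionId:@"session-id"
                                                filterLensId:nil
                                      underLowLightCondition:NO
                                           isNightModeActive:NO
                                                isBackCamera:YES
                                                isMainCamera:YES];
[logger logCameraCreationDelaySplitPointStillImageCaptureApi:@"photo-capturer"];
[logger logCameraCreationDelaySplitPointCameraCaptureContentReady];
[logger logCameraCreationDelaySplitPointPreviewDisplayedForImage:YES];
// The event is only emitted once BOTH of the following have fired, in either order:
[logger logCameraCreationDelaySplitPointPreviewAnimationComplete:YES];
[logger logCameraCreationDelaySplitPointPreviewFirstFramePlayed:YES];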
// #import "SCCoreCameraLogger.h" #import #import #import #import static const char *kSCCoreCameraLoggerQueueLabel = "com.snapchat.core-camera-logger-queue"; NSString *const kSCCameraCreationDelayEventStartTimeKey = @"start_time"; NSString *const kSCCameraCreationDelayEventStartTimeAdjustmentKey = @"start_time_adjustment"; NSString *const kSCCameraCreationDelayEventEndTimeKey = @"end_time"; NSString *const kSCCameraCreationDelayEventCaptureSessionIdKey = @"capture_session_id"; NSString *const kSCCameraCreationDelayEventFilterLensIdKey = @"filter_lens_id"; NSString *const kSCCameraCreationDelayEventNightModeDetectedKey = @"night_mode_detected"; NSString *const kSCCameraCreationDelayEventNightModeActiveKey = @"night_mode_active"; NSString *const kSCCameraCreationDelayEventCameraApiKey = @"camera_api"; NSString *const kSCCameraCreationDelayEventCameraLevelKey = @"camera_level"; NSString *const kSCCameraCreationDelayEventCameraPositionKey = @"camera_position"; NSString *const kSCCameraCreationDelayEventCameraOpenSourceKey = @"camera_open_source"; NSString *const kSCCameraCreationDelayEventContentDurationKey = @"content_duration"; NSString *const kSCCameraCreationDelayEventMediaTypeKey = @"media_type"; NSString *const kSCCameraCreationDelayEventStartTypeKey = @"start_type"; NSString *const kSCCameraCreationDelayEventStartSubTypeKey = @"start_sub_type"; NSString *const kSCCameraCreationDelayEventAnalyticsVersion = @"ios_v1"; static inline NSUInteger SCTimeToMS(CFTimeInterval time) { return (NSUInteger)(time * 1000); } static NSString *SCDictionaryToJSONString(NSDictionary *dictionary) { NSData *dictData = [NSJSONSerialization dataWithJSONObject:dictionary options:0 error:nil]; return [[NSString alloc] initWithData:dictData encoding:NSUTF8StringEncoding]; } @implementation SCCoreCameraLogger { SCQueuePerformer *_performer; NSMutableDictionary *_cameraCreationDelayParameters; NSMutableDictionary *_cameraCreationDelaySplits; } - (instancetype)init { self = [super init]; if (self) { _cameraCreationDelayParameters = [NSMutableDictionary dictionary]; _cameraCreationDelaySplits = [NSMutableDictionary dictionary]; _performer = [[SCQueuePerformer alloc] initWithLabel:kSCCoreCameraLoggerQueueLabel qualityOfService:QOS_CLASS_UNSPECIFIED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCoreCamera]; } return self; } + (instancetype)sharedInstance { static SCCoreCameraLogger *sharedInstance; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ sharedInstance = [[SCCoreCameraLogger alloc] init]; }); return sharedInstance; } // Camera creation delay metrics - (void)logCameraCreationDelayEventStartWithCaptureSessionId:(NSString *)captureSessionId filterLensId:(NSString *)filterLensId underLowLightCondition:(BOOL)underLowLightCondition isNightModeActive:(BOOL)isNightModeActive isBackCamera:(BOOL)isBackCamera isMainCamera:(BOOL)isMainCamera { CFTimeInterval startTime = CACurrentMediaTime(); [_performer perform:^{ [_cameraCreationDelayParameters removeAllObjects]; [_cameraCreationDelaySplits removeAllObjects]; _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] = @(startTime); _cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey] = captureSessionId ?: @"null"; _cameraCreationDelayParameters[kSCCameraCreationDelayEventFilterLensIdKey] = filterLensId ?: @"null"; _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] = @(underLowLightCondition); _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey] = 
@(isNightModeActive);
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraPositionKey] =
            isBackCamera ? @"back" : @"front";
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraOpenSourceKey] =
            isMainCamera ? @"main_camera" : @"reply_camera";
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTypeKey] = SCLaunchType() ?: @"null";
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartSubTypeKey] = SCLaunchSubType() ?: @"null";
    }];
}

- (void)logCameraCreationDelaySplitPointRecordingGestureFinished
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        CFTimeInterval endRecordingTimeOffset =
            time - [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue];
        NSNumber *recordStartTimeMillis =
            (NSNumber *)_cameraCreationDelaySplits[kSCCameraSubmetricsPreCaptureOperationFinished];
        if (recordStartTimeMillis) {
            CFTimeInterval timeDisplacement =
                ([recordStartTimeMillis doubleValue] / 1000.0) - endRecordingTimeOffset;
            _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] = @(timeDisplacement);
        }
        [self _addSplitPointForKey:kSCCameraSubmetricsRecordingGestureFinished atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointStillImageCaptureApi:(NSString *)api
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        if (api) {
            _cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraApiKey] = api;
        }
        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationRequested atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointPreCaptureOperationRequested
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationRequested atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:(CFTimeInterval)time
{
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsPreCaptureOperationFinished atTime:time];
    }];
}

- (void)updatedCameraCreationDelayWithContentDuration:(CFTimeInterval)duration
{
    [_performer perform:^{
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey] = @(SCTimeToMS(duration));
    }];
}

- (void)logCameraCreationDelaySplitPointCameraCaptureContentReady
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsCameraCaptureContentReady atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointPreviewFinishedPreparation
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        // Record this split under the preview-preparation key; reusing the
        // content-ready key here would overwrite the earlier split.
        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewFinishPreparation atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointPreviewDisplayedForImage:(BOOL)isImage
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewLayoutReady atTime:time];
    }];
}

- (void)logCameraCreationDelaySplitPointPreviewAnimationComplete:(BOOL)isImage
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewAnimationFinish atTime:time];
        if (_cameraCreationDelaySplits[kSCCameraSubmetricsPreviewPlayerReady]) {
            [self _completeLogCameraCreationDelayEventWithIsImage:isImage atTime:time];
        }
    }];
}

- (void)logCameraCreationDelaySplitPointPreviewFirstFramePlayed:(BOOL)isImage
{
    CFTimeInterval time = CACurrentMediaTime();
    [_performer perform:^{
        [self _addSplitPointForKey:kSCCameraSubmetricsPreviewPlayerReady atTime:time];
        if (_cameraCreationDelaySplits[kSCCameraSubmetricsPreviewAnimationFinish]) {
            [self _completeLogCameraCreationDelayEventWithIsImage:isImage atTime:time];
        }
    }];
}

- (void)cancelCameraCreationDelayEvent
{
    [_performer perform:^{
        [_cameraCreationDelayParameters removeAllObjects];
        [_cameraCreationDelaySplits removeAllObjects];
    }];
}

#pragma mark - Private methods

- (void)_completeLogCameraCreationDelayEventWithIsImage:(BOOL)isImage atTime:(CFTimeInterval)time
{
    SCAssertPerformer(_performer);
    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey]) {
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventMediaTypeKey] = isImage ? @"image" : @"video";
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey] = @(time);
        [self _logCameraCreationDelayBlizzardEvent];
    }
    [_cameraCreationDelayParameters removeAllObjects];
    [_cameraCreationDelaySplits removeAllObjects];
}

- (void)_addSplitPointForKey:(NSString *)key atTime:(CFTimeInterval)time
{
    SCAssertPerformer(_performer);
    if (key) {
        CFTimeInterval timeOffset =
            time - [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue];
        NSNumber *timeAdjustment =
            _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] ?: @(0);
        _cameraCreationDelaySplits[key] = @(SCTimeToMS(timeOffset + [timeAdjustment doubleValue]));
    }
}

- (void)_logCameraCreationDelayBlizzardEvent
{
    SCAssertPerformer(_performer);
    SCASharedCameraMetricParams *sharedCameraMetricsParams = [[SCASharedCameraMetricParams alloc] init];
    [sharedCameraMetricsParams setAnalyticsVersion:kSCCameraCreationDelayEventAnalyticsVersion];
    NSString *mediaType = _cameraCreationDelayParameters[kSCCameraCreationDelayEventMediaTypeKey];
    if (mediaType) {
        if ([mediaType isEqualToString:@"image"]) {
            [sharedCameraMetricsParams setMediaType:SCAMediaType_IMAGE];
        } else if ([mediaType isEqualToString:@"video"]) {
            [sharedCameraMetricsParams setMediaType:SCAMediaType_VIDEO];
        }
    }
    if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] &&
        _cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey]) {
        BOOL isNightModeDetected =
            [_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeDetectedKey] boolValue];
        BOOL isNightModeActive =
            [_cameraCreationDelayParameters[kSCCameraCreationDelayEventNightModeActiveKey] boolValue];
        if (!isNightModeDetected) {
            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_NOT_DETECTED];
        } else if (!isNightModeActive) {
            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_DETECTED];
        } else {
            [sharedCameraMetricsParams setLowLightStatus:SCALowLightStatus_ENABLED];
        }
    }
    [sharedCameraMetricsParams setPowerMode:[[NSProcessInfo processInfo] isLowPowerModeEnabled] ?
@"LOW_POWER_MODE_ENABLED" : @"LOW_POWER_MODE_DISABLED"]; [sharedCameraMetricsParams setFilterLensId:_cameraCreationDelayParameters[kSCCameraCreationDelayEventFilterLensIdKey] ?: @"null"]; [sharedCameraMetricsParams setCaptureSessionId:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCaptureSessionIdKey] ?: @"null"]; [sharedCameraMetricsParams setCameraApi:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraApiKey] ?: @"null"]; [sharedCameraMetricsParams setCameraPosition:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraPositionKey] ?: @"null"]; [sharedCameraMetricsParams setCameraOpenSource:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraOpenSourceKey] ?: @"null"]; [sharedCameraMetricsParams setCameraLevel:_cameraCreationDelayParameters[kSCCameraCreationDelayEventCameraLevelKey] ?: @"null"]; [sharedCameraMetricsParams setStartType:_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTypeKey] ?: @"null"]; [sharedCameraMetricsParams setStartSubType:_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartSubTypeKey] ?: @"null"]; [sharedCameraMetricsParams setSplits:SCDictionaryToJSONString(_cameraCreationDelaySplits)]; SCACameraSnapCreateDelay *creationDelay = [[SCACameraSnapCreateDelay alloc] init]; if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] && _cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey]) { double startTime = [_cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeKey] doubleValue]; double endTime = [_cameraCreationDelayParameters[kSCCameraCreationDelayEventEndTimeKey] doubleValue]; NSNumber *timeAdjustment = _cameraCreationDelayParameters[kSCCameraCreationDelayEventStartTimeAdjustmentKey] ?: @(0); [creationDelay setLatencyMillis:SCTimeToMS(endTime - startTime + [timeAdjustment doubleValue])]; } else { [creationDelay setLatencyMillis:0]; } if (_cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey]) { [creationDelay setContentDurationMillis:SCTimeToMS( [_cameraCreationDelayParameters[kSCCameraCreationDelayEventContentDurationKey] doubleValue])]; } else { [creationDelay setContentDurationMillis:0]; } [creationDelay setSharedCameraMetricParams:sharedCameraMetricsParams]; [[SCLogger sharedInstance] logUserTrackedEvent:creationDelay]; } @end ================================================ FILE: Logging/SCLogger+Camera.h ================================================ // // SCLogger+Camera.h // Snapchat // // Created by Derek Peirce on 5/8/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
//

#import "AVCameraViewEnums.h"

#import
#import
#import

typedef NS_ENUM(NSUInteger, CameraCreationDelayLoggingStatus) {
    CAMERA_CREATION_DELAY_LOGGING_START,
    CAMERA_CREATION_DELAY_LOGGING_LAST_STEP,
    CAMERA_CREATION_DELAY_LOGGING_END,
};

@interface SCLogger (Camera)

@property (nonatomic, strong) NSNumber *cameraCreationDelayLoggingStatus;

- (void)logCameraCreationStartWithMethod:(SCCameraRecordingMethod)method
                           lensesEnabled:(BOOL)lensesEnabled
                            activeLensId:(NSString *)activeLensId
                        captureSessionId:(NSString *)captureSessionId;
- (void)logStillImageCaptureApi:(NSString *)api;
- (void)logPreCaptureOperationRequestedAt:(CFTimeInterval)requestTime;
- (void)logPreCaptureOperationFinishedAt:(CFTimeInterval)time;
- (void)logCameraCaptureRecordingGestureFinishedAtTime:(CFTimeInterval)endRecordingTime;
- (void)logCameraCaptureFinishedWithDuration:(CFTimeInterval)duration;
- (void)logCameraCaptureContentReady;
- (void)logPreviewFinishedPreparation;
- (void)logPreviewDisplayedForImage:(BOOL)isImage;
- (void)logPreviewAnimationComplete:(BOOL)isImage;
- (void)logPreviewFirstFramePlayed:(BOOL)isImage;
- (void)cancelCameraCreationEvent;
- (void)logRecordingMayBeTooShortWithMethod:(SCCameraRecordingMethod)method;
- (void)logRecordingWasTooShortWithFirstFrame:(CMTime)firstFrame
                            frontFacingCamera:(BOOL)isFrontFacing
                                  cameraFlips:(NSInteger)cameraFlips;
- (void)logManagedCapturerSettingFailure:(NSString *)settingTask error:(NSError *)error;
- (void)logCameraExposureAdjustmentDelayStart;
- (void)logCameraExposureAdjustmentDelayEndWithStrategy:(NSString *)strategy;
- (void)logCameraCreationDelaySubMetricsStartWithSignCode:(kSCSignPostCodeEnum)signPostCode;
- (void)logCameraCreationDelaySubMetricsEndWithSignCode:(kSCSignPostCodeEnum)signPostCode;

@end

================================================
FILE: Logging/SCLogger+Camera.m
================================================
//
// SCLogger+Camera.m
// Snapchat
//
// Created by Derek Peirce on 5/8/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCLogger+Camera.h"

#import "SCCameraTweaks.h"

#import
#import
#import
#import
#import
#import

@implementation SCLogger (Camera)

@dynamic cameraCreationDelayLoggingStatus;

- (NSNumber *)cameraCreationDelayLoggingStatus
{
    return objc_getAssociatedObject(self, @selector(cameraCreationDelayLoggingStatus));
}

- (void)setCameraCreationDelayLoggingStatus:(NSNumber *)status
{
    objc_setAssociatedObject(self, @selector(cameraCreationDelayLoggingStatus), status,
                             OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (BOOL)shouldLogCameraCreationDelay
{
    return [[self cameraCreationDelayLoggingStatus] intValue] != CAMERA_CREATION_DELAY_LOGGING_END;
}

- (void)logCameraCreationDelayEnd
{
    if ([[self cameraCreationDelayLoggingStatus] intValue] == CAMERA_CREATION_DELAY_LOGGING_LAST_STEP) {
        SCTraceSignPostEndForMetrics(kSCSignPostCameraCreationDelay, 0, 0, 0, 0);
        [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGING_END)];
    } else {
        [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGING_LAST_STEP)];
    }
}

- (void)logCameraCreationStartWithMethod:(SCCameraRecordingMethod)method
                           lensesEnabled:(BOOL)lensesEnabled
                            activeLensId:(NSString *)activeLensId
                        captureSessionId:(NSString *)captureSessionId
{
    NSMutableDictionary *parameters = [@{
        @"lens_ui_enabled" : @(lensesEnabled),
        @"analytics_version" : kSCCameraDelayEventVersion,
        @"method" : @(method),
    } mutableCopy];
    if (lensesEnabled && activeLensId) {
        [parameters setObject:activeLensId forKey:@"lens_id"];
    }
    if (captureSessionId) {
        [parameters setObject:captureSessionId forKey:@"capture_session_id"];
    }
    [self setCameraCreationDelayLoggingStatus:@(CAMERA_CREATION_DELAY_LOGGING_START)];
    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCreationDelay];
    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraRecordingGestureFinished];
    [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationRequested];
    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraCaptureDelayEvent
                                         uniqueId:@""
                                    isUniqueEvent:NO
                                       parameters:parameters
                               shouldLogStartTime:YES];
}

- (void)logCameraExposureAdjustmentDelayStart
{
    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraExposureAdjustmentDelay
                                         uniqueId:@""
                                    isUniqueEvent:NO
                                       parameters:nil
                               shouldLogStartTime:YES];
}

- (void)logCameraExposureAdjustmentDelayEndWithStrategy:(NSString *)strategy
{
    [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraExposureAdjustmentDelay
                                       uniqueId:@""
                                     parameters:@{ @"strategy" : strategy }];
}

- (void)logCameraCaptureRecordingGestureFinishedAtTime:(CFTimeInterval)endRecordingTime
{
    [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraRecordingGestureFinished];
    [[SCLogger sharedInstance]
        updateLogTimedEvent:kSCCameraCaptureDelayEvent
                   uniqueId:@""
                     update:^(NSMutableDictionary *startParameters) {
                         NSMutableDictionary *eventParameters =
                             startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey];
                         NSNumber *recordStartTime =
                             (NSNumber *)eventParameters[kSCCameraSubmetricsPreCaptureOperationFinished];
                         CFTimeInterval endRecordingTimeOffset =
                             endRecordingTime -
                             [startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventTimeKey] doubleValue];
                         if (recordStartTime) {
                             CFTimeInterval timeDisplacement =
                                 ([recordStartTime doubleValue] / 1000.0) - endRecordingTimeOffset;
                             [eventParameters
                                 setObject:@(timeDisplacement)
                                    forKey:SCPerformanceMetricsKey.kSCLoggerStartEventTimeAdjustmentKey];
                         }
                         [self addSplitPoint:kSCCameraSubmetricsRecordingGestureFinished
                                      atTime:endRecordingTime
toEvent:startParameters]; }]; } - (void)logStillImageCaptureApi:(NSString *)api { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationRequested]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationFinished]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCaptureContentReady]; CFTimeInterval requestTime = CACurrentMediaTime(); [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" update:^(NSMutableDictionary *startParameters) { NSMutableDictionary *eventParameters = startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey]; [eventParameters setObject:api forKey:@"api_type"]; [eventParameters setObject:@(1) forKey:@"camera_api_level"]; [self addSplitPoint:@"PRE_CAPTURE_OPERATION_REQUESTED" atTime:requestTime toEvent:startParameters]; }]; } - (void)logPreCaptureOperationRequestedAt:(CFTimeInterval)requestTime { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationRequested]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreCaptureOperationFinished]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraCaptureContentReady]; [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreCaptureOperationRequested time:requestTime]; } - (void)logPreCaptureOperationFinishedAt:(CFTimeInterval)time { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreCaptureOperationFinished]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreviewPlayerReady]; [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreCaptureOperationFinished time:time]; } - (void)logCameraCaptureFinishedWithDuration:(CFTimeInterval)duration { [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" update:^(NSMutableDictionary *startParameters) { NSMutableDictionary *eventParameters = startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey]; [eventParameters setObject:@(SCTimeInMillisecond(duration)) forKey:@"content_duration"]; }]; } - (void)logCameraCaptureContentReady { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraCaptureContentReady]; [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsCameraCaptureContentReady]; } - (void)logPreviewFinishedPreparation { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewFinishPreparation]; [self logCameraCreationDelaySubMetricsStartWithSignCode:kSCSignPostCameraPreviewAnimationFinish]; [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreviewFinishPreparation]; } - (void)logPreviewDisplayedForImage:(BOOL)isImage { [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewLayoutReady]; [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreviewLayoutReady]; } - (void)logPreviewAnimationComplete:(BOOL)isImage { [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreviewAnimationFinish]; [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewAnimationFinish]; [self logCameraCreationDelayEnd]; [self conditionallyLogTimedEventEnd:kSCCameraCaptureDelayEvent uniqueId:@"" parameters:@{ @"type" : isImage ? 
@"image" : @"video", } shouldLog:^BOOL(NSDictionary *startParameters) { // For video, PREVIEW_PLAYER_READY and PREVIEW_ANIMATION_FINISH can happen in either // order. So here we check for existence of this key, and end timer if the other // event have happened. NSMutableDictionary *eventParameters = startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey]; return eventParameters[kSCCameraSubmetricsPreviewPlayerReady] != nil; }]; } - (void)logPreviewFirstFramePlayed:(BOOL)isImage { [self updateLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@"" splitPoint:kSCCameraSubmetricsPreviewPlayerReady]; [self logCameraCreationDelaySubMetricsEndWithSignCode:kSCSignPostCameraPreviewPlayerReady]; [self logCameraCreationDelayEnd]; [self conditionallyLogTimedEventEnd:kSCCameraCaptureDelayEvent uniqueId:@"" parameters:@{ @"type" : isImage ? @"image" : @"video", } shouldLog:^BOOL(NSDictionary *startParameters) { NSMutableDictionary *eventParameters = startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey]; // See the comment above for PREVIEW_PLAYER_READY and PREVIEW_ANIMATION_FINISH. return eventParameters[kSCCameraSubmetricsPreviewAnimationFinish] != nil; }]; } - (void)cancelCameraCreationEvent { [self cancelLogTimedEvent:kSCCameraCaptureDelayEvent uniqueId:@""]; } - (void)logRecordingMayBeTooShortWithMethod:(SCCameraRecordingMethod)method { [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraMetricsRecordingTooShort uniqueId:@""]; [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsRecordingTooShort uniqueId:@"" isUniqueEvent:NO parameters:@{ @"method" : @(method), @"analytics_version" : kSCCameraRecordingTooShortVersion, } shouldLogStartTime:YES]; } - (void)logRecordingWasTooShortWithFirstFrame:(CMTime)firstFrame frontFacingCamera:(BOOL)isFrontFacing cameraFlips:(NSInteger)cameraFlips { [self logTimedEventEnd:kSCCameraMetricsRecordingTooShort uniqueId:@"" update:^(NSDictionary *startParameters, CFTimeInterval eventEndTime, CFTimeInterval adjustedTime) { NSMutableDictionary *eventParameters = startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventParametersKey]; if (CMTIME_IS_VALID(firstFrame)) { CFTimeInterval startTime = [startParameters[SCPerformanceMetricsKey.kSCLoggerStartEventTimeKey] doubleValue]; CFTimeInterval firstFrameRelative = CMTimeGetSeconds(firstFrame) - startTime; [eventParameters setObject:@(firstFrameRelative) forKey:@"first_frame_s"]; } [eventParameters setObject:@(isFrontFacing) forKey:@"is_front_facing"]; if (cameraFlips) { [eventParameters setObject:@(cameraFlips > 0) forKey:@"has_camera_been_flipped"]; } }]; } - (void)logManagedCapturerSettingFailure:(NSString *)settingTask error:(NSError *)error { NSMutableDictionary *parameters = [[NSMutableDictionary alloc] init]; parameters[@"setting_task"] = settingTask; if (error) { parameters[@"setting error"] = error; } [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraManagedCaptureSettingFailure uniqueId:@"" parameters:parameters]; } - (void)logCameraCreationDelaySubMetricsStartWithSignCode:(kSCSignPostCodeEnum)signPostCode { if ([self shouldLogCameraCreationDelay]) { SCTraceSignPostStartForMetrics(signPostCode, 0, 0, 0, 0); } } - (void)logCameraCreationDelaySubMetricsEndWithSignCode:(kSCSignPostCodeEnum)signPostCode { if ([self shouldLogCameraCreationDelay]) { SCTraceSignPostEndForMetrics(signPostCode, 0, 0, 0, 0); } } @end ================================================ FILE: Logging/SCManiphestTicketCreator.h ================================================ // // 
SCManiphestTicketCreator.h // SCCamera // // Created by Michel Loenngren on 4/16/18. // #import /** Protocol for filing jira tickets and beta s2r. */ @protocol SCManiphestTicketCreator - (void)createAndFile:(NSData *)image creationTime:(long)reportCreationTime description:(NSString *)bugDescription email:(NSString *)otherEmail project:(NSString *)projectName subproject:(NSString *)subprojectName; - (void)createAndFileBetaReport:(NSString *)msg; @end ================================================ FILE: ManagedCapturer/ARConfiguration+SCConfiguration.h ================================================ // // ARConfiguration+SCConfiguration.h // Snapchat // // Created by Max Goedjen on 11/7/17. // #import "SCManagedCaptureDevice.h" #import @interface ARConfiguration (SCConfiguration) + (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position; + (ARConfiguration *_Nullable)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position; @end ================================================ FILE: ManagedCapturer/ARConfiguration+SCConfiguration.m ================================================ // // ARConfiguration+SCConfiguration.m // Snapchat // // Created by Max Goedjen on 11/7/17. // #import "ARConfiguration+SCConfiguration.h" #import "SCCapturerDefines.h" @implementation ARConfiguration (SCConfiguration) + (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position { return [[[self sc_configurationForDevicePosition:position] class] isSupported]; } + (ARConfiguration *)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position { if (@available(iOS 11.0, *)) { if (position == SCManagedCaptureDevicePositionBack) { ARWorldTrackingConfiguration *config = [[ARWorldTrackingConfiguration alloc] init]; config.planeDetection = ARPlaneDetectionHorizontal; config.lightEstimationEnabled = NO; return config; } else { #ifdef SC_USE_ARKIT_FACE return [[ARFaceTrackingConfiguration alloc] init]; #endif } } return nil; } @end ================================================ FILE: ManagedCapturer/AVCaptureConnection+InputDevice.h ================================================ // // AVCaptureConnection+InputDevice.h // Snapchat // // Created by William Morriss on 1/20/15 // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import @interface AVCaptureConnection (InputDevice) - (AVCaptureDevice *)inputDevice; @end ================================================ FILE: ManagedCapturer/AVCaptureConnection+InputDevice.m ================================================ // // AVCaptureConnection+InputDevice.m // Snapchat // // Created by William Morriss on 1/20/15 // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "AVCaptureConnection+InputDevice.h" #import @implementation AVCaptureConnection (InputDevice) - (AVCaptureDevice *)inputDevice { NSArray *inputPorts = self.inputPorts; AVCaptureInputPort *port = [inputPorts firstObject]; SCAssert([port.input isKindOfClass:[AVCaptureDeviceInput class]], @"unexpected port"); AVCaptureDeviceInput *deviceInput = (AVCaptureDeviceInput *)port.input; AVCaptureDevice *device = deviceInput.device; return device; } @end ================================================ FILE: ManagedCapturer/AVCaptureDevice+ConfigurationLock.h ================================================ // // AVCaptureDevice+ConfigurationLock.h // Snapchat // // Created by Derek Peirce on 4/19/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
//
#import
#import

@interface AVCaptureDevice (ConfigurationLock)

/*
 The following method will lock this AVCaptureDevice, run the task, then unlock the device. The task is usually
 related to configuring the AVCaptureDevice. It returns a boolean telling you whether or not your task ran
 successfully, and you can use that boolean to adjust your strategy for handling the failure.

 In some cases we don't have a good mechanism for handling the failure. E.g. if we want to re-focus but fail to
 do so, what is the next step? Pop up an alert view to the user? If yes, it is intrusive; if not, the user will
 get confused. Precisely because the error handling is difficult, we would like to notify you when the task
 fails. If the task does not run successfully, we will log an event using SCLogger for better visibility.
 */
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task;

/*
 The following method has the same function as the one above. The difference is that it retries the operation a
 certain number of times. Please pass a number less than or equal to 2. When retry equals 0, we only try to lock
 once. When retry equals 1, we retry once if the first try fails, and so on.
 */
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes;

@end


================================================
FILE: ManagedCapturer/AVCaptureDevice+ConfigurationLock.m
================================================
//
//  AVCaptureDevice+ConfigurationLock.m
//  Snapchat
//
//  Created by Derek Peirce on 4/19/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "AVCaptureDevice+ConfigurationLock.h"

#import "SCLogger+Camera.h"

#import
#import
#import

@implementation AVCaptureDevice (ConfigurationLock)

- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task
{
    return [self runTask:taskName withLockedConfiguration:task retry:0];
}

- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes
{
    SCAssert(taskName, @"camera logger taskString should not be empty");
    SCAssert(retryTimes <= 2 && retryTimes >= 0, @"retry times should be equal to or below 2.");
    NSError *error = nil;
    BOOL deviceLockSuccess = NO;
    NSUInteger retryCounter = 0;
    while (retryCounter <= retryTimes && !deviceLockSuccess) {
        deviceLockSuccess = [self lockForConfiguration:&error];
        retryCounter++;
    }
    if (deviceLockSuccess) {
        task();
        [self unlockForConfiguration];
        SCLogCoreCameraInfo(@"AVCaptureDevice setting succeeded, task:%@ tryCount:%zu", taskName,
                            (unsigned long)retryCounter);
    } else {
        SCLogCoreCameraError(@"AVCaptureDevice encountered an error during %@: %@", taskName, error);
        [[SCLogger sharedInstance] logManagedCapturerSettingFailure:taskName error:error];
    }
    return deviceLockSuccess;
}

@end
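A minimal usage sketch for the category above: changing the focus mode under the configuration lock, retrying the lock once. The device lookup, task name, and focus-mode choice are illustrative assumptions, not code from this repository.

// Hypothetical caller sketch (not from this repository).
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
BOOL succeeded = [device runTask:@"set continuous autofocus"
         withLockedConfiguration:^{
             // Only mutate the device while the configuration lock is held.
             if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
                 device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
             }
         }
                           retry:1];
if (!succeeded) {
    // The category has already logged the failure via SCLogger; degrade gracefully here.
}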
================================================
FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration.h
================================================
//
//  SCCaptureConfiguration.h
//  Snapchat
//
//  Created by Lin Jia on 10/3/17.
//
//

#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCVideoCaptureSessionInfo.h"

#import
#import
#import

/*
 SCCaptureConfiguration is the configuration class customers use to configure the camera.

 This is how to use it:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
 // Conduct the settings here, e.g.:
 configuration.torchActive = YES;
 // Commit your configuration
 [captureConfigurator commitConfiguration:configuration completionHandler:handler]

 Here are several interesting facts about SCCaptureConfiguration:

 1) Though SCCaptureConfiguration has many parameters, you do not need to care about the parameters you do not
 intend to set. For example, if you only want to set night mode active, this is all the code you need:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
 configuration.isNightModeActive = YES;
 [captureConfigurator commitConfiguration:configuration completionHandler:handler]

 That is it.

 2) You can set multiple configuration settings and then commit; nothing takes effect until you commit, e.g.:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
 configuration.isNightModeActive = YES;
 configuration.zoomFactor = 5;
 configuration.lensesActive = YES;
 [captureConfigurator commitConfiguration:configuration completionHandler:handler]

 3) Committing a configuration means the configuration is gone. If you set parameters on a configuration after
 it has been committed, it will crash on debug builds; on other builds, such as production, the setting is
 ignored, e.g.:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
 configuration.isNightModeActive = YES;
 [captureConfigurator commitConfiguration:configuration completionHandler:handler]
 // The line below will crash on debug builds, and be ignored on other builds.
 configuration.zoomFactor = 5;

 4) Committing a configuration is an atomic action, meaning all the changes a customer wants to make to the
 camera happen as a group. If two customers commit at the same time, we handle the commits one by one.

 5) We are still figuring out which parameters should be in this configuration; parameters could be added or
 deleted later. In the end, the configuration is going to be the only way customers configure the camera.
 */
@interface SCCaptureConfiguration : NSObject

@property (nonatomic, assign) BOOL isRunning;
@property (nonatomic, assign) BOOL isNightModeActive;
@property (nonatomic, assign) BOOL lowLightCondition;
@property (nonatomic, assign) BOOL adjustingExposure;
@property (nonatomic, assign) SCManagedCaptureDevicePosition devicePosition;
@property (nonatomic, assign) CGFloat zoomFactor;
@property (nonatomic, assign) BOOL flashSupported;
@property (nonatomic, assign) BOOL torchSupported;
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;
@property (nonatomic, assign) BOOL lensesActive;
@property (nonatomic, assign) BOOL arSessionActive;
@property (nonatomic, assign) BOOL liveVideoStreaming;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) LSAGLView *videoPreviewGLView;
@property (nonatomic, assign) SCVideoCaptureSessionInfo captureSessionInfo;

@end
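The header's snippets leave the completion handler abstract. A hedged sketch of handling it, using the SCCaptureConfigurationCompletionHandler signature declared in SCCaptureConfigurator.h later in this dump; the captureConfigurator variable and the logging are illustrative assumptions.

// Hypothetical commit sketch (not from this repository); assumes
// captureConfigurator is an SCCaptureConfigurator instance.
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.torchActive = YES;
[captureConfigurator commitConfiguration:configuration
                       completionHandler:^(NSError *error, BOOL cameraChanged) {
                           if (error) {
                               // The commit failed; the camera keeps its previous configuration.
                               NSLog(@"torch commit failed: %@", error);
                           } else if (!cameraChanged) {
                               // The requested settings already matched the camera; nothing was touched.
                           }
                       }];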
// // #import "SCCaptureConfiguration.h" #import "SCCaptureConfiguration_Private.h" #import #import @interface SCCaptureConfiguration () { BOOL _sealed; NSMutableSet *_dirtyKeys; } @end @implementation SCCaptureConfiguration - (instancetype)init { self = [super init]; if (self) { _dirtyKeys = [[NSMutableSet alloc] init]; _sealed = NO; } return self; } - (void)setIsRunning:(BOOL)running { if ([self _configurationSealed]) { return; } _isRunning = running; [_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsRunning)]; } /* All set methods will be added later. They follow the format of setIsRunning. */ @end @implementation SCCaptureConfiguration (privateMethods) - (NSArray *)dirtyKeys { if (!_sealed && SCIsDebugBuild()) { SCAssert(NO, @"Configuration not sealed yet, setting is still happening!"); } return [_dirtyKeys allObjects]; } - (void)seal { _sealed = YES; } - (BOOL)_configurationSealed { if (_sealed) { if (SCIsDebugBuild()) { SCAssert(NO, @"Try to set property after commit configuration to configurator"); } return YES; } else { return NO; } } @end ================================================ FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.h ================================================ // // SCCaptureConfigurationAnnouncer.h // Snapchat // // Created by Lin Jia on 10/2/17. // // #import "SCCaptureConfigurationListener.h" #import /* All APIs are thread safe. Announcer will not retain your object. So even if customer forgets to call remove listener, it will not create zombie objects. */ @interface SCCaptureConfigurationAnnouncer : NSObject /* When customer adds an object to be a listener, that object will receive an update of current truth. That is the chance for the object to do adjustment according to the current configuration of the camera. */ - (void)addListener:(id)listener; - (void)removeListener:(id)listener; @end ================================================ FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer.m ================================================ // // SCCaptureConfigurationAnnouncer.m // Snapchat // // Created by Lin Jia on 10/2/17. 
// // #import "SCCaptureConfigurationAnnouncer.h" #import "SCCaptureConfigurationAnnouncer_Private.h" #import "SCCaptureConfigurator.h" #import #import @interface SCCaptureConfigurationAnnouncer () { NSHashTable> *_listeners; SCQueuePerformer *_performer; __weak SCCaptureConfigurator *_configurator; } @end @implementation SCCaptureConfigurationAnnouncer - (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator { self = [super init]; if (self) { _listeners = [NSHashTable> hashTableWithOptions:NSHashTableWeakMemory]; SCAssert(performer, @"performer should not be nil"); _performer = performer; _configurator = configurator; } return self; } - (void)addListener:(id)listener { [_performer perform:^{ SCAssert(listener, @"listener should not be nil"); [_listeners addObject:listener]; [listener captureConfigurationDidChangeTo:_configurator.currentConfiguration]; }]; } - (void)removeListener:(id)listener { [_performer perform:^{ SCAssert(listener, @"listener should not be nil"); [_listeners removeObject:listener]; }]; } - (void)deliverConfigurationChange:(id)configuration { SCAssertPerformer(_performer); for (id listener in _listeners) { [listener captureConfigurationDidChangeTo:configuration]; } } - (void)dealloc { [_listeners removeAllObjects]; } @end ================================================ FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationAnnouncer_Private.h ================================================ // // SCCaptureConfigurationAnnouncer_Private.h // Snapchat // // Created by Lin Jia on 10/2/17. // // #import "SCCaptureConfigurationAnnouncer.h" #import "SCManagedCapturerState.h" #import @class SCCaptureConfigurator; /* This private header is only going to be used by SCCaptureConfigurator. Other customers should only use the public header. */ @interface SCCaptureConfigurationAnnouncer () /* The announcer is going to be instantiated by SCCaptureConfigurator. It will take in a queue performer. The design is that announcer and configurator is going to share the same serial queue to avoid racing. This is something we could change later. */ - (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator; /* The API below is called by configurator to notify listener that configuration has changed. */ - (void)deliverConfigurationChange:(id)configuration; @end ================================================ FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurationListener.h ================================================ // // SCCaptureConfigurationListener.h // Snapchat // // Created by Lin Jia on 10/2/17. // #import "SCManagedCapturerState.h" #import @class SCCaptureConfiguration; /* As a listener to configuration of camera core, you will get an update whenever the configuration changes, and you will receive an immutable state object for the current truth. */ @protocol SCCaptureConfigurationListener - (void)captureConfigurationDidChangeTo:(id)state; @end ================================================ FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration_Private.h ================================================ // // SCCaptureConfiguration_Private.h // Snapchat // // Created by Lin Jia on 10/3/17. // // #import "SCCaptureConfiguration_Private.h" typedef NSNumber SCCaptureConfigurationDirtyKey; /* The key values to identify dirty keys in SCCaptureConfiguration. Dirty key is defined as the key customer changes. e.g. 
================================================
FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfiguration_Private.h
================================================
//
//  SCCaptureConfiguration_Private.h
//  Snapchat
//
//  Created by Lin Jia on 10/3/17.
//
//

#import "SCCaptureConfiguration_Private.h"

typedef NSNumber SCCaptureConfigurationDirtyKey;

/*
 The key values below identify dirty keys in SCCaptureConfiguration. A dirty key is a key the customer changes;
 e.g. if the customer toggles the device position, the dirty keys will contain
 SCCaptureConfigurationKeyDevicePosition.

 The list is not complete; it is only a draft for now and will be gradually tuned while we work on the APIs.
 */
typedef NS_ENUM(NSUInteger, SCCaptureConfigurationKey) {
    SCCaptureConfigurationKeyIsRunning,
    SCCaptureConfigurationKeyIsNightModeActive,
    SCCaptureConfigurationKeyLowLightCondition,
    SCCaptureConfigurationKeyDevicePosition,
    SCCaptureConfigurationKeyZoomFactor,
    SCCaptureConfigurationKeyFlashActive,
    SCCaptureConfigurationKeyTorchActive,
    SCCaptureConfigurationKeyARSessionActive,
    SCCaptureConfigurationKeyLensesActive,
    SCCaptureConfigurationKeyVideoRecording,
};

@interface SCCaptureConfiguration (internalMethods)

// Return dirtyKeys, which identify the parameters the customer wants to set.
- (NSArray *)dirtyKeys;

// Called by SCCaptureConfigurator to seal a configuration, so future changes are ignored.
- (void)seal;

- (BOOL)_configurationSealed;

@end


================================================
FILE: ManagedCapturer/CapturerV2/Configuration/SCCaptureConfigurator.h
================================================
//
//  SCCaptureConfigurator.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//
//

#import "SCCaptureConfiguration.h"
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCVideoCaptureSessionInfo.h"

#import
#import
#import

/*
 SCCaptureConfigurator is the class you use to configure the settings of the camera hardware, such as setting
 the camera to front or back, setting the hardware to a certain resolution, or activating night mode.

 You can use this class for many things:

 a) do a one-time query of the current camera configuration via currentConfiguration. Note that we represent
 the configuration via id; it is going to be an immutable object.

 b) register to be a listener of configuration changes via the announcer. Every time the camera configuration
 changes, you will receive an update.

 c) set the configuration via the commitConfiguration API. You convey your setting intention via
 SCCaptureConfiguration. You can register a completionHandler to be called after your configuration is done.
 Inside the completionHandler, we pass you an error if one happens, along with a boolean cameraChanged. If your
 configuration already equals the current configuration of the camera, we will not change the camera, and the
 boolean will be NO.

 d) All APIs are thread safe.
 */
typedef void (^SCCaptureConfigurationCompletionHandler)(NSError *error, BOOL cameraChanged);

@interface SCCaptureConfigurator : NSObject

@property (nonatomic, strong, readonly) SCCaptureConfigurationAnnouncer *announcer;
@property (nonatomic, strong, readonly) id currentConfiguration;

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer;

- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler;

@end
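The commitConfiguration: implementation in the .m below dispatches each dirty key to a _processKey:configuration: method that is still an empty stub in this dump. A hedged sketch of how that dispatch might eventually be filled in; the case bodies are illustrative assumptions, not leaked code.

// Hypothetical dirty-key dispatch sketch; the leaked stub body is empty.
- (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration
{
    SCAssertPerformer(_performer);
    switch (key) {
    case SCCaptureConfigurationKeyTorchActive:
        // e.g. forward configuration.torchActive to whatever object owns the AVCaptureDevice.
        break;
    case SCCaptureConfigurationKeyZoomFactor:
        // e.g. hand configuration.zoomFactor to a zoom handler.
        break;
    default:
        // Remaining keys get wired up as the V2 capturer matures.
        break;
    }
}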
// // #import "SCCaptureConfigurator.h" #import "SCCaptureConfigurationAnnouncer_Private.h" #import "SCCaptureConfiguration_Private.h" #import @interface SCCaptureConfigurator () { SCQueuePerformer *_performer; } @end @implementation SCCaptureConfigurator - (instancetype)initWithPerformer:(SCQueuePerformer *)performer { self = [super init]; if (self) { _announcer = [[SCCaptureConfigurationAnnouncer alloc] initWithPerformer:performer configurator:self]; _performer = performer; // TODO: initialize _currentConfiguration } return self; } - (void)commitConfiguration:(SCCaptureConfiguration *)configuration completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler { [configuration seal]; [_performer perform:^() { SCAssert(configuration, @"Configuration must be a valid input parameter"); NSArray *dirtyKeys = [configuration dirtyKeys]; for (SCCaptureConfigurationDirtyKey *key in dirtyKeys) { [self _processKey:[key integerValue] configuration:configuration]; } if (completionHandler) { // TODO: passing in right parameters. completionHandler(NULL, YES); } }]; } - (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration { // Tune the hardware depending on what key is dirty, and what is the value is inside configuration. } @end ================================================ FILE: ManagedCapturer/CapturerV2/Core/SCCaptureCore.h ================================================ // // SCCaptureCore.h // Snapchat // // Created by Lin Jia on 10/2/17. // // #import "SCCaptureStateMachineContext.h" #import "SCCapturer.h" #import #import @class SCCaptureConfigurator; /* SCCaptureCore abstracts away the hardware aspect of a camera. SCCaptureCore is the V2 version of the SCManagedCapturerV1. SCCaptureCore itself does very little things actually. Its main job is to expose APIs of camera hardware to outside customers. The actual heavy lifting is done via delegating the jobs to multiple worker classes. We generally categorize the operation of camera hardware into 2 categories: 1) make camera hardware do state transition. Such as what is shown in this graph: https://docs.google.com/presentation/d/1KWk-XSgO0wFAjBZXsl_OnHBGpi_pd9-ds6Wje8vX-0s/edit#slide=id.g2017e46295_1_10 2) config camera hardware setting, such as setting the camera to be front or back, such as setting camera hardware to be certain resolution, or to activate night mode. Indeed, we create 2 working classes to do the heavy lifting. Both of them are under construction. Feel free to checkout SCCaptureConfigurator, which is responsible for 2). */ @interface SCCaptureCore : NSObject @property (nonatomic, strong, readonly) SCCaptureStateMachineContext *stateMachine; @end ================================================ FILE: ManagedCapturer/CapturerV2/Core/SCCaptureCore.m ================================================ // // SCCaptureCore.m // Snapchat // // Created by Lin Jia on 10/2/17. 
// // #import "SCCaptureCore.h" #import "SCCaptureDeviceAuthorizationChecker.h" #import "SCCaptureResource.h" #import "SCCaptureWorker.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCManagedCapturerGLViewManagerAPI.h" #import "SCManagedCapturerLSAComponentTrackerAPI.h" #import "SCManagedCapturerV1_Private.h" #import #import static const char *kSCCaptureDeviceAuthorizationManagerQueueLabel = "com.snapchat.capture_device_authorization_checker_queue"; @implementation SCCaptureCore { SCManagedCapturerV1 *_managedCapturerV1; SCQueuePerformer *_queuePerformer; SCCaptureDeviceAuthorizationChecker *_authorizationChecker; } @synthesize blackCameraDetector = _blackCameraDetector; - (instancetype)init { SCTraceStart(); SCAssertMainThread(); self = [super init]; if (self) { _managedCapturerV1 = [SCManagedCapturerV1 sharedInstance]; SCCaptureResource *resource = _managedCapturerV1.captureResource; _queuePerformer = resource.queuePerformer; _stateMachine = [[SCCaptureStateMachineContext alloc] initWithResource:resource]; SCQueuePerformer *authorizationCheckPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureDeviceAuthorizationManagerQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _authorizationChecker = [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:authorizationCheckPerformer]; } return self; } - (id)lensProcessingCore { return _managedCapturerV1.lensProcessingCore; } // For APIs inside protocol SCCapture, if they are related to capture state machine, we delegate to state machine. - (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition completionHandler:completionHandler context:context]; } - (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { return [_stateMachine startRunningWithContext:context completionHandler:completionHandler]; } #pragma mark - Recording / Capture - (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { [_stateMachine captureStillImageAsynchronouslyWithAspectRatio:aspectRatio captureSessionID:captureSessionID completionHandler:completionHandler context:context]; } - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { [_stateMachine stopRunningWithCapturerToken:token completionHandler:completionHandler context:context]; } - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler after:(NSTimeInterval)delay context:(NSString *)context { [_stateMachine stopRunningWithCapturerToken:token after:delay completionHandler:completionHandler context:context]; } #pragma mark - Scanning - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context { [_stateMachine startScanAsynchronouslyWithScanConfiguration:configuration context:context]; } - (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler 
context:(NSString *)context { [_stateMachine stopScanAsynchronouslyWithCompletionHandler:completionHandler context:context]; } - (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context audioConfiguration:(SCAudioConfiguration *)configuration { [_stateMachine prepareForRecordingAsynchronouslyWithAudioConfiguration:configuration context:context]; } - (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context { [_stateMachine startRecordingWithOutputSettings:outputSettings audioConfiguration:configuration maxDuration:maxDuration fileURL:fileURL captureSessionID:captureSessionID completionHandler:completionHandler context:context]; } - (void)stopRecordingAsynchronouslyWithContext:(NSString *)context { [_stateMachine stopRecordingWithContext:context]; } - (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context { [_stateMachine cancelRecordingWithContext:context]; [[self snapCreationTriggers] markSnapCreationEndWithContext:context]; } #pragma mark - - (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 startStreamingAsynchronouslyWithCompletionHandler:completionHandler context:context]; } - (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController context:(NSString *)context { [_managedCapturerV1 addSampleBufferDisplayController:sampleBufferDisplayController context:context]; } #pragma mark - Utilities - (void)convertViewCoordinates:(CGPoint)viewCoordinates completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler context:(NSString *)context { [_managedCapturerV1 convertViewCoordinates:viewCoordinates completionHandler:completionHandler context:context]; } - (void)detectLensCategoryOnNextFrame:(CGPoint)point lenses:(NSArray *)lenses completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion context:(NSString *)context { [_managedCapturerV1 detectLensCategoryOnNextFrame:point lenses:lenses completion:completion context:context]; } #pragma mark - Configurations - (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setDevicePositionAsynchronously:devicePosition completionHandler:completionHandler context:context]; } - (void)setFlashActive:(BOOL)flashActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setFlashActive:flashActive completionHandler:completionHandler context:context]; } - (void)setLensesActive:(BOOL)lensesActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setLensesActive:lensesActive completionHandler:completionHandler context:context]; } - (void)setLensesActive:(BOOL)lensesActive filterFactory:(SCLookseryFilterFactory *)filterFactory completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setLensesActive:lensesActive filterFactory:filterFactory completionHandler:completionHandler context:context]; } - (void)setLensesInTalkActive:(BOOL)lensesActive 
completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setLensesInTalkActive:lensesActive completionHandler:completionHandler context:context]; } - (void)setTorchActiveAsynchronously:(BOOL)torchActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setTorchActiveAsynchronously:torchActive completionHandler:completionHandler context:context]; } - (void)setNightModeActiveAsynchronously:(BOOL)active completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setNightModeActiveAsynchronously:active completionHandler:completionHandler context:context]; } - (void)lockZoomWithContext:(NSString *)context { [_managedCapturerV1 lockZoomWithContext:context]; } - (void)unlockZoomWithContext:(NSString *)context { [_managedCapturerV1 unlockZoomWithContext:context]; } - (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context { [_managedCapturerV1 setZoomFactorAsynchronously:zoomFactor context:context]; } - (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor devicePosition:(SCManagedCaptureDevicePosition)devicePosition context:(NSString *)context { [_managedCapturerV1 resetZoomFactorAsynchronously:zoomFactor devicePosition:devicePosition context:context]; } - (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setExposurePointOfInterestAsynchronously:pointOfInterest fromUser:fromUser completionHandler:completionHandler context:context]; } - (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setAutofocusPointOfInterestAsynchronously:pointOfInterest completionHandler:completionHandler context:context]; } - (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 setPortraitModePointOfInterestAsynchronously:pointOfInterest completionHandler:completionHandler context:context]; } - (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_managedCapturerV1 continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:completionHandler context:context]; } // I need to call these three methods from SCAppDelegate explicitly so that I get the latest information. 
- (void)applicationDidEnterBackground { [_managedCapturerV1 applicationDidEnterBackground]; } - (void)applicationWillEnterForeground { [_managedCapturerV1 applicationWillEnterForeground]; } - (void)applicationDidBecomeActive { [_managedCapturerV1 applicationDidBecomeActive]; } - (void)applicationWillResignActive { [_managedCapturerV1 applicationWillResignActive]; } - (void)mediaServicesWereReset { [_managedCapturerV1 mediaServicesWereReset]; } - (void)mediaServicesWereLost { [_managedCapturerV1 mediaServicesWereLost]; } #pragma mark - Add / Remove Listener - (void)addListener:(id)listener { [_managedCapturerV1 addListener:listener]; } - (void)removeListener:(id)listener { [_managedCapturerV1 removeListener:listener]; } - (void)addVideoDataSourceListener:(id)listener { [_managedCapturerV1 addVideoDataSourceListener:listener]; } - (void)removeVideoDataSourceListener:(id)listener { [_managedCapturerV1 removeVideoDataSourceListener:listener]; } - (void)addDeviceCapacityAnalyzerListener:(id)listener { [_managedCapturerV1 addDeviceCapacityAnalyzerListener:listener]; } - (void)removeDeviceCapacityAnalyzerListener:(id)listener { [_managedCapturerV1 removeDeviceCapacityAnalyzerListener:listener]; } - (NSString *)debugInfo { return [_managedCapturerV1 debugInfo]; } - (id)currentVideoDataSource { return [_managedCapturerV1 currentVideoDataSource]; } // For APIs inside protocol SCCapture, if they are not related to capture state machine, we directly delegate to V1. - (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback { [_managedCapturerV1 checkRestrictedCamera:callback]; } - (void)recreateAVCaptureSession { [_managedCapturerV1 recreateAVCaptureSession]; } #pragma mark - - (CMTime)firstWrittenAudioBufferDelay { return [SCCaptureWorker firstWrittenAudioBufferDelay:_managedCapturerV1.captureResource]; } - (BOOL)audioQueueStarted { return [SCCaptureWorker audioQueueStarted:_managedCapturerV1.captureResource]; } - (BOOL)isLensApplied { return [SCCaptureWorker isLensApplied:_managedCapturerV1.captureResource]; } - (BOOL)isVideoMirrored { return [SCCaptureWorker isVideoMirrored:_managedCapturerV1.captureResource]; } - (SCVideoCaptureSessionInfo)activeSession { return _managedCapturerV1.activeSession; } - (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector deviceMotionProvider:(id)deviceMotionProvider fileInputDecider:(id)fileInputDecider arImageCaptureProvider:(id)arImageCaptureProvider glviewManager:(id)glViewManager lensAPIProvider:(id)lensAPIProvider lsaComponentTracker:(id)lsaComponentTracker managedCapturerPreviewLayerControllerDelegate: (id)previewLayerControllerDelegate { _managedCapturerV1.captureResource.blackCameraDetector = blackCameraDetector; _managedCapturerV1.captureResource.deviceMotionProvider = deviceMotionProvider; _managedCapturerV1.captureResource.fileInputDecider = fileInputDecider; _managedCapturerV1.captureResource.arImageCaptureProvider = arImageCaptureProvider; _managedCapturerV1.captureResource.videoPreviewGLViewManager = glViewManager; [_managedCapturerV1.captureResource.videoPreviewGLViewManager configureWithCaptureResource:_managedCapturerV1.captureResource]; _managedCapturerV1.captureResource.lensAPIProvider = lensAPIProvider; _managedCapturerV1.captureResource.lsaTrackingComponentHandler = lsaComponentTracker; [_managedCapturerV1.captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_managedCapturerV1.captureResource]; _managedCapturerV1.captureResource.previewLayerControllerDelegate = 
previewLayerControllerDelegate; [SCManagedCapturePreviewLayerController sharedInstance].delegate = _managedCapturerV1.captureResource.previewLayerControllerDelegate; } - (SCBlackCameraDetector *)blackCameraDetector { return _managedCapturerV1.captureResource.blackCameraDetector; } - (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler context:(NSString *)context { [_managedCapturerV1 captureSingleVideoFrameAsynchronouslyWithCompletionHandler:completionHandler context:context]; } - (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler context:(NSString *)context { [_managedCapturerV1 sampleFrameWithCompletionHandler:completionHandler context:context]; } - (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context { [_managedCapturerV1 addTimedTask:task context:context]; } - (void)clearTimedTasksWithContext:(NSString *)context { [_managedCapturerV1 clearTimedTasksWithContext:context]; } - (BOOL)authorizedForVideoCapture { return [_authorizationChecker authorizedForVideoCapture]; } - (void)preloadVideoCaptureAuthorization { [_authorizationChecker preloadVideoCaptureAuthorization]; } #pragma mark - Snap Creation triggers - (SCSnapCreationTriggers *)snapCreationTriggers { return [_managedCapturerV1 snapCreationTriggers]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthBlurMetalModule.metal ================================================ // // SCDepthBlurMetalModule.metal // Snapchat // // Created by Brian Ng on 10/31/17. // #include using namespace metal; struct DepthBlurRenderData { float depthRange; float depthOffset; float depthBlurForegroundThreshold; float depthBlurBackgroundThreshold; }; kernel void kernel_depth_blur(texture2d sourceYTexture [[texture(0)]], texture2d sourceUVTexture [[texture(1)]], texture2d sourceDepthTexture[[texture(2)]], texture2d sourceBlurredYTexture [[texture(3)]], texture2d destinationYTexture [[texture(4)]], texture2d destinationUVTexture [[texture(5)]], constant DepthBlurRenderData &renderData [[buffer(0)]], uint2 gid [[thread_position_in_grid]], uint2 size [[threads_per_grid]]) { float2 valueUV = sourceUVTexture.read(gid).rg; float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r; float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange; float valueYUnblurred = sourceYTexture.read(gid).r; float valueYBlurred = sourceBlurredYTexture.read(gid).r; float valueY = 0; if (normalizedDepthValue > renderData.depthBlurForegroundThreshold) { valueY = valueYUnblurred; } else if (normalizedDepthValue < renderData.depthBlurBackgroundThreshold) { valueY = valueYBlurred; } else { float blendRange = renderData.depthBlurForegroundThreshold - renderData.depthBlurBackgroundThreshold; float normalizedBlendDepthValue = (normalizedDepthValue - renderData.depthBlurBackgroundThreshold) / blendRange; valueY = valueYUnblurred * normalizedBlendDepthValue + valueYBlurred * (1 - normalizedBlendDepthValue); } destinationYTexture.write(valueY, gid); destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid); } ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.h ================================================ // // SCDepthBlurMetalRenderCommand.h // Snapchat // // Created by Brian Ng on 11/8/17. 
// // #import "SCMetalModule.h" #import /* @class SCDepthBlurMetalRenderCommand Prepares the command buffer for the SCDepthBlurMetalModule.metal shader. */ @interface SCDepthBlurMetalRenderCommand : NSObject @property (nonatomic, readonly) NSString *functionName; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthBlurMetalRenderCommand.m ================================================ // // SCDepthBlurMetalRenderCommand.m // Snapchat // // Created by Brian Ng on 11/8/17. // // #import "SCDepthBlurMetalRenderCommand.h" #import "SCCameraTweaks.h" #import "SCMetalUtils.h" #import @import MetalPerformanceShaders; @implementation SCDepthBlurMetalRenderCommand typedef struct DepthBlurRenderData { float depthRange; float depthOffset; float depthBlurForegroundThreshold; float depthBlurBackgroundThreshold; } DepthBlurRenderData; #pragma mark - SCMetalRenderCommand - (id)encodeMetalCommand:(id)commandBuffer pipelineState:(id)pipelineState textureResource:(SCMetalTextureResource *)textureResource { #if !TARGET_IPHONE_SIMULATOR CGFloat depthBlurForegroundThreshold = textureResource.depthBlurForegroundThreshold; CGFloat depthBlurBackgroundThreshold = textureResource.depthBlurForegroundThreshold > SCCameraTweaksDepthBlurBackgroundThreshold() ? SCCameraTweaksDepthBlurBackgroundThreshold() : 0; DepthBlurRenderData depthBlurRenderData = { .depthRange = textureResource.depthRange, .depthOffset = textureResource.depthOffset, .depthBlurBackgroundThreshold = depthBlurBackgroundThreshold, .depthBlurForegroundThreshold = depthBlurForegroundThreshold, }; id depthBlurRenderDataBuffer = [textureResource.device newBufferWithLength:sizeof(DepthBlurRenderData) options:MTLResourceOptionCPUCacheModeDefault]; memcpy(depthBlurRenderDataBuffer.contents, &depthBlurRenderData, sizeof(DepthBlurRenderData)); MPSImageGaussianBlur *kernel = [[MPSImageGaussianBlur alloc] initWithDevice:textureResource.device sigma:SCCameraTweaksBlurSigma()]; [kernel encodeToCommandBuffer:commandBuffer sourceTexture:textureResource.sourceYTexture destinationTexture:textureResource.sourceBlurredYTexture]; id commandEncoder = [commandBuffer computeCommandEncoder]; [commandEncoder setComputePipelineState:pipelineState]; [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0]; [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1]; [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:2]; [commandEncoder setTexture:textureResource.sourceBlurredYTexture atIndex:3]; [commandEncoder setTexture:textureResource.destinationYTexture atIndex:4]; [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:5]; [commandEncoder setBuffer:depthBlurRenderDataBuffer offset:0 atIndex:0]; return commandEncoder; #else return nil; #endif } - (BOOL)requiresDepthData { return YES; } #pragma mark - SCMetalModuleFunctionProvider - (NSString *)functionName { return @"kernel_depth_blur"; } - (NSString *)description { return [NSString sc_stringWithFormat:@"SCDepthBlurMetalRenderCommand (shader function = %@)", self.functionName]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalModule.metal ================================================ // // SCDepthToGrayscaleMetalModule.metal // Snapchat // // Created by Brian Ng on 12/7/17. 
// #include using namespace metal; typedef struct DepthToGrayscaleRenderData { float depthRange; float depthOffset; } DepthToGrayscaleRenderData; kernel void kernel_depth_to_grayscale(texture2d sourceDepthTexture[[texture(0)]], texture2d destinationYTexture [[texture(1)]], texture2d destinationUVTexture [[texture(2)]], constant DepthToGrayscaleRenderData &renderData [[buffer(0)]], uint2 gid [[thread_position_in_grid]], uint2 size [[threads_per_grid]]) { float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r; float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange; destinationYTexture.write(normalizedDepthValue, gid); destinationUVTexture.write(float4(0.5, 0.5, 0, 0), gid); } ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.h ================================================ // // SCDepthToGrayscaleMetalRenderCommand.h // Snapchat // // Created by Brian Ng on 12/7/17. // // #import "SCMetalModule.h" #import /* @class SCDepthToGrayscaleMetalRenderCommand Prepares the command buffer for the SCDepthToGrayscaleMetalModule.metal shader. */ @interface SCDepthToGrayscaleMetalRenderCommand : NSObject @property (nonatomic, readonly) NSString *functionName; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDepthToGrayscaleMetalRenderCommand.m ================================================ // // SCDepthToGrayscaleMetalRenderCommand.m // Snapchat // // Created by Brian Ng on 12/7/17. // // #import "SCDepthToGrayscaleMetalRenderCommand.h" #import "SCCameraTweaks.h" #import "SCMetalUtils.h" #import @import MetalPerformanceShaders; @implementation SCDepthToGrayscaleMetalRenderCommand typedef struct DepthToGrayscaleRenderData { float depthRange; float depthOffset; } DepthToGrayscaleRenderData; #pragma mark - SCMetalRenderCommand - (id)encodeMetalCommand:(id)commandBuffer pipelineState:(id)pipelineState textureResource:(SCMetalTextureResource *)textureResource { #if !TARGET_IPHONE_SIMULATOR DepthToGrayscaleRenderData depthToGrayscaleRenderData = { .depthRange = textureResource.depthRange, .depthOffset = textureResource.depthOffset, }; id depthToGrayscaleDataBuffer = [textureResource.device newBufferWithLength:sizeof(DepthToGrayscaleRenderData) options:MTLResourceOptionCPUCacheModeDefault]; memcpy(depthToGrayscaleDataBuffer.contents, &depthToGrayscaleRenderData, sizeof(DepthToGrayscaleRenderData)); id commandEncoder = [commandBuffer computeCommandEncoder]; [commandEncoder setComputePipelineState:pipelineState]; [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:0]; [commandEncoder setTexture:textureResource.destinationYTexture atIndex:1]; [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:2]; [commandEncoder setBuffer:depthToGrayscaleDataBuffer offset:0 atIndex:0]; return commandEncoder; #else return nil; #endif } - (BOOL)requiresDepthData { return YES; } #pragma mark - SCMetalModuleFunctionProvider - (NSString *)functionName { return @"kernel_depth_to_grayscale"; } - (NSString *)description { return [NSString sc_stringWithFormat:@"SCDepthToGrayscaleMetalRenderCommand (shader function = %@)", self.functionName]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.h ================================================ // // SCDigitalExposureHandler.h // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/15/17. 
// Copyright © 2017 Snapchat, Inc. All rights reserved. // #import #import @class SCExposureAdjustProcessingModule; /* @class SCDigitalExposureHandler The SCDigitalExposureHandler will be built by the SCProcessingBuilder when the user indicates that he/she wants to add SCExposureAdjustProcessingModule to the processing pipeline. The builder will take care of initializing the handler by linking the processing module. Caller of the builder can then link up the handler to the UI element (in this case, SCExposureSlider) so that user's control is hooked up to the processing module. */ @interface SCDigitalExposureHandler : NSObject - (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule; - (void)setExposureParameter:(CGFloat)value; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.m ================================================ // // SCDigitalExposureHandler.m // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/15/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCDigitalExposureHandler.h" #import "SCExposureAdjustProcessingModule.h" @implementation SCDigitalExposureHandler { __weak SCExposureAdjustProcessingModule *_processingModule; } - (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule { if (self = [super init]) { _processingModule = processingModule; } return self; } - (void)setExposureParameter:(CGFloat)value { [_processingModule setEVValue:value]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCExposureAdjustMetalModule.metal ================================================ // // SCExposureAdjustMetalModule.metal // Snapchat // // Created by Michel Loenngren on 7/11/17. 
//
//
#include <metal_stdlib>

using namespace metal;

kernel void kernel_exposure_adjust(texture2d sourceYTexture [[texture(0)]],
                                   texture2d sourceUVTexture [[texture(1)]],
                                   texture2d destinationYTexture [[texture(2)]],
                                   texture2d destinationUVTexture [[texture(3)]],
                                   uint2 gid [[thread_position_in_grid]],
                                   uint2 size [[threads_per_grid]])
{
    float valueY = sourceYTexture.read(gid).r;
    float2 valueUV = sourceUVTexture.read(gid).rg;
    float factor = 1.0 / pow(1.0 + valueY, 5) + 1.0;
    valueY *= factor;
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}

kernel void kernel_exposure_adjust_nightvision(texture2d sourceYTexture [[texture(0)]],
                                               texture2d sourceUVTexture [[texture(1)]],
                                               texture2d destinationYTexture [[texture(2)]],
                                               texture2d destinationUVTexture [[texture(3)]],
                                               uint2 gid [[thread_position_in_grid]],
                                               uint2 size [[threads_per_grid]])
{
    float valueY = sourceYTexture.read(gid).r;
    float u = 0.5 - 0.368;
    float v = 0.5 - 0.291;
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(u, v, 0, 0), gid);
}

kernel void kernel_exposure_adjust_inverted_nightvision(texture2d sourceYTexture [[texture(0)]],
                                                        texture2d sourceUVTexture [[texture(1)]],
                                                        texture2d destinationYTexture [[texture(2)]],
                                                        texture2d destinationUVTexture [[texture(3)]],
                                                        uint2 gid [[thread_position_in_grid]],
                                                        uint2 size [[threads_per_grid]])
{
    float valueY = sourceYTexture.read(gid).r;
    valueY = 1.0 - valueY;
    float u = 0.5 - 0.368;
    float v = 0.5 - 0.291;
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(u, v, 0, 0), gid);
}


================================================
FILE: ManagedCapturer/ImageProcessing/SCExposureAdjustMetalRenderCommand.h
================================================
//
//  SCExposureAdjustMetalRenderCommand.h
//  Snapchat
//
//  Created by Michel Loenngren on 7/11/17.
//
//

#import "SCMetalModule.h"

#import

/*
 @class SCExposureAdjustMetalRenderCommand

 Prepares the command buffer for the SCExposureAdjustMetalModule.metal shader.
 */
@interface SCExposureAdjustMetalRenderCommand : SCMetalModule

@property (nonatomic, readonly) NSString *functionName;

@end
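How a render command like the one above gets used: SCMetalModule (declared later in this dump in SCMetalModule.h) takes a render command through its designated initializer and runs it as a processing module. A hedged wiring sketch; the variable names are illustrative assumptions.

// Hypothetical wiring sketch (not from this repository); the designated
// initializer matches SCMetalModule.h.
SCExposureAdjustMetalRenderCommand *renderCommand = [[SCExposureAdjustMetalRenderCommand alloc] init];
SCMetalModule *exposureModule = [[SCMetalModule alloc] initWithMetalRenderCommand:renderCommand];
// The module can then be handed to a processing pipeline (see
// SCProcessingPipelineBuilder elsewhere in this repository) to run per frame.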
// // #import "SCExposureAdjustMetalRenderCommand.h" #import "SCCameraTweaks.h" #import "SCMetalUtils.h" #import @import Metal; @implementation SCExposureAdjustMetalRenderCommand #pragma mark - SCMetalRenderCommand - (id)encodeMetalCommand:(id)commandBuffer pipelineState:(id)pipelineState textureResource:(SCMetalTextureResource *)textureResource { id commandEncoder = [commandBuffer computeCommandEncoder]; [commandEncoder setComputePipelineState:pipelineState]; #if !TARGET_IPHONE_SIMULATOR [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0]; [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1]; [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2]; [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3]; #endif return commandEncoder; } #pragma mark - SCMetalModuleFunctionProvider - (NSString *)functionName { if (SCCameraExposureAdjustmentMode() == 1) { return @"kernel_exposure_adjust"; } else if (SCCameraExposureAdjustmentMode() == 2) { return @"kernel_exposure_adjust_nightvision"; } else if (SCCameraExposureAdjustmentMode() == 3) { return @"kernel_exposure_adjust_inverted_nightvision"; } else { SCAssertFail(@"Incorrect value from SCCameraExposureAdjustmentMode() %ld", (long)SCCameraExposureAdjustmentMode()); return nil; } } - (BOOL)requiresDepthData { return NO; } - (NSString *)description { return [NSString sc_stringWithFormat:@"SCExposureAdjustMetalRenderCommand (shader function = %@)", self.functionName]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.h ================================================ // // SCExposureAdjustProcessingModule.h // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/1/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCProcessingModule.h" #import /** NOTE: If we start chaining multiple CIImage modules we should not run them back to back but instead in one CIImage pass as CoreImage will merge the shaders for best performance */ /* @class SCExposureAdjustProcessingModule This module use the CIExposureAdjust CIFilter to process the frames. It use the value provided by the SCDigitalExposurehandler as evValue (default is 0). */ @interface SCExposureAdjustProcessingModule : NSObject - (void)setEVValue:(CGFloat)value; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCExposureAdjustProcessingModule.m ================================================ // // SCExposureAdjustProcessingModule.m // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/1/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
// #import "SCExposureAdjustProcessingModule.h" #import "SCProcessingModuleUtils.h" @import CoreImage; @import CoreMedia; static const CGFloat kSCExposureAdjustProcessingModuleMaxEVValue = 2.0; @implementation SCExposureAdjustProcessingModule { CIContext *_context; CIFilter *_filter; CFMutableDictionaryRef _attributes; CVPixelBufferPoolRef _bufferPool; } - (instancetype)init { if (self = [super init]) { _context = [CIContext context]; _filter = [CIFilter filterWithName:@"CIExposureAdjust"]; [_filter setValue:@0.0 forKey:@"inputEV"]; } return self; } - (void)setEVValue:(CGFloat)value { CGFloat newEVValue = value * kSCExposureAdjustProcessingModuleMaxEVValue; [_filter setValue:@(newEVValue) forKey:@"inputEV"]; } - (void)dealloc { CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers); CVPixelBufferPoolRelease(_bufferPool); } - (BOOL)requiresDepthData { return NO; } - (CMSampleBufferRef)render:(RenderData)renderData { CMSampleBufferRef input = renderData.sampleBuffer; CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(input); CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer]; [_filter setValue:image forKey:kCIInputImageKey]; CIImage *result = [_filter outputImage]; return [SCProcessingModuleUtils sampleBufferFromImage:result oldSampleBuffer:input bufferPool:_bufferPool context:_context]; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCMetalModule.h ================================================ // // SCMetalModule.h // Snapchat // // Created by Michel Loenngren on 7/19/17. // // #import "SCMetalTextureResource.h" #import "SCMetalUtils.h" #import "SCProcessingModule.h" #import @protocol SCMetalModuleFunctionProvider @property (nonatomic, readonly) NSString *functionName; @end @protocol SCMetalRenderCommand /** Sets textures and parameters for the shader function. When implementing this function, the command encoder must be computed and the pipeline state set. That is, ensure that there are calls to: [commandBuffer computeCommandEncoder] and [commandEncoder setComputePipelineState:pipelineState]. */ #if !TARGET_IPHONE_SIMULATOR - (id)encodeMetalCommand:(id)commandBuffer pipelineState:(id)pipelineState textureResource:(SCMetalTextureResource *)textureResource; #endif - (BOOL)requiresDepthData; @end /** NOTE: If we start chaining multiple metal modules we should not run them back to back but instead chain different render passes. */ @interface SCMetalModule : NSObject // Designated initializer: SCMetalModule should always have a SCMetalRenderCommand - (instancetype)initWithMetalRenderCommand:(id)metalRenderCommand; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCMetalModule.m ================================================ // // SCMetalModule.m // Snapchat // // Created by Michel Loenngren on 7/19/17. 
// // #import "SCMetalModule.h" #import "SCCameraTweaks.h" #import #import @interface SCMetalModule () #if !TARGET_IPHONE_SIMULATOR @property (nonatomic, readonly) id library; @property (nonatomic, readonly) id device; @property (nonatomic, readonly) id function; @property (nonatomic, readonly) id computePipelineState; @property (nonatomic, readonly) id commandQueue; @property (nonatomic, readonly) CVMetalTextureCacheRef textureCache; #endif @end @implementation SCMetalModule { id _metalRenderCommand; } #if !TARGET_IPHONE_SIMULATOR @synthesize library = _library; @synthesize function = _function; @synthesize computePipelineState = _computePipelineState; @synthesize commandQueue = _commandQueue; @synthesize textureCache = _textureCache; #endif - (instancetype)initWithMetalRenderCommand:(id)metalRenderCommand { self = [super init]; if (self) { _metalRenderCommand = metalRenderCommand; } return self; } #pragma mark - SCProcessingModule - (CMSampleBufferRef)render:(RenderData)renderData { CMSampleBufferRef input = renderData.sampleBuffer; #if !TARGET_IPHONE_SIMULATOR id pipelineState = self.computePipelineState; SC_GUARD_ELSE_RETURN_VALUE(pipelineState, input); CVMetalTextureCacheRef textureCache = self.textureCache; SC_GUARD_ELSE_RETURN_VALUE(textureCache, input); id commandQueue = self.commandQueue; SC_GUARD_ELSE_RETURN_VALUE(commandQueue, input); SCMetalTextureResource *textureResource = [[SCMetalTextureResource alloc] initWithRenderData:renderData textureCache:textureCache device:self.device]; id commandBuffer = [commandQueue commandBuffer]; if (!_metalRenderCommand) { SCAssertFail(@"Metal module must be initialized with an SCMetalRenderCommand"); } id commandEncoder = [_metalRenderCommand encodeMetalCommand:commandBuffer pipelineState:pipelineState textureResource:textureResource]; NSUInteger w = pipelineState.threadExecutionWidth; NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w; MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1); MTLSize threadgroupsPerGrid = MTLSizeMake((textureResource.sourceYTexture.width + w - 1) / w, (textureResource.sourceYTexture.height + h - 1) / h, 1); [commandEncoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup]; [commandEncoder endEncoding]; [commandBuffer commit]; [commandBuffer waitUntilCompleted]; CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer); SCMetalCopyTexture(textureResource.destinationYTexture, imageBuffer, 0); SCMetalCopyTexture(textureResource.destinationUVTexture, imageBuffer, 1); #endif return input; } - (BOOL)requiresDepthData { return [_metalRenderCommand requiresDepthData]; } #pragma mark - Lazy properties #if !TARGET_IPHONE_SIMULATOR - (id)library { if (!_library) { NSString *libPath = [[NSBundle mainBundle] pathForResource:@"sccamera-default" ofType:@"metallib"]; NSError *error = nil; _library = [self.device newLibraryWithFile:libPath error:&error]; if (error) { SCLogGeneralError(@"Create metallib error: %@", error.description); } } return _library; } - (id)device { return SCGetManagedCaptureMetalDevice(); } - (id)function { return [self.library newFunctionWithName:[_metalRenderCommand functionName]]; } - (id)computePipelineState { if (!_computePipelineState) { NSError *error = nil; _computePipelineState = [self.device newComputePipelineStateWithFunction:self.function error:&error]; if (error) { SCLogGeneralError(@"Error while creating compute pipeline state %@", error.description); } } return _computePipelineState; } - (id)commandQueue { if 
(!_commandQueue) {
        _commandQueue = [self.device newCommandQueue];
    }
    return _commandQueue;
}

- (CVMetalTextureCacheRef)textureCache
{
    if (!_textureCache) {
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, self.device, nil, &_textureCache);
    }
    return _textureCache;
}
#endif

@end


================================================
FILE: ManagedCapturer/ImageProcessing/SCMetalTextureResource.h
================================================
//
//  SCMetalTextureResource.h
//  Snapchat
//
//  Created by Brian Ng on 11/7/17.
//

#import "SCProcessingModule.h"

#import "SCCapturerDefines.h"

#import

#if !TARGET_IPHONE_SIMULATOR
#import
#endif

/*
 @class SCMetalTextureResource
 The SCMetalTextureResource is created by SCMetalModule and is passed to a SCMetalRenderCommand.
 This resource provides a collection of textures for rendering, where a SCMetalRenderCommand
 selects which textures it needs. Textures are lazily initialized to optimize performance.
 Additionally, information pertaining to depth is provided if normalizing depth is desired:
 depthRange is the range of possible depth values [depthOffset, depthOffset + depthRange],
 where depthOffset is the min depth value in the given depth map.

 NOTE: This class is NOT thread safe -- ensure any calls are made by a performer by calling
 SCAssertPerformer before actually accessing any textures
 */
@interface SCMetalTextureResource : NSObject

#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLTexture> sourceYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceUVTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationYTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationUVTexture;
// Textures for SCDepthBlurMetalCommand
@property (nonatomic, readonly) id<MTLTexture> sourceBlurredYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceDepthTexture;
@property (nonatomic, readonly) id<MTLDevice> device;
#endif

// Available depth-related auxiliary resources (when depth data is provided)
@property (nonatomic, readonly) float depthRange;
@property (nonatomic, readonly) float depthOffset;
@property (nonatomic, readonly) CGFloat depthBlurForegroundThreshold;

@property (nonatomic, readonly) SampleBufferMetadata sampleBufferMetadata;

#if !TARGET_IPHONE_SIMULATOR
- (instancetype)initWithRenderData:(RenderData)renderData
                      textureCache:(CVMetalTextureCacheRef)textureCache
                            device:(id<MTLDevice>)device;
#endif

@end


================================================
FILE: ManagedCapturer/ImageProcessing/SCMetalTextureResource.m
================================================
//
//  SCMetalTextureResource.m
//  Snapchat
//
//  Created by Brian Ng on 11/7/17.
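// The dispatch arithmetic in -[SCMetalModule render:] above, restated as a
// self-contained helper (hypothetical name, same logic): the threadgroup is sized
// from the pipeline state, and the grid is rounded up with integer ceil-division
// so every texel of the texture is covered.

#import <Metal/Metal.h>

static void SCDispatchOverTexture(id<MTLComputeCommandEncoder> encoder,
                                  id<MTLComputePipelineState> pipelineState,
                                  NSUInteger textureWidth,
                                  NSUInteger textureHeight)
{
    // One SIMD-group-wide row of threads, stacked as high as the device allows.
    NSUInteger w = pipelineState.threadExecutionWidth;
    NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w;
    MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1);
    // (n + d - 1) / d rounds up, so edge texels get a (partially idle) threadgroup.
    MTLSize threadgroupsPerGrid =
        MTLSizeMake((textureWidth + w - 1) / w, (textureHeight + h - 1) / h, 1);
    [encoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup];
}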
// #import "SCMetalTextureResource.h" #import "SCCameraSettingUtils.h" #import "SCCameraTweaks.h" #import "SCMetalUtils.h" @import CoreImage; #if !TARGET_IPHONE_SIMULATOR static NSInteger const kSCFocusRectSize = 4; #endif @interface SCMetalTextureResource () #if !TARGET_IPHONE_SIMULATOR @property (nonatomic, readonly) CVMetalTextureCacheRef textureCache; #endif @end @implementation SCMetalTextureResource { RenderData _renderData; CVImageBufferRef _imageBuffer; CIContext *_context; } #if !TARGET_IPHONE_SIMULATOR @synthesize sourceYTexture = _sourceYTexture; @synthesize sourceUVTexture = _sourceUVTexture; @synthesize destinationYTexture = _destinationYTexture; @synthesize destinationUVTexture = _destinationUVTexture; @synthesize sourceBlurredYTexture = _sourceBlurredYTexture; @synthesize sourceDepthTexture = _sourceDepthTexture; @synthesize depthRange = _depthRange; @synthesize depthOffset = _depthOffset; @synthesize depthBlurForegroundThreshold = _depthBlurForegroundThreshold; @synthesize device = _device; @synthesize sampleBufferMetadata = _sampleBufferMetadata; - (instancetype)initWithRenderData:(RenderData)renderData textureCache:(CVMetalTextureCacheRef)textureCache device:(id)device { self = [super init]; if (self) { _imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer); _renderData = renderData; _textureCache = textureCache; _device = device; _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }]; } return self; } #endif #if !TARGET_IPHONE_SIMULATOR - (id)sourceYTexture { if (!_sourceYTexture) { CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); _sourceYTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache); CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); } return _sourceYTexture; } - (id)sourceUVTexture { if (!_sourceUVTexture) { CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); _sourceUVTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache); CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); } return _sourceUVTexture; } - (id)destinationYTexture { if (!_destinationYTexture) { MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0) height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0) mipmapped:NO]; textureDescriptor.usage |= MTLTextureUsageShaderWrite; _destinationYTexture = [_device newTextureWithDescriptor:textureDescriptor]; } return _destinationYTexture; } - (id)destinationUVTexture { if (!_destinationUVTexture) { MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 1) height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 1) mipmapped:NO]; textureDescriptor.usage |= MTLTextureUsageShaderWrite; _destinationUVTexture = [_device newTextureWithDescriptor:textureDescriptor]; } return _destinationUVTexture; } - (id)sourceBlurredYTexture { if (!_sourceBlurredYTexture) { MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0) height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0) mipmapped:NO]; textureDescriptor.usage |= MTLTextureUsageShaderWrite; _sourceBlurredYTexture = [_device 
newTextureWithDescriptor:textureDescriptor]; } return _sourceBlurredYTexture; } - (id)sourceDepthTexture { if (!_sourceDepthTexture) { CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); _sourceDepthTexture = SCMetalTextureFromPixelBuffer(_renderData.depthDataMap, 0, MTLPixelFormatR16Float, _textureCache); CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly); } return _sourceDepthTexture; } - (float)depthRange { if (_depthRange == 0) { // Get min/max values of depth image to normalize size_t bufferWidth = CVPixelBufferGetWidth(_renderData.depthDataMap); size_t bufferHeight = CVPixelBufferGetHeight(_renderData.depthDataMap); size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(_renderData.depthDataMap); CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly); unsigned char *pixelBufferPointer = CVPixelBufferGetBaseAddress(_renderData.depthDataMap); __fp16 *bufferPtr = (__fp16 *)pixelBufferPointer; uint32_t ptrInc = (int)bufferBytesPerRow / sizeof(__fp16); float depthMin = MAXFLOAT; float depthMax = -MAXFLOAT; for (int j = 0; j < bufferHeight; j++) { for (int i = 0; i < bufferWidth; i++) { float value = bufferPtr[i]; if (!isnan(value)) { depthMax = MAX(depthMax, value); depthMin = MIN(depthMin, value); } } bufferPtr += ptrInc; } CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly); _depthRange = depthMax - depthMin; _depthOffset = depthMin; } return _depthRange; } - (float)depthOffset { if (_depthRange == 0) { [self depthRange]; } return _depthOffset; } - (CGFloat)depthBlurForegroundThreshold { if (_renderData.depthBlurPointOfInterest) { CGPoint point = *_renderData.depthBlurPointOfInterest; CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:_renderData.depthDataMap]; CIVector *vector = [CIVector vectorWithX:point.x * CVPixelBufferGetWidth(_renderData.depthDataMap) - kSCFocusRectSize / 2 Y:point.y * CVPixelBufferGetHeight(_renderData.depthDataMap) - kSCFocusRectSize / 2 Z:kSCFocusRectSize W:kSCFocusRectSize]; CIImage *minMaxImage = [[disparityImage imageByClampingToExtent] imageByApplyingFilter:@"CIAreaMinMaxRed" withInputParameters:@{kCIInputExtentKey : vector}]; UInt8 pixel[4] = {0, 0, 0, 0}; [_context render:minMaxImage toBitmap:&pixel rowBytes:4 bounds:CGRectMake(0, 0, 1, 1) format:kCIFormatRGBA8 colorSpace:nil]; CGFloat disparity = pixel[1] / 255.0; CGFloat normalizedDisparity = (disparity - self.depthOffset) / self.depthRange; return normalizedDisparity; } else { return SCCameraTweaksDepthBlurForegroundThreshold(); } } - (SampleBufferMetadata)sampleBufferMetadata { SampleBufferMetadata sampleMetadata = { .isoSpeedRating = 0, .exposureTime = 0.033, .brightness = 0, }; retrieveSampleBufferMetadata(_renderData.sampleBuffer, &sampleMetadata); return sampleMetadata; } #endif @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalModule.metal ================================================ // // SCNightModeEnhancementMetalModule.metal // Snapchat // // Created by Chao Pang on 12/21/17. 
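// The scan behind -depthRange / -depthOffset above, isolated as a sketch
// (hypothetical helper; assumes a 16-bit float disparity/depth buffer such as
// kCVPixelFormatType_DisparityFloat16). Note that -sourceDepthTexture above locks
// _imageBuffer while it reads from _renderData.depthDataMap, which looks like a
// copy-paste slip; this sketch locks the buffer it actually reads. A texel can
// then be normalized as (value - min) / (max - min), the [depthOffset,
// depthOffset + depthRange] mapping described in SCMetalTextureResource.h.

#import <CoreVideo/CoreVideo.h>
#import <Foundation/Foundation.h>
#include <math.h>

static void SCDepthMapMinMax(CVPixelBufferRef depthMap, float *outMin, float *outMax)
{
    CVPixelBufferLockBaseAddress(depthMap, kCVPixelBufferLock_ReadOnly);
    size_t width = CVPixelBufferGetWidth(depthMap);
    size_t height = CVPixelBufferGetHeight(depthMap);
    // Rows can be padded, so advance by the stride in elements, not by width.
    size_t stride = CVPixelBufferGetBytesPerRow(depthMap) / sizeof(__fp16);
    const __fp16 *row = (const __fp16 *)CVPixelBufferGetBaseAddress(depthMap);
    float depthMin = MAXFLOAT;
    float depthMax = -MAXFLOAT;
    for (size_t j = 0; j < height; j++) {
        for (size_t i = 0; i < width; i++) {
            float value = row[i];
            if (!isnan(value)) { // NaN = no depth estimate for this texel
                depthMin = MIN(depthMin, value);
                depthMax = MAX(depthMax, value);
            }
        }
        row += stride;
    }
    CVPixelBufferUnlockBaseAddress(depthMap, kCVPixelBufferLock_ReadOnly);
    *outMin = depthMin; // corresponds to depthOffset above
    *outMax = depthMax; // corresponds to depthOffset + depthRange
}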
//
//

#include <metal_stdlib>
using namespace metal;

typedef struct SampleBufferMetadata {
    int isoSpeedRating;
    float exposureTime;
    float brightness;
} SampleBufferMetadata;

kernel void kernel_night_mode_enhancement(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                          texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                          texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                          texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                          constant SampleBufferMetadata &metaData [[buffer(0)]],
                                          uint2 gid [[thread_position_in_grid]],
                                          uint2 size [[threads_per_grid]])
{
    float valueY = sourceYTexture.read(gid).r;
    float2 valueUV = sourceUVTexture.read(gid).rg;
    // The darker the frame (lower EXIF brightness), the larger the gain,
    // clamped to [1.0, 1.3]; luma and chroma are scaled and re-clamped.
    float factor = 1.0 - metaData.brightness * 0.1;
    factor = max(min(factor, 1.3), 1.0);
    valueY = min(valueY * factor, 1.0);
    valueUV.rg = max(min((valueUV.rg - 0.5) * factor + 0.5, 1.0), 0.0);
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}


================================================
FILE: ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.h
================================================
//
//  SCNightModeEnhancementMetalRenderCommand.h
//  Snapchat
//
//  Created by Chao Pang on 12/21/17.
//

#import "SCMetalModule.h"

#import

/*
 Prepares the command buffer for the SCNightModeEnhancementMetalModule.metal.
 */
@interface SCNightModeEnhancementMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>

@property (nonatomic, readonly) NSString *functionName;

@end


================================================
FILE: ManagedCapturer/ImageProcessing/SCNightModeEnhancementMetalRenderCommand.m
================================================
//
//  SCNightModeEnhancementMetalRenderCommand.m
//  Snapchat
//
//  Created by Chao Pang on 12/21/17.
//

#import "SCNightModeEnhancementMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import

@import Metal;

@implementation SCNightModeEnhancementMetalRenderCommand

#pragma mark - SCMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
    SampleBufferMetadata sampleBufferMetadata = {
        .isoSpeedRating = textureResource.sampleBufferMetadata.isoSpeedRating,
        .exposureTime = textureResource.sampleBufferMetadata.exposureTime,
        .brightness = textureResource.sampleBufferMetadata.brightness,
    };
    id<MTLBuffer> metadataBuffer = [textureResource.device newBufferWithLength:sizeof(SampleBufferMetadata)
                                                                       options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(metadataBuffer.contents, &sampleBufferMetadata, sizeof(SampleBufferMetadata));
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
    [commandEncoder setBuffer:metadataBuffer offset:0 atIndex:0];
#endif
    return commandEncoder;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_night_mode_enhancement";
}

- (BOOL)requiresDepthData
{
    return NO;
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"SCNightModeEnhancementMetalRenderCommand (shader function = %@)", self.functionName];
}

@end


================================================
FILE: ManagedCapturer/ImageProcessing/SCProcessingModule.h
================================================
//
//
SCProcessingModule.h // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 5/30/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import #import #import typedef struct RenderData { CMSampleBufferRef sampleBuffer; CVPixelBufferRef depthDataMap; // Optional - for depth blur rendering CGPoint *depthBlurPointOfInterest; // Optional - for depth blur rendering } RenderData; /* @protocol SCProcessingModule A single module that is responsible for the actual image processing work. Multiple modules can be chained together by the SCProcessingPipelineBuilder and the frame can be passed through the entire SCProcessingPipeline. */ @protocol SCProcessingModule - (CMSampleBufferRef)render:(RenderData)renderData; // Needed to protect against depth data potentially being nil during the render pass - (BOOL)requiresDepthData; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.h ================================================ // // SCProcessingModuleUtils.h // Snapchat // // Created by Brian Ng on 11/10/17. // #import #import #import @interface SCProcessingModuleUtils : NSObject + (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image bufferPool:(CVPixelBufferPoolRef)bufferPool context:(CIContext *)context; + (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer bufferPool:(CVPixelBufferPoolRef)bufferPool context:(CIContext *)context; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.m ================================================ // // SCProcessingModuleUtils.m // Snapchat // // Created by Brian Ng on 11/10/17. // #import "SCProcessingModuleUtils.h" #import @import CoreImage; @implementation SCProcessingModuleUtils + (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image bufferPool:(CVPixelBufferPoolRef)bufferPool context:(CIContext *)context { CVReturn result; if (bufferPool == NULL) { NSDictionary *pixelAttributes = @{ (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *) kCVPixelBufferWidthKey : @(image.extent.size.width), (NSString *) kCVPixelBufferHeightKey : @(image.extent.size.height) }; result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef _Nullable)(pixelAttributes), &bufferPool); if (result != kCVReturnSuccess) { SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer pool %i", result); return NULL; } } CVPixelBufferRef resultBuffer = NULL; result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &resultBuffer); if (result == kCVReturnSuccess) { [context render:image toCVPixelBuffer:resultBuffer]; } else { SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer from pool %i", result); } return resultBuffer; } + (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer bufferPool:(CVPixelBufferPoolRef)bufferPool context:(CIContext *)context { CVPixelBufferRef pixelBuffer = [SCProcessingModuleUtils pixelBufferFromImage:image bufferPool:bufferPool context:context]; if (!pixelBuffer) { SCLogGeneralError(@"[Processing Pipeline] Error creating new pixel buffer from image"); return oldSampleBuffer; } CMSampleBufferRef newSampleBuffer = NULL; CMSampleTimingInfo timimgInfo = kCMTimingInfoInvalid; CMSampleBufferGetSampleTimingInfo(oldSampleBuffer, 0, &timimgInfo); 
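// (The timing info captured above is reattached below via
// CMSampleBufferCreateForImageBuffer, so the re-rendered frame keeps the original
// presentation/decode timestamps and stays in sync with the rest of the capture
// pipeline.)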
CMVideoFormatDescriptionRef videoInfo = NULL; OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo); if (status != noErr) { SCLogGeneralError(@"[Processing Pipeline] Error creating video format description %i", (int)status); CVPixelBufferRelease(pixelBuffer); return oldSampleBuffer; } status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timimgInfo, &newSampleBuffer); if (status != noErr) { SCLogGeneralError(@"[Processing Pipeline] Error creating CMSampleBuffer %i", (int)status); CVPixelBufferRelease(pixelBuffer); return oldSampleBuffer; } CVPixelBufferRelease(pixelBuffer); return newSampleBuffer; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingPipeline.h ================================================ // // SCProcessingPipeline.h // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 5/30/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCProcessingModule.h" #import /* @class SCProcessingPipeline The SCProcessingPipeline chains together a series of SCProcessingModules and passes the frame through each of them in a pre-determined order. This is done through a chain of command, where the resulting frame from the the first module is passed to the second, then to the third, etc. */ @interface SCProcessingPipeline : NSObject @property (nonatomic, strong) NSMutableArray> *processingModules; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingPipeline.m ================================================ // // SCProcessingPipeline.m // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 5/30/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCProcessingPipeline.h" #import @import CoreMedia; @implementation SCProcessingPipeline - (CMSampleBufferRef)render:(RenderData)renderData { for (id module in self.processingModules) { if (![module requiresDepthData] || ([module requiresDepthData] && renderData.depthDataMap)) { renderData.sampleBuffer = [module render:renderData]; } } return renderData.sampleBuffer; } - (NSString *)description { NSMutableString *desc = [NSMutableString new]; [desc appendString:@"ProcessingPipeline, modules: "]; for (id module in self.processingModules) { [desc appendFormat:@"%@, ", [module description]]; } if (self.processingModules.count > 0) { return [desc substringToIndex:desc.lengthOfCharacterSequences - 2]; } return desc; } - (BOOL)requiresDepthData { return NO; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.h ================================================ // // SCProcessingPipelineBuilder.h // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/1/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import @class SCDigitalExposureHandler; @class SCProcessingPipeline; /* @class SCProcessingPipelineBuilder The builder object is responsible for creating the SCProcessingPipeline, the underneath SCProcessingModules, and eventually chaining the SCProcessingModules together in a pre-determined order. The builder is also responsible for providing consumers with handler objects. 
*/ @interface SCProcessingPipelineBuilder : NSObject @property (nonatomic) BOOL useExposureAdjust; @property (nonatomic) BOOL portraitModeEnabled; @property (nonatomic) BOOL enhancedNightMode; - (SCProcessingPipeline *)build; @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCProcessingPipelineBuilder.m ================================================ // // SCProcessingPipelineBuilder.m // Snapchat // // Created by Yu-Kuan (Anthony) Lai on 6/1/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCProcessingPipelineBuilder.h" #import "SCCameraTweaks.h" #import "SCDepthBlurMetalRenderCommand.h" #import "SCDepthToGrayscaleMetalRenderCommand.h" #import "SCDigitalExposureHandler.h" #import "SCExposureAdjustMetalRenderCommand.h" #import "SCMetalUtils.h" #import "SCNightModeEnhancementMetalRenderCommand.h" #import "SCProcessingPipeline.h" @implementation SCProcessingPipelineBuilder - (SCProcessingPipeline *)build { if (!_useExposureAdjust && !_portraitModeEnabled && !_enhancedNightMode) { // in the future: && !useA && !useB ... return nil; } SCProcessingPipeline *processingPipeline = [[SCProcessingPipeline alloc] init]; NSMutableArray> *processingModules = [NSMutableArray array]; // order of adding module matters! if (_useExposureAdjust && SCDeviceSupportsMetal()) { // this check looks redundant right now, but when we have more modules it will be necessary SCMetalModule *exposureAdjustMetalModule = [[SCMetalModule alloc] initWithMetalRenderCommand:[SCExposureAdjustMetalRenderCommand new]]; [processingModules addObject:exposureAdjustMetalModule]; } if (_portraitModeEnabled) { id renderCommand = SCCameraTweaksDepthToGrayscaleOverride() ? [SCDepthToGrayscaleMetalRenderCommand new] : [SCDepthBlurMetalRenderCommand new]; SCMetalModule *depthBlurMetalModule = [[SCMetalModule alloc] initWithMetalRenderCommand:renderCommand]; [processingModules addObject:depthBlurMetalModule]; } if (_enhancedNightMode && SCDeviceSupportsMetal()) { SCMetalModule *nightModeEnhancementModule = [[SCMetalModule alloc] initWithMetalRenderCommand:[SCNightModeEnhancementMetalRenderCommand new]]; [processingModules addObject:nightModeEnhancementModule]; } processingPipeline.processingModules = processingModules; return processingPipeline; } @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.h ================================================ // // SCStillImageDepthBlurFilter.h // Snapchat // // Created by Brian Ng on 10/11/17. // #import "SCProcessingModule.h" #import /* @class SCStillImageDepthBlurFilter This module uses the CIDepthBlurEffect CIFilter that uses rgb and depth information to produce an image with the portrait mode effect (background blurred, foreground sharp). */ @interface SCStillImageDepthBlurFilter : NSObject // Applies the CIDepthBlurEffect filter to a still image capture photo. If an error occured, the original // photoData will be returned - (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0); @end ================================================ FILE: ManagedCapturer/ImageProcessing/SCStillImageDepthBlurFilter.m ================================================ // // SCStillImageDepthBlurFilter.m // Snapchat // // Created by Brian Ng on 10/11/17. 
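// For reference, a sketch of how a caller might drive the builder above
// (hypothetical function; the flags are the real properties from
// SCProcessingPipelineBuilder.h, and -build returns nil when nothing is enabled).

#import "SCProcessingPipeline.h"
#import "SCProcessingPipelineBuilder.h"

static SCProcessingPipeline *SCMakeLowLightPipeline(void)
{
    SCProcessingPipelineBuilder *builder = [[SCProcessingPipelineBuilder alloc] init];
    builder.useExposureAdjust = YES; // CIExposureAdjust-backed SCMetalModule
    builder.enhancedNightMode = YES; // kernel_night_mode_enhancement SCMetalModule
    // Module order matters (see the comment in -build); each frame then flows
    // through -[SCProcessingPipeline render:] module by module.
    return [builder build];
}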
// #import "SCStillImageDepthBlurFilter.h" #import "SCCameraTweaks.h" #import "SCProcessingModuleUtils.h" @import CoreMedia; @implementation SCStillImageDepthBlurFilter { CIContext *_context; CIFilter *_filter; CVPixelBufferPoolRef _bufferPool; } - (instancetype)init { if (self = [super init]) { _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }]; _filter = [CIFilter filterWithName:@"CIDepthBlurEffect"]; } return self; } - (void)dealloc { CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers); CVPixelBufferPoolRelease(_bufferPool); } - (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0) { CIImage *mainImage = [CIImage imageWithData:photoData]; CVPixelBufferRef disparityImagePixelBuffer = renderData.depthDataMap; CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:disparityImagePixelBuffer]; if (!disparityImage) { return photoData; } [_filter setValue:mainImage forKey:kCIInputImageKey]; [_filter setValue:disparityImage forKey:kCIInputDisparityImageKey]; if (renderData.depthBlurPointOfInterest && SCCameraTweaksEnableFilterInputFocusRect()) { CGPoint pointOfInterest = *renderData.depthBlurPointOfInterest; [_filter setValue:[CIVector vectorWithX:pointOfInterest.x Y:pointOfInterest.y Z:1 W:1] forKey:@"inputFocusRect"]; } CIImage *result = [_filter outputImage]; if (!result) { return photoData; } CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB(); NSData *processedPhotoData = [_context JPEGRepresentationOfImage:result colorSpace:deviceRGBColorSpace options:@{}]; CGColorSpaceRelease(deviceRGBColorSpace); if (!processedPhotoData) { return photoData; } renderData.sampleBuffer = [SCProcessingModuleUtils sampleBufferFromImage:result oldSampleBuffer:renderData.sampleBuffer bufferPool:_bufferPool context:_context]; return processedPhotoData; } @end ================================================ FILE: ManagedCapturer/NSURL+Asset.h ================================================ // // NSURL+NSURL_Asset.h // Snapchat // // Created by Michel Loenngren on 4/30/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import @interface NSURL (Asset) /** In case the media server is reset while recording AVFoundation gets in a weird state. Even though we reload our AVFoundation object we still need to reload the assetkeys on the outputfile. If we don't the AVAssetWriter will fail when started. */ - (void)reloadAssetKeys; @end ================================================ FILE: ManagedCapturer/NSURL+Asset.m ================================================ // // NSURL+NSURL_Asset.m // Snapchat // // Created by Michel Loenngren on 4/30/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "NSURL+Asset.h" #import @import AVFoundation; @implementation NSURL (Asset) - (void)reloadAssetKeys { AVAsset *videoAsset = [AVAsset assetWithURL:self]; [videoAsset loadValuesAsynchronouslyForKeys:@[ @keypath(videoAsset.duration) ] completionHandler:nil]; } @end ================================================ FILE: ManagedCapturer/OWNERS ================================================ --- !OWNERS version: 2 default: jira_project: CCAM owners: num_required_reviewers: 0 teams: - Snapchat/core-camera-ios ================================================ FILE: ManagedCapturer/SCAudioCaptureSession.h ================================================ // // SCAudioCaptureSession.h // Snapchat // // Created by Liu Liu on 3/5/15. // Copyright (c) 2015 Snapchat, Inc. 
All rights reserved. // #import #import extern double const kSCAudioCaptureSessionDefaultSampleRate; typedef void (^audio_capture_session_block)(NSError *error); @protocol SCAudioCaptureSession; @protocol SCAudioCaptureSessionDelegate - (void)audioCaptureSession:(id)audioCaptureSession didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer; @end @protocol SCAudioCaptureSession @property (nonatomic, weak) id delegate; // Return detail informantions dictionary if error occured, else return nil - (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate completionHandler:(audio_capture_session_block)completionHandler; - (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler; @end @interface SCAudioCaptureSession : NSObject @end ================================================ FILE: ManagedCapturer/SCAudioCaptureSession.m ================================================ // // SCAudioCaptureSession.m // Snapchat // // Created by Liu Liu on 3/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "SCAudioCaptureSession.h" #import #import #import #import #import #import @import AVFoundation; double const kSCAudioCaptureSessionDefaultSampleRate = 44100; NSString *const SCAudioCaptureSessionErrorDomain = @"SCAudioCaptureSessionErrorDomain"; static NSInteger const kNumberOfAudioBuffersInQueue = 15; static float const kAudioBufferDurationInSeconds = 0.2; static char *const kSCAudioCaptureSessionQueueLabel = "com.snapchat.audio-capture-session"; @implementation SCAudioCaptureSession { SCQueuePerformer *_performer; AudioQueueRef _audioQueue; AudioQueueBufferRef _audioQueueBuffers[kNumberOfAudioBuffersInQueue]; CMAudioFormatDescriptionRef _audioFormatDescription; } @synthesize delegate = _delegate; - (instancetype)init { SCTraceStart(); self = [super init]; if (self) { _performer = [[SCQueuePerformer alloc] initWithLabel:kSCAudioCaptureSessionQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; } return self; } - (void)dealloc { [self disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL]; } static AudioStreamBasicDescription setupAudioFormat(UInt32 inFormatID, Float64 sampleRate) { SCTraceStart(); AudioStreamBasicDescription recordFormat = {0}; recordFormat.mSampleRate = sampleRate; recordFormat.mChannelsPerFrame = (UInt32)[SCAudioSession sharedInstance].inputNumberOfChannels; recordFormat.mFormatID = inFormatID; if (inFormatID == kAudioFormatLinearPCM) { // if we want pcm, default to signed 16-bit little-endian recordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; recordFormat.mBitsPerChannel = 16; recordFormat.mBytesPerPacket = recordFormat.mBytesPerFrame = (recordFormat.mBitsPerChannel / 8) * recordFormat.mChannelsPerFrame; recordFormat.mFramesPerPacket = 1; } return recordFormat; } static int computeRecordBufferSize(const AudioStreamBasicDescription *format, const AudioQueueRef audioQueue, float seconds) { SCTraceStart(); int packets, frames, bytes = 0; frames = (int)ceil(seconds * format->mSampleRate); if (format->mBytesPerFrame > 0) { bytes = frames * format->mBytesPerFrame; } else { UInt32 maxPacketSize; if (format->mBytesPerPacket > 0) maxPacketSize = format->mBytesPerPacket; // constant packet size else { UInt32 propertySize = sizeof(maxPacketSize); AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize, &propertySize); } if (format->mFramesPerPacket > 0) 
packets = frames / format->mFramesPerPacket; else packets = frames; // worst-case scenario: 1 frame in a packet if (packets == 0) // sanity check packets = 1; bytes = packets * maxPacketSize; } return bytes; } static NSTimeInterval machHostTimeToSeconds(UInt64 mHostTime) { static dispatch_once_t onceToken; static mach_timebase_info_data_t timebase_info; dispatch_once(&onceToken, ^{ (void)mach_timebase_info(&timebase_info); }); return (double)mHostTime * timebase_info.numer / timebase_info.denom / NSEC_PER_SEC; } static void audioQueueBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer, const AudioTimeStamp *nStartTime, UInt32 inNumPackets, const AudioStreamPacketDescription *inPacketDesc) { SCTraceStart(); SCAudioCaptureSession *audioCaptureSession = (__bridge SCAudioCaptureSession *)inUserData; if (inNumPackets > 0) { CMTime PTS = CMTimeMakeWithSeconds(machHostTimeToSeconds(nStartTime->mHostTime), 600); [audioCaptureSession appendAudioQueueBuffer:inBuffer numPackets:inNumPackets PTS:PTS packetDescriptions:inPacketDesc]; } AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL); } - (void)appendAudioQueueBuffer:(AudioQueueBufferRef)audioQueueBuffer numPackets:(UInt32)numPackets PTS:(CMTime)PTS packetDescriptions:(const AudioStreamPacketDescription *)packetDescriptions { SCTraceStart(); CMBlockBufferRef dataBuffer = NULL; CMBlockBufferCreateWithMemoryBlock(NULL, NULL, audioQueueBuffer->mAudioDataByteSize, NULL, NULL, 0, audioQueueBuffer->mAudioDataByteSize, 0, &dataBuffer); if (dataBuffer) { CMBlockBufferReplaceDataBytes(audioQueueBuffer->mAudioData, dataBuffer, 0, audioQueueBuffer->mAudioDataByteSize); CMSampleBufferRef sampleBuffer = NULL; CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _audioFormatDescription, numPackets, PTS, packetDescriptions, &sampleBuffer); if (sampleBuffer) { [self processAudioSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); } CFRelease(dataBuffer); } } - (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCTraceStart(); [_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer]; } - (NSError *)_generateErrorForType:(NSString *)errorType errorCode:(int)errorCode format:(AudioStreamBasicDescription)format { NSDictionary *errorInfo = @{ @"error_type" : errorType, @"error_code" : @(errorCode), @"record_format" : @{ @"format_id" : @(format.mFormatID), @"format_flags" : @(format.mFormatFlags), @"sample_rate" : @(format.mSampleRate), @"bytes_per_packet" : @(format.mBytesPerPacket), @"frames_per_packet" : @(format.mFramesPerPacket), @"bytes_per_frame" : @(format.mBytesPerFrame), @"channels_per_frame" : @(format.mChannelsPerFrame), @"bits_per_channel" : @(format.mBitsPerChannel) } }; SCLogGeneralInfo(@"Audio queue error occured. 
ErrorInfo: %@", errorInfo); return [NSError errorWithDomain:SCAudioCaptureSessionErrorDomain code:errorCode userInfo:errorInfo]; } - (NSError *)beginAudioRecordingWithSampleRate:(Float64)sampleRate { SCTraceStart(); if ([SCAudioSession sharedInstance].inputAvailable) { // SCAudioSession should be activated already SCTraceSignal(@"Set audio session to be active"); AudioStreamBasicDescription recordFormat = setupAudioFormat(kAudioFormatLinearPCM, sampleRate); OSStatus audioQueueCreationStatus = AudioQueueNewInput(&recordFormat, audioQueueBufferHandler, (__bridge void *)self, NULL, NULL, 0, &_audioQueue); if (audioQueueCreationStatus != 0) { NSError *error = [self _generateErrorForType:@"audio_queue_create_error" errorCode:audioQueueCreationStatus format:recordFormat]; return error; } SCTraceSignal(@"Initialize audio queue with new input"); UInt32 bufferByteSize = computeRecordBufferSize( &recordFormat, _audioQueue, kAudioBufferDurationInSeconds); // Enough bytes for half a second for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) { AudioQueueAllocateBuffer(_audioQueue, bufferByteSize, &_audioQueueBuffers[i]); AudioQueueEnqueueBuffer(_audioQueue, _audioQueueBuffers[i], 0, NULL); } SCTraceSignal(@"Allocate audio buffer"); UInt32 size = sizeof(recordFormat); audioQueueCreationStatus = AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_StreamDescription, &recordFormat, &size); if (0 != audioQueueCreationStatus) { NSError *error = [self _generateErrorForType:@"audio_queue_get_property_error" errorCode:audioQueueCreationStatus format:recordFormat]; [self disposeAudioRecording]; return error; } SCTraceSignal(@"Audio queue sample rate %lf", recordFormat.mSampleRate); AudioChannelLayout acl; bzero(&acl, sizeof(acl)); acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; audioQueueCreationStatus = CMAudioFormatDescriptionCreate(NULL, &recordFormat, sizeof(acl), &acl, 0, NULL, NULL, &_audioFormatDescription); if (0 != audioQueueCreationStatus) { NSError *error = [self _generateErrorForType:@"audio_queue_audio_format_error" errorCode:audioQueueCreationStatus format:recordFormat]; [self disposeAudioRecording]; return error; } SCTraceSignal(@"Start audio queue"); audioQueueCreationStatus = AudioQueueStart(_audioQueue, NULL); if (0 != audioQueueCreationStatus) { NSError *error = [self _generateErrorForType:@"audio_queue_start_error" errorCode:audioQueueCreationStatus format:recordFormat]; [self disposeAudioRecording]; return error; } } return nil; } - (void)disposeAudioRecording { SCTraceStart(); SCLogGeneralInfo(@"dispose audio recording"); if (_audioQueue) { AudioQueueStop(_audioQueue, true); AudioQueueDispose(_audioQueue, true); for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) { _audioQueueBuffers[i] = NULL; } _audioQueue = NULL; } if (_audioFormatDescription) { CFRelease(_audioFormatDescription); _audioFormatDescription = NULL; } } #pragma mark - Public methods - (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate completionHandler:(audio_capture_session_block)completionHandler { SCTraceStart(); // Request audio session change for recording mode. 
[_performer perform:^{ SCTraceStart(); NSError *error = [self beginAudioRecordingWithSampleRate:sampleRate]; if (completionHandler) { completionHandler(error); } }]; } - (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler { SCTraceStart(); [_performer performAndWait:^{ SCTraceStart(); [self disposeAudioRecording]; if (completionHandler) { completionHandler(); } }]; } @end ================================================ FILE: ManagedCapturer/SCCameraSettingUtils.h ================================================ // // SCCameraSettingUtils.h // Snapchat // // Created by Pinlin Chen on 12/09/2017. // #import #import #import #import SC_EXTERN_C_BEGIN // Return the value if metadata attribute is found; otherwise, return nil extern NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments); extern NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments); extern NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments); extern void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata); SC_EXTERN_C_END ================================================ FILE: ManagedCapturer/SCCameraSettingUtils.m ================================================ // // SCCameraSettingUtils.m // Snapchat // // Created by Pinlin Chen on 12/09/2017. // #import "SCCameraSettingUtils.h" #import #import NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments) { if (!exifAttachments) { return nil; } id value = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifExposureTime); // Fetching exposure time from the sample buffer if ([value isKindOfClass:[NSNumber class]]) { return (NSNumber *)value; } return nil; } NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments) { if (!exifAttachments) { return nil; } id value = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue); if ([value isKindOfClass:[NSNumber class]]) { return (NSNumber *)value; } return nil; } NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments) { if (!exifAttachments) { return nil; } NSArray *ISOSpeedRatings = CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifISOSpeedRatings); if ([ISOSpeedRatings respondsToSelector:@selector(count)] && [ISOSpeedRatings respondsToSelector:@selector(firstObject)] && ISOSpeedRatings.count > 0) { id value = [ISOSpeedRatings firstObject]; if ([value isKindOfClass:[NSNumber class]]) { return (NSNumber *)value; } } return nil; } void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata) { CFDictionaryRef exifAttachments = CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL); if (exifAttachments == nil) { SCLogCoreCameraWarning(@"SampleBuffer exifAttachment is nil"); } // Fetching exposure time from the sample buffer NSNumber *currentExposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); if (currentExposureTimeNum) { metadata->exposureTime = [currentExposureTimeNum floatValue]; } NSNumber *currentISOSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); if (currentISOSpeedRatingNum) { metadata->isoSpeedRating = (int)[currentISOSpeedRatingNum integerValue]; } NSNumber *currentBrightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); if (currentBrightnessNum) { float currentBrightness = [currentBrightnessNum floatValue]; if (isfinite(currentBrightness)) { 
metadata->brightness = currentBrightness; } else { metadata->brightness = 0; } } } ================================================ FILE: ManagedCapturer/SCCaptureCommon.h ================================================ // // SCCaptureCommon.h // Snapchat // // Created by Lin Jia on 9/29/17. // // #import "SCManagedCaptureDevice.h" #import "SCManagedDeviceCapacityAnalyzerListener.h" #import "SCVideoCaptureSessionInfo.h" #import #import #import @class SCManagedCapturerState; @class SCManagedLensesProcessor; @class SCManagedVideoDataSource; @class SCManagedVideoCapturerOutputSettings; @class SCLens; @class SCLensCategory; @class SCLookseryFilterFactory; @class SCSnapScannedData; @class SCCraftResourceManager; @class SCScanConfiguration; @class SCCapturerToken; @class SCProcessingPipeline; @class SCTimedTask; @protocol SCManagedSampleBufferDisplayController; typedef void (^sc_managed_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state); typedef void (^sc_managed_capturer_capture_video_frame_completion_handler_t)(UIImage *image); typedef void (^sc_managed_capturer_start_recording_completion_handler_t)(SCVideoCaptureSessionInfo session, NSError *error); typedef void (^sc_managed_capturer_convert_view_coordniates_completion_handler_t)(CGPoint pointOfInterest); typedef void (^sc_managed_capturer_unsafe_changes_t)(AVCaptureSession *session, AVCaptureDevice *front, AVCaptureDeviceInput *frontInput, AVCaptureDevice *back, AVCaptureDeviceInput *backInput, SCManagedCapturerState *state); typedef void (^sc_managed_capturer_stop_running_completion_handler_t)(BOOL succeed); typedef void (^sc_managed_capturer_scan_results_handler_t)(NSObject *resultObject); typedef void (^sc_managed_lenses_processor_category_point_completion_handler_t)(SCLensCategory *category, NSInteger categoriesCount); extern CGFloat const kSCManagedCapturerAspectRatioUnspecified; extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth; extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight; extern CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p; extern CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p; extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth; extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight; extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth; extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight; extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth; extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight; ================================================ FILE: ManagedCapturer/SCCaptureCommon.m ================================================ // // SCCaptureCommon.m // Snapchat // // Created by Lin Jia on 9/29/17. 
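// A sketch of a typical call site for the EXIF helpers above (hypothetical
// function; the defaults mirror the ones SCMetalTextureResource uses before
// calling retrieveSampleBufferMetadata).

#import "SCCameraSettingUtils.h"

#import <CoreMedia/CoreMedia.h>

static SampleBufferMetadata SCReadSampleBufferMetadata(CMSampleBufferRef sampleBuffer)
{
    SampleBufferMetadata metadata = {
        .isoSpeedRating = 0,
        .exposureTime = 0.033, // ~1/30s fallback when the EXIF attachment is missing
        .brightness = 0,
    };
    // Overwrites only the fields actually present in the frame's
    // kCGImagePropertyExifDictionary attachment.
    retrieveSampleBufferMetadata(sampleBuffer, &metadata);
    return metadata;
}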
// // #import "SCCaptureCommon.h" CGFloat const kSCManagedCapturerAspectRatioUnspecified = -1.0; CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth = 1280; CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight = 720; CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p = 1920; CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p = 1080; CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth = 2592; CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight = 1936; CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth = 640; CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight = 480; CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth = 1280; CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight = 720; ================================================ FILE: ManagedCapturer/SCCaptureCoreImageFaceDetector.h ================================================ // // SCCaptureCoreImageFaceDetector.h // Snapchat // // Created by Jiyang Zhu on 3/27/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class is intended to detect faces in Camera. It receives CMSampleBuffer, process the face detection using // CIDetector, and announce the bounds and faceIDs. #import "SCCaptureFaceDetector.h" #import #import #import @interface SCCaptureCoreImageFaceDetector : NSObject SC_INIT_AND_NEW_UNAVAILABLE; @end ================================================ FILE: ManagedCapturer/SCCaptureCoreImageFaceDetector.m ================================================ // // SCCaptureCoreImageFaceDetector.m // Snapchat // // Created by Jiyang Zhu on 3/27/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // #import "SCCaptureCoreImageFaceDetector.h" #import "SCCameraTweaks.h" #import "SCCaptureFaceDetectionParser.h" #import "SCCaptureFaceDetectorTrigger.h" #import "SCCaptureResource.h" #import "SCManagedCapturer.h" #import #import #import #import #import #import #import #import @import ImageIO; static const NSTimeInterval kSCCaptureCoreImageFaceDetectorMaxAllowedLatency = 1; // Drop the face detection result if it is 1 second late. static const NSInteger kDefaultNumberOfSequentialOutputSampleBuffer = -1; // -1 means no sequential sample buffers. 
static char *const kSCCaptureCoreImageFaceDetectorProcessQueue = "com.snapchat.capture-core-image-face-detector-process"; @implementation SCCaptureCoreImageFaceDetector { CIDetector *_detector; SCCaptureResource *_captureResource; BOOL _isDetecting; BOOL _hasDetectedFaces; NSInteger _numberOfSequentialOutputSampleBuffer; NSUInteger _detectionFrequency; NSDictionary *_detectorOptions; SCManagedCaptureDevicePosition _devicePosition; CIContext *_context; SCQueuePerformer *_callbackPerformer; SCQueuePerformer *_processPerformer; SCCaptureFaceDetectionParser *_parser; SCCaptureFaceDetectorTrigger *_trigger; } @synthesize trigger = _trigger; @synthesize parser = _parser; - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { SCTraceODPCompatibleStart(2); self = [super init]; if (self) { SCAssert(captureResource, @"SCCaptureResource should not be nil"); SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil"); _callbackPerformer = captureResource.queuePerformer; _captureResource = captureResource; _parser = [[SCCaptureFaceDetectionParser alloc] initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)]; _processPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureCoreImageFaceDetectorProcessQueue qualityOfService:QOS_CLASS_USER_INITIATED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _detectionFrequency = SCExperimentWithFaceDetectionFrequency(); _devicePosition = captureResource.device.position; _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self]; } return self; } - (void)_setupDetectionIfNeeded { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(!_detector); if (!_context) { _context = [CIContext context]; } // For CIDetectorMinFeatureSize, the valid range is [0.0100, 0.5000], otherwise, it will cause a crash. 
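// (CIDetectorTracking is what makes CIFaceFeature trackingIDs available, which the
// parser uses to key bounds by faceID; CIDetectorMaxFeatureCount caps the work at
// two faces, and accuracy stays at CIDetectorAccuracyLow, presumably because these
// coarse bounds only steer focus/exposure.)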
if (!_detectorOptions) { _detectorOptions = @{ CIDetectorAccuracy : CIDetectorAccuracyLow, CIDetectorTracking : @(YES), CIDetectorMaxFeatureCount : @(2), CIDetectorMinFeatureSize : @(SCCameraFaceFocusMinFaceSize()), CIDetectorNumberOfAngles : @(3) }; } @try { _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:_context options:_detectorOptions]; } @catch (NSException *exception) { SCLogCoreCameraError(@"Failed to create CIDetector with exception:%@", exception); } } - (void)_resetDetection { SCTraceODPCompatibleStart(2); _detector = nil; [self _setupDetectionIfNeeded]; } - (SCQueuePerformer *)detectionPerformer { return _processPerformer; } - (void)startDetection { SCTraceODPCompatibleStart(2); SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue."); [self _setupDetectionIfNeeded]; _isDetecting = YES; _hasDetectedFaces = NO; _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer; } - (void)stopDetection { SCTraceODPCompatibleStart(2); SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue."); _isDetecting = NO; } - (NSDictionary *)_detectFaceFeaturesInImage:(CIImage *)image withOrientation:(CGImagePropertyOrientation)orientation { SCTraceODPCompatibleStart(2); NSDictionary *opts = @{ CIDetectorImageOrientation : @(orientation), CIDetectorEyeBlink : @(NO), CIDetectorSmile : @(NO) }; NSArray *features = [_detector featuresInImage:image options:opts]; return [_parser parseFaceBoundsByFaceIDFromCIFeatures:features withImageSize:image.extent.size imageOrientation:orientation]; } #pragma mark - SCManagedVideoDataSourceListener - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(_isDetecting); // Reset detection if the device position changes. Resetting detection should execute in _processPerformer, so we // just set a flag here, and then do it later in the perform block. BOOL shouldForceResetDetection = NO; if (devicePosition != _devicePosition) { _devicePosition = devicePosition; shouldForceResetDetection = YES; _numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer; } _numberOfSequentialOutputSampleBuffer++; SC_GUARD_ELSE_RETURN(_numberOfSequentialOutputSampleBuffer % _detectionFrequency == 0); @weakify(self); CFRetain(sampleBuffer); [_processPerformer perform:^{ SCTraceStart(); @strongify(self); SC_GUARD_ELSE_RETURN(self); if (shouldForceResetDetection) { // Resetting detection usually costs no more than 1ms. [self _resetDetection]; } CGImagePropertyOrientation orientation = (devicePosition == SCManagedCaptureDevicePositionBack ? kCGImagePropertyOrientationRight : kCGImagePropertyOrientationLeftMirrored); CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)]; NSDictionary *faceBoundsByFaceID = [self _detectFaceFeaturesInImage:image withOrientation:orientation]; // Calculate the latency for face detection, if it is too long, discard the face detection results. 
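// (CACurrentMediaTime() and the capture presentation timestamps share the mach
// host time base, so the subtraction below measures how long ago this frame left
// the camera; anything older than 1 second is considered stale and dropped.)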
NSTimeInterval latency = CACurrentMediaTime() - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); CFRelease(sampleBuffer); if (latency >= kSCCaptureCoreImageFaceDetectorMaxAllowedLatency) { faceBoundsByFaceID = nil; } // Only announce face detection result if faceBoundsByFaceID is not empty, or faceBoundsByFaceID was not empty // last time. if (faceBoundsByFaceID.count > 0 || self->_hasDetectedFaces) { self->_hasDetectedFaces = faceBoundsByFaceID.count > 0; [self->_callbackPerformer perform:^{ [self->_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didDetectFaceBounds:faceBoundsByFaceID]; }]; } }]; } @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceAuthorization.h ================================================ // // SCCaptureDeviceAuthorization.h // Snapchat // // Created by Xiaomu Wu on 8/19/14. // Copyright (c) 2014 Snapchat, Inc. All rights reserved. // #import @interface SCCaptureDeviceAuthorization : NSObject // Methods for checking / requesting authorization to use media capture devices of a given type. + (BOOL)notDeterminedForMediaType:(NSString *)mediaType; + (BOOL)deniedForMediaType:(NSString *)mediaType; + (BOOL)restrictedForMediaType:(NSString *)mediaType; + (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler; // Convenience methods for media type == AVMediaTypeVideo + (BOOL)notDeterminedForVideoCapture; + (BOOL)deniedForVideoCapture; + (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler; @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceAuthorization.m ================================================ // // SCCaptureDeviceAuthorization.m // Snapchat // // Created by Xiaomu Wu on 8/19/14. // Copyright (c) 2014 Snapchat, Inc. All rights reserved. 
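// A sketch of a call site for the convenience API above (hypothetical helper;
// presenting the Settings prompt is left to the caller).

#import "SCCaptureDeviceAuthorization.h"

#import <Foundation/Foundation.h>

static void SCEnsureVideoCaptureAccess(dispatch_block_t openSettingsPrompt)
{
    if ([SCCaptureDeviceAuthorization notDeterminedForVideoCapture]) {
        // First launch: triggers the OS prompt (and, per the implementation
        // below, logs the user's response).
        [SCCaptureDeviceAuthorization requestAccessForVideoCaptureWithCompletionHandler:^(BOOL granted) {
            if (!granted && openSettingsPrompt) {
                openSettingsPrompt();
            }
        }];
    } else if ([SCCaptureDeviceAuthorization deniedForVideoCapture]) {
        // Previously denied: access can only be restored from Settings.
        if (openSettingsPrompt) {
            openSettingsPrompt();
        }
    }
}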
// #import "SCCaptureDeviceAuthorization.h" #import #import #import @import AVFoundation; @implementation SCCaptureDeviceAuthorization #pragma mark - Public + (BOOL)notDeterminedForMediaType:(NSString *)mediaType { return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusNotDetermined; } + (BOOL)deniedForMediaType:(NSString *)mediaType { return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusDenied; } + (BOOL)restrictedForMediaType:(NSString *)mediaType { return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusRestricted; } + (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler { [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler]; } #pragma mark - Convenience methods for AVMediaTypeVideo + (BOOL)notDeterminedForVideoCapture { return [self notDeterminedForMediaType:AVMediaTypeVideo]; } + (BOOL)deniedForVideoCapture { return [self deniedForMediaType:AVMediaTypeVideo]; } + (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler { BOOL firstTimeAsking = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined; [self requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) { if (firstTimeAsking) { SCAPermissionPromptResponse *responseEvent = [[SCAPermissionPromptResponse alloc] init]; [responseEvent setPermissionPromptType:SCAPermissionPromptType_OS_CAMERA]; [responseEvent setAccepted:granted]; [[SCLogger sharedInstance] logUserTrackedEvent:responseEvent]; } if (handler) { handler(granted); } }]; } @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h ================================================ // // SCCaptureDeviceAuthorizationChecker.h // Snapchat // // Created by Sun Lei on 15/03/2018. // @class SCQueuePerformer; #import #import /* In general, the function of SCCaptureDeviceAuthorizationChecker is to speed up the checking of AVMediaTypeVideo authorization. It would cache the authorization value. 'preloadVideoCaptureAuthorization' would be called very early after the app is launched to populate the cached value. 'authorizedForVideoCapture' could be called to get the value synchronously. */ @interface SCCaptureDeviceAuthorizationChecker : NSObject SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithPerformer:(SCQueuePerformer *)performer NS_DESIGNATED_INITIALIZER; - (BOOL)authorizedForVideoCapture; - (void)preloadVideoCaptureAuthorization; @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m ================================================ // // SCCaptureDeviceAuthorizationChecker.m // Snapchat // // Created by Sun Lei on 15/03/2018. 
// #import "SCCaptureDeviceAuthorizationChecker.h" #import #import @import AVFoundation; @interface SCCaptureDeviceAuthorizationChecker () { SCQueuePerformer *_performer; BOOL _videoCaptureAuthorizationCachedValue; } @end @implementation SCCaptureDeviceAuthorizationChecker - (instancetype)initWithPerformer:(SCQueuePerformer *)performer { SCTraceODPCompatibleStart(2); self = [super init]; if (self) { _performer = performer; _videoCaptureAuthorizationCachedValue = NO; } return self; } - (void)preloadVideoCaptureAuthorization { SCTraceODPCompatibleStart(2); [_performer perform:^{ SCTraceODPCompatibleStart(2); _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo]; }]; } - (BOOL)authorizedForVideoCapture { SCTraceODPCompatibleStart(2); // Cache authorizedForVideoCapture for low devices if it's YES // [AVCaptureDevice authorizationStatusForMediaType:] is expensive on low devices like iPhone4 if (_videoCaptureAuthorizationCachedValue) { // If the user authorizes and then unauthorizes, iOS would SIGKILL the app. // When the user opens the app, a pop-up tells the user to allow camera access in settings. // So 'return YES' makes sense here. return YES; } else { @weakify(self); [_performer performAndWait:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); if (!_videoCaptureAuthorizationCachedValue) { _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo]; } }]; return _videoCaptureAuthorizationCachedValue; } } - (BOOL)authorizedForMediaType:(NSString *)mediaType { return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusAuthorized; } @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceResolver.h ================================================ // // SCCaptureDeviceResolver.h // Snapchat // // Created by Lin Jia on 11/8/17. // // #import /* See https://jira.sc-corp.net/browse/CCAM-5843 Retrieving AVCaptureDevice is a flaky operation. Thus create capture device resolver to make our code more robust. Resolver is used to retrieve AVCaptureDevice. We are going to do our best to find the camera for you. Resolver is only going to be used by SCManagedCaptureDevice. All APIs are thread safe. */ @interface SCCaptureDeviceResolver : NSObject + (instancetype)sharedInstance; - (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position; - (AVCaptureDevice *)findDualCamera; @end ================================================ FILE: ManagedCapturer/SCCaptureDeviceResolver.m ================================================ // // SCCaptureDeviceResolver.m // Snapchat // // Created by Lin Jia on 11/8/17. // // #import "SCCaptureDeviceResolver.h" #import "SCCameraTweaks.h" #import #import @interface SCCaptureDeviceResolver () { AVCaptureDeviceDiscoverySession *_discoverySession; } @end @implementation SCCaptureDeviceResolver + (instancetype)sharedInstance { static SCCaptureDeviceResolver *resolver; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ resolver = [[SCCaptureDeviceResolver alloc] init]; }); return resolver; } - (instancetype)init { self = [super init]; if (self) { NSMutableArray *deviceTypes = [[NSMutableArray alloc] init]; [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera]; if (SC_AT_LEAST_IOS_10_2) { [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera]; } // TODO: we should KVO _discoverySession.devices. 
_discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified]; } return self; } - (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position { SCAssert(position == AVCaptureDevicePositionFront || position == AVCaptureDevicePositionBack, @""); AVCaptureDevice *captureDevice; if (position == AVCaptureDevicePositionFront) { captureDevice = [self _pickBestFrontCamera:[_discoverySession.devices copy]]; } else if (position == AVCaptureDevicePositionBack) { captureDevice = [self _pickBestBackCamera:[_discoverySession.devices copy]]; } if (captureDevice) { return captureDevice; } if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) { captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:position]; if (captureDevice) { return captureDevice; } } // If execution reaches here, the discovery session lookup failed, so we keep searching. captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:position]; if (captureDevice) { return captureDevice; } #if !TARGET_IPHONE_SIMULATOR // We do not return nil at the beginning of the function for simulator, because simulators of different iOS // versions can check whether or not our camera device API access is correct. SCAssertFail(@"No camera is found."); #endif return nil; } - (AVCaptureDevice *)_pickBestFrontCamera:(NSArray *)devices { for (AVCaptureDevice *device in devices) { if (device.position == AVCaptureDevicePositionFront) { return device; } } return nil; } - (AVCaptureDevice *)_pickBestBackCamera:(NSArray *)devices { // Look for the dual camera first if needed. If the dual camera is not found, continue to look for the wide-angle camera. if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) { for (AVCaptureDevice *device in devices) { if (device.position == AVCaptureDevicePositionBack && device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) { return device; } } } for (AVCaptureDevice *device in devices) { if (device.position == AVCaptureDevicePositionBack && device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) { return device; } } return nil; } - (AVCaptureDevice *)findDualCamera { if (SC_AT_LEAST_IOS_10_2) { for (AVCaptureDevice *device in [_discoverySession.devices copy]) { if (device.position == AVCaptureDevicePositionBack && device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) { return device; } } } AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack]; if (captureDevice) { return captureDevice; } #if !TARGET_IPHONE_SIMULATOR // We do not return nil at the beginning of the function for simulator, because simulators of different iOS // versions can check whether or not our camera device API access is correct. SCAssertFail(@"No camera is found."); #endif return nil; } @end ================================================ FILE: ManagedCapturer/SCCaptureFaceDetectionParser.h ================================================ // // SCCaptureFaceDetectionParser.h // Snapchat // // Created by Jiyang Zhu on 3/13/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class offers methods to parse face bounds from raw data, e.g., AVMetadataObject, CIFeature.
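//
// A minimal usage sketch, assuming an illustrative area threshold (the real callsite in
// SCCaptureMetadataOutputDetector derives it from pow(SCCameraFaceFocusMinFaceSize(), 2)):
//
//   SCCaptureFaceDetectionParser *parser =
//       [[SCCaptureFaceDetectionParser alloc] initWithFaceBoundsAreaThreshold:0.01];
//   NSDictionary *faceBoundsByFaceID =
//       [parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects];
//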
#import #import #import @interface SCCaptureFaceDetectionParser : NSObject SC_INIT_AND_NEW_UNAVAILABLE; - (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea; /** Parse face bounds from AVMetadataObject. @param metadataObjects An array of AVMetadataObject. @return A dictionary, where each value is a faceBounds CGRect boxed in NSValue, and each key is a faceID boxed in NSNumber. */ - (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: (NSArray<__kindof AVMetadataObject *> *)metadataObjects; /** Parse face bounds from CIFeature. @param features An array of CIFeature. @param imageSize Size of the image the features are detected from. @param imageOrientation Orientation of the image. @return A dictionary, where each value is a faceBounds CGRect boxed in NSValue, and each key is a faceID boxed in NSNumber. */ - (NSDictionary *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features withImageSize:(CGSize)imageSize imageOrientation: (CGImagePropertyOrientation)imageOrientation; @end ================================================ FILE: ManagedCapturer/SCCaptureFaceDetectionParser.m ================================================ // // SCCaptureFaceDetectionParser.m // Snapchat // // Created by Jiyang Zhu on 3/13/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // #import "SCCaptureFaceDetectionParser.h" #import #import #import @implementation SCCaptureFaceDetectionParser { CGFloat _minimumArea; } - (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea { self = [super init]; if (self) { _minimumArea = minimumArea; } return self; } - (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: (NSArray<__kindof AVMetadataObject *> *)metadataObjects { SCTraceODPCompatibleStart(2); NSMutableArray *faceObjects = [NSMutableArray array]; [metadataObjects enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { if ([obj isKindOfClass:[AVMetadataFaceObject class]]) { [faceObjects addObject:obj]; } }]; SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil); NSMutableDictionary *faceBoundsByFaceID = [NSMutableDictionary dictionaryWithCapacity:faceObjects.count]; for (AVMetadataFaceObject *faceObject in faceObjects) { CGRect bounds = faceObject.bounds; if (CGRectGetWidth(bounds) * CGRectGetHeight(bounds) >= _minimumArea) { [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:bounds] forKey:@(faceObject.faceID)]; } } return faceBoundsByFaceID; } - (NSDictionary *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features withImageSize:(CGSize)imageSize imageOrientation: (CGImagePropertyOrientation)imageOrientation { SCTraceODPCompatibleStart(2); NSArray *faceFeatures = [features filteredArrayUsingBlock:^BOOL(id _Nonnull evaluatedObject) { return [evaluatedObject isKindOfClass:[CIFaceFeature class]]; }]; SC_GUARD_ELSE_RETURN_VALUE(faceFeatures.count > 0, nil); NSMutableDictionary *faceBoundsByFaceID = [NSMutableDictionary dictionaryWithCapacity:faceFeatures.count]; CGFloat width = imageSize.width; CGFloat height = imageSize.height; SCLogGeneralInfo(@"Face feature count:%lu", (unsigned long)faceFeatures.count); for (CIFaceFeature *faceFeature in faceFeatures) { SCLogGeneralInfo(@"Face feature: hasTrackingID:%d, bounds:%@", faceFeature.hasTrackingID, NSStringFromCGRect(faceFeature.bounds)); if (faceFeature.hasTrackingID) { CGRect transferredBounds; // Somehow the detected bounds for the back camera are mirrored.
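// Worked example with illustrative numbers: for a 1000x1000 image and CIFaceFeature
// bounds of (100, 200, 300, 300), the orientation-right branch below produces
// (100/1000, 1 - 500/1000, 300/1000, 300/1000) = (0.1, 0.5, 0.3, 0.3), flipping the Y
// origin from Core Image's bottom-left space into a normalized top-left space; the else
// branch only normalizes each component, producing (0.1, 0.2, 0.3, 0.3).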
if (imageOrientation == kCGImagePropertyOrientationRight) { transferredBounds = CGRectMake( CGRectGetMinX(faceFeature.bounds) / width, 1 - CGRectGetMaxY(faceFeature.bounds) / height, CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height); } else { transferredBounds = CGRectMake( CGRectGetMinX(faceFeature.bounds) / width, CGRectGetMinY(faceFeature.bounds) / height, CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height); } if (CGRectGetWidth(transferredBounds) * CGRectGetHeight(transferredBounds) >= _minimumArea) { [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:transferredBounds] forKey:@(faceFeature.trackingID)]; } } } return faceBoundsByFaceID; } @end ================================================ FILE: ManagedCapturer/SCCaptureFaceDetector.h ================================================ // // SCCaptureFaceDetector.h // Snapchat // // Created by Jiyang Zhu on 3/27/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This protocol declares properties and methods that are used for face detectors. #import @class SCCaptureResource; @class SCQueuePerformer; @class SCCaptureFaceDetectorTrigger; @class SCCaptureFaceDetectionParser; @protocol SCCaptureFaceDetector @property (nonatomic, strong, readonly) SCCaptureFaceDetectorTrigger *trigger; @property (nonatomic, strong, readonly) SCCaptureFaceDetectionParser *parser; - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; - (SCQueuePerformer *)detectionPerformer; - (void)startDetection; - (void)stopDetection; @end ================================================ FILE: ManagedCapturer/SCCaptureFaceDetectorTrigger.h ================================================ // // SCCaptureFaceDetectorTrigger.h // Snapchat // // Created by Jiyang Zhu on 3/22/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class is used to control when should SCCaptureFaceDetector starts and stops. #import #import @protocol SCCaptureFaceDetector; @interface SCCaptureFaceDetectorTrigger : NSObject SC_INIT_AND_NEW_UNAVAILABLE; - (instancetype)initWithDetector:(id)detector; @end ================================================ FILE: ManagedCapturer/SCCaptureFaceDetectorTrigger.m ================================================ // // SCCaptureFaceDetectorTrigger.m // Snapchat // // Created by Jiyang Zhu on 3/22/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. 
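//
// (Wiring sketch, following the usage in SCCaptureMetadataOutputDetector: a detector
// creates its trigger once detection is initialized, and the trigger then starts/stops
// detection around app-active transitions, waiting for app idle before starting.)
//
//   _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
//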
// #import "SCCaptureFaceDetectorTrigger.h" #import "SCCaptureFaceDetector.h" #import #import #import #import #import @interface SCCaptureFaceDetectorTrigger () { id __weak _detector; } @end @implementation SCCaptureFaceDetectorTrigger - (instancetype)initWithDetector:(id)detector { self = [super init]; if (self) { _detector = detector; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_applicationDidBecomeActive) name:kSCPostponedUIApplicationDidBecomeActiveNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_applicationWillResignActive) name:UIApplicationWillResignActiveNotification object:nil]; } return self; } #pragma mark - Internal Methods - (void)_applicationWillResignActive { SCTraceODPCompatibleStart(2); [self _stopDetection]; } - (void)_applicationDidBecomeActive { SCTraceODPCompatibleStart(2); [self _waitUntilAppStartCompleteToStartDetection]; } - (void)_waitUntilAppStartCompleteToStartDetection { SCTraceODPCompatibleStart(2); @weakify(self); if (SCExperimentWithWaitUntilIdleReplacement()) { [[SCTaskManager sharedManager] addTaskToRunWhenAppIdle:"SCCaptureFaceDetectorTrigger.startDetection" performer:[_detector detectionPerformer] block:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); [self _startDetection]; }]; } else { [[SCIdleMonitor sharedInstance] waitUntilIdleForTag:"SCCaptureFaceDetectorTrigger.startDetection" callbackQueue:[_detector detectionPerformer].queue block:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); [self _startDetection]; }]; } } - (void)_startDetection { SCTraceODPCompatibleStart(2); [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{ [_detector startDetection]; }]; } - (void)_stopDetection { SCTraceODPCompatibleStart(2); [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{ [_detector stopDetection]; }]; } @end ================================================ FILE: ManagedCapturer/SCCaptureMetadataObjectParser.h ================================================ // // SCCaptureMetadataObjectParser.h // Snapchat // // Created by Jiyang Zhu on 3/13/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class offers class methods to parse AVMetadataObject. #import @interface SCCaptureMetadataObjectParser : NSObject /** Parse face bounds from AVMetadataObject. @param metadataObjects An array of AVMetadataObject. @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString. */ - (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: (NSArray<__kindof AVMetadataObject *> *)metadataObjects; @end ================================================ FILE: ManagedCapturer/SCCaptureMetadataObjectParser.m ================================================ // // SCCaptureMetadataObjectParser.m // Snapchat // // Created by Jiyang Zhu on 3/13/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. 
// #import "SCCaptureMetadataObjectParser.h" #import @import UIKit; @implementation SCCaptureMetadataObjectParser - (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: (NSArray<__kindof AVMetadataObject *> *)metadataObjects { NSMutableArray *faceObjects = [NSMutableArray array]; [metadataObjects enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { if ([obj isKindOfClass:[AVMetadataFaceObject class]]) { [faceObjects addObject:obj]; } }]; SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil); NSMutableDictionary *faceBoundsByFaceID = [NSMutableDictionary dictionaryWithCapacity:faceObjects.count]; for (AVMetadataFaceObject *faceObject in faceObjects) { [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:faceObject.bounds] forKey:@(faceObject.faceID)]; } return faceBoundsByFaceID; } @end ================================================ FILE: ManagedCapturer/SCCaptureMetadataOutputDetector.h ================================================ // // SCCaptureMetadataOutputDetector.h // Snapchat // // Created by Jiyang Zhu on 12/21/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // // This class is intended to detect faces in Camera. It receives AVMetadataFaceObjects, and announce the bounds and // faceIDs. #import "SCCaptureFaceDetector.h" #import @interface SCCaptureMetadataOutputDetector : NSObject SC_INIT_AND_NEW_UNAVAILABLE; @end ================================================ FILE: ManagedCapturer/SCCaptureMetadataOutputDetector.m ================================================ // // SCCaptureMetadataOutputDetector.m // Snapchat // // Created by Jiyang Zhu on 12/21/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCCaptureMetadataOutputDetector.h" #import "SCCameraTweaks.h" #import "SCCaptureFaceDetectionParser.h" #import "SCCaptureFaceDetectorTrigger.h" #import "SCCaptureResource.h" #import "SCManagedCaptureSession.h" #import "SCManagedCapturer.h" #import #import #import #import #import #import #import #define SCLogCaptureMetaDetectorInfo(fmt, ...) \ SCLogCoreCameraInfo(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) #define SCLogCaptureMetaDetectorWarning(fmt, ...) \ SCLogCoreCameraWarning(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) #define SCLogCaptureMetaDetectorError(fmt, ...) \ SCLogCoreCameraError(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) static char *const kSCCaptureMetadataOutputDetectorProcessQueue = "com.snapchat.capture-metadata-output-detector-process"; static const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces. 
@interface SCCaptureMetadataOutputDetector () @end @implementation SCCaptureMetadataOutputDetector { BOOL _isDetecting; AVCaptureMetadataOutput *_metadataOutput; SCCaptureResource *_captureResource; SCCaptureFaceDetectionParser *_parser; NSInteger _numberOfSequentialFramesWithFaces; NSUInteger _detectionFrequency; SCQueuePerformer *_callbackPerformer; SCQueuePerformer *_metadataProcessPerformer; SCCaptureFaceDetectorTrigger *_trigger; } @synthesize trigger = _trigger; @synthesize parser = _parser; - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { SCTraceODPCompatibleStart(2); self = [super init]; if (self) { SCAssert(captureResource, @"SCCaptureResource should not be nil"); SCAssert(captureResource.managedSession.avSession, @"AVCaptureSession should not be nil"); SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil"); _metadataOutput = [AVCaptureMetadataOutput new]; _callbackPerformer = captureResource.queuePerformer; _captureResource = captureResource; _detectionFrequency = SCExperimentWithFaceDetectionFrequency(); _parser = [[SCCaptureFaceDetectionParser alloc] initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)]; _metadataProcessPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue qualityOfService:QOS_CLASS_DEFAULT queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; if ([self _initDetection]) { _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self]; } } return self; } - (AVCaptureSession *)_captureSession { // _captureResource.avSession may change, so we don't retain any specific AVCaptureSession. return _captureResource.managedSession.avSession; } - (BOOL)_initDetection { BOOL success = NO; if ([[self _captureSession] canAddOutput:_metadataOutput]) { [[self _captureSession] addOutput:_metadataOutput]; if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) { _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces; _metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ]; success = YES; SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled."); } else { [[self _captureSession] removeOutput:_metadataOutput]; success = NO; SCLogCaptureMetaDetectorError(@"AVMetadataObjectTypeFace is not available for " @"AVMetadataOutput[%@]", _metadataOutput); } } else { success = NO; SCLogCaptureMetaDetectorError(@"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output", [self _captureSession], _metadataOutput); } return success; } - (void)startDetection { SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue."); SC_GUARD_ELSE_RETURN(!_isDetecting); [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ [_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue]; _isDetecting = YES; SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled."); }]; } - (void)stopDetection { SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue."); SC_GUARD_ELSE_RETURN(_isDetecting); [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ [_metadataOutput setMetadataObjectsDelegate:nil queue:NULL]; _isDetecting = NO; SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully disabled."); }]; } - (SCQueuePerformer *)detectionPerformer { return 
_captureResource.queuePerformer; } #pragma mark - AVCaptureMetadataOutputObjectsDelegate - (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection { SCTraceODPCompatibleStart(2); BOOL shouldNotify = NO; if (metadataObjects.count == 0 && _numberOfSequentialFramesWithFaces != kDefaultNumberOfSequentialFramesWithFaces) { // There were faces detected before, but there is no face right // now, so send out the notification. _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces; shouldNotify = YES; } else if (metadataObjects.count > 0) { _numberOfSequentialFramesWithFaces++; shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0); } SC_GUARD_ELSE_RETURN(shouldNotify); NSDictionary *faceBoundsByFaceID = [_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects]; [_callbackPerformer perform:^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didDetectFaceBounds:faceBoundsByFaceID]; }]; } @end ================================================ FILE: ManagedCapturer/SCCapturer.h ================================================ // // SCManagedCapturer.h // Snapchat // // Created by Liu Liu on 4/20/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCCaptureCommon.h" #import "SCSnapCreationTriggers.h" #import #import #import #define SCCapturerContext [NSString sc_stringWithFormat:@"%s/%d", __FUNCTION__, __LINE__] @class SCBlackCameraDetector; @protocol SCManagedCapturerListener , SCManagedCapturerLensAPI, SCDeviceMotionProvider, SCFileInputDecider, SCManagedCapturerARImageCaptureProvider, SCManagedCapturerGLViewManagerAPI, SCManagedCapturerLensAPIProvider, SCManagedCapturerLSAComponentTrackerAPI, SCManagedCapturePreviewLayerControllerDelegate; @protocol SCCapturer @property (nonatomic, readonly) SCBlackCameraDetector *blackCameraDetector; /** * Returns id for the current capturer. */ - (id)lensProcessingCore; - (CMTime)firstWrittenAudioBufferDelay; - (BOOL)audioQueueStarted; - (BOOL)isLensApplied; - (BOOL)isVideoMirrored; - (SCVideoCaptureSessionInfo)activeSession; #pragma mark - Outside resources - (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector deviceMotionProvider:(id)deviceMotionProvider fileInputDecider:(id)fileInputDecider arImageCaptureProvider:(id)arImageCaptureProvider glviewManager:(id)glViewManager lensAPIProvider:(id)lensAPIProvider lsaComponentTracker:(id)lsaComponentTracker managedCapturerPreviewLayerControllerDelegate: (id)previewLayerControllerDelegate; #pragma mark - Setup, Start & Stop // setupWithDevicePositionAsynchronously will be called on the main thread, executed off the main thread, exactly once - (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; /** * Important: Remember to call stopRunningAsynchronously to stop the capture session. Dismissing the view is not enough * @param identifier is for knowing the callsite. Pass in the classname of the callsite is generally suggested. * Currently it is used for debugging purposes. In other words the capture session will work without it. 
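*
* A minimal usage sketch (the capturer variable is a placeholder and the completion
* handlers are omitted for brevity):
*
*   SCCapturerToken *token =
*       [capturer startRunningAsynchronouslyWithCompletionHandler:nil context:SCCapturerContext];
*   ...
*   [capturer stopRunningAsynchronously:token completionHandler:nil context:SCCapturerContext];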
*/ - (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context; - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler after:(NSTimeInterval)delay context:(NSString *)context; - (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController context:(NSString *)context; #pragma mark - Recording / Capture - (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context; /** * Unlike captureStillImageAsynchronouslyWithAspectRatio, this captures a single frame from the ongoing video * stream. This should be faster but lower quality (and smaller size), and does not play the shutter sound. */ - (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler context:(NSString *)context; - (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context audioConfiguration:(SCAudioConfiguration *)configuration; - (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context; - (void)stopRecordingAsynchronouslyWithContext:(NSString *)context; - (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context; - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context; - (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler context:(NSString *)context; // AddTimedTask will schedule a task to run, it is thread safe API. Your task will run on main thread, so it is not // recommended to add large amount of tasks which all have the same task target time. - (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context; // clearTimedTasks will cancel the tasks, it is thread safe API. 
- (void)clearTimedTasksWithContext:(NSString *)context; #pragma mark - Utilities - (void)convertViewCoordinates:(CGPoint)viewCoordinates completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler context:(NSString *)context; - (void)detectLensCategoryOnNextFrame:(CGPoint)point lenses:(NSArray *)lenses completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion context:(NSString *)context; #pragma mark - Configurations - (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setFlashActive:(BOOL)flashActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setLensesActive:(BOOL)lensesActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setLensesActive:(BOOL)lensesActive filterFactory:(SCLookseryFilterFactory *)filterFactory completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setLensesInTalkActive:(BOOL)lensesActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setTorchActiveAsynchronously:(BOOL)torchActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setNightModeActiveAsynchronously:(BOOL)active completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)lockZoomWithContext:(NSString *)context; - (void)unlockZoomWithContext:(NSString *)context; - (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context; - (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor devicePosition:(SCManagedCaptureDevicePosition)devicePosition context:(NSString *)context; - (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; // I need to call these three methods from SCAppDelegate explicitly so that I get the latest information. 
- (void)applicationDidEnterBackground; - (void)applicationWillEnterForeground; - (void)applicationDidBecomeActive; - (void)applicationWillResignActive; - (void)mediaServicesWereReset; - (void)mediaServicesWereLost; #pragma mark - Add / Remove Listener - (void)addListener:(id)listener; - (void)removeListener:(id)listener; - (void)addVideoDataSourceListener:(id)listener; - (void)removeVideoDataSourceListener:(id)listener; - (void)addDeviceCapacityAnalyzerListener:(id)listener; - (void)removeDeviceCapacityAnalyzerListener:(id)listener; - (NSString *)debugInfo; - (id)currentVideoDataSource; - (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback; // Need to be visible so that classes like SCCaptureSessionFixer can manage capture session - (void)recreateAVCaptureSession; #pragma mark - Snap Creation triggers - (SCSnapCreationTriggers *)snapCreationTriggers; @optional - (BOOL)authorizedForVideoCapture; - (void)preloadVideoCaptureAuthorization; @end ================================================ FILE: ManagedCapturer/SCCapturerBufferedVideoWriter.h ================================================ // // SCCapturerBufferedVideoWriter.h // Snapchat // // Created by Chao Pang on 12/5/17. // #import #import #import #import @protocol SCCapturerBufferedVideoWriterDelegate - (void)videoWriterDidFailWritingWithError:(NSError *)error; @end @interface SCCapturerBufferedVideoWriter : NSObject - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(id)performer outputURL:(NSURL *)outputURL delegate:(id)delegate error:(NSError **)error; - (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings; - (void)startWritingAtSourceTime:(CMTime)sourceTime; - (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock; - (void)cancelWriting; - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; - (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; - (void)cleanUp; @end ================================================ FILE: ManagedCapturer/SCCapturerBufferedVideoWriter.m ================================================ // // SCCapturerBufferedVideoWriter.m // Snapchat // // Created by Chao Pang on 12/5/17. 
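//
// A minimal sketch of the expected call order, inferred from the header above (performer,
// URL, settings, and timestamps are placeholders; the completion selector's spelling
// matches the header):
//
//   SCCapturerBufferedVideoWriter *writer =
//       [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:performer
//                                                      outputURL:outputURL
//                                                       delegate:self
//                                                          error:&error];
//   [writer prepareWritingWithOutputSettings:outputSettings];
//   [writer startWritingAtSourceTime:startTime];
//   // ... appendVideoSampleBuffer: / appendAudioSampleBuffer: for each captured buffer ...
//   [writer finishWritingAtSourceTime:endTime withCompletionHanlder:^{ /* finalize */ }];
//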
// #import "SCCapturerBufferedVideoWriter.h" #import "SCAudioCaptureSession.h" #import "SCCaptureCommon.h" #import "SCManagedCapturerUtils.h" #import #import #import #import #import #import @implementation SCCapturerBufferedVideoWriter { SCQueuePerformer *_performer; __weak id _delegate; FBKVOController *_observeController; AVAssetWriter *_assetWriter; AVAssetWriterInput *_audioWriterInput; AVAssetWriterInput *_videoWriterInput; AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor; CVPixelBufferPoolRef _defaultPixelBufferPool; CVPixelBufferPoolRef _nightPixelBufferPool; CVPixelBufferPoolRef _lensesPixelBufferPool; CMBufferQueueRef _videoBufferQueue; CMBufferQueueRef _audioBufferQueue; } - (instancetype)initWithPerformer:(id)performer outputURL:(NSURL *)outputURL delegate:(id)delegate error:(NSError **)error { self = [super init]; if (self) { _performer = performer; _delegate = delegate; _observeController = [[FBKVOController alloc] initWithObserver:self]; CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), &_videoBufferQueue); CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), &_audioBufferQueue); _assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error]; if (*error) { self = nil; return self; } } return self; } - (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); SCAssert(outputSettings, @"empty output setting"); // Audio SCTraceSignal(@"Derive audio output setting"); NSDictionary *audioOutputSettings = @{ AVFormatIDKey : @(kAudioFormatMPEG4AAC), AVNumberOfChannelsKey : @(1), AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate), AVEncoderBitRateKey : @(outputSettings.audioBitRate) }; _audioWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings]; _audioWriterInput.expectsMediaDataInRealTime = YES; // Video SCTraceSignal(@"Derive video output setting"); size_t outputWidth = outputSettings.width; size_t outputHeight = outputSettings.height; SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0), @"invalid output size"); NSDictionary *videoCompressionSettings = @{ AVVideoAverageBitRateKey : @(outputSettings.videoBitRate), AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval) }; NSDictionary *videoOutputSettings = @{ AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : @(outputWidth), AVVideoHeightKey : @(outputHeight), AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill, AVVideoCompressionPropertiesKey : videoCompressionSettings }; _videoWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings]; _videoWriterInput.expectsMediaDataInRealTime = YES; CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0); _videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2); _pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:@{ (NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *) kCVPixelBufferWidthKey : @(outputWidth), (NSString *) kCVPixelBufferHeightKey : @(outputHeight) }]; SCTraceSignal(@"Setup video writer input"); if ([_assetWriter canAddInput:_videoWriterInput]) { [_assetWriter 
addInput:_videoWriterInput]; } else { return NO; } SCTraceSignal(@"Setup audio writer input"); if ([_assetWriter canAddInput:_audioWriterInput]) { [_assetWriter addInput:_audioWriterInput]; } else { return NO; } return YES; } - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCAssert([_performer isCurrentPerformer], @""); SC_GUARD_ELSE_RETURN(sampleBuffer); if (!CMBufferQueueIsEmpty(_videoBufferQueue)) { // We need to drain the buffer queue in this case while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors CMSampleBufferRef dequeuedSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); if (dequeuedSampleBuffer == NULL) { break; } [self _appendVideoSampleBuffer:dequeuedSampleBuffer]; CFRelease(dequeuedSampleBuffer); } } // Fast path, just append this sample buffer if ready if (_videoWriterInput.readyForMoreMediaData) { [self _appendVideoSampleBuffer:sampleBuffer]; } else { // It is not ready, queuing the sample buffer CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer); } } - (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCAssert([_performer isCurrentPerformer], @""); SC_GUARD_ELSE_RETURN(sampleBuffer); if (!CMBufferQueueIsEmpty(_audioBufferQueue)) { // We need to drain the buffer queue in this case while (_audioWriterInput.readyForMoreMediaData) { CMSampleBufferRef dequeuedSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); if (dequeuedSampleBuffer == NULL) { break; } // Append the dequeued sample buffer, mirroring the video drain loop above [_audioWriterInput appendSampleBuffer:dequeuedSampleBuffer]; CFRelease(dequeuedSampleBuffer); } } // fast path, just append this sample buffer if ready if ((_audioWriterInput.readyForMoreMediaData)) { [_audioWriterInput appendSampleBuffer:sampleBuffer]; } else { // it is not ready, queuing the sample buffer CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer); } } - (void)startWritingAtSourceTime:(CMTime)sourceTime { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); // To observe the status change on assetWriter because when assetWriter errors out, it only changes the // status, no further delegate callbacks etc.
[_observeController observe:_assetWriter keyPath:@keypath(_assetWriter, status) options:NSKeyValueObservingOptionNew action:@selector(assetWriterStatusChanged:)]; [_assetWriter startWriting]; [_assetWriter startSessionAtSourceTime:sourceTime]; } - (void)cancelWriting { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); CMBufferQueueReset(_videoBufferQueue); CMBufferQueueReset(_audioBufferQueue); [_assetWriter cancelWriting]; } - (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) { CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); if (audioSampleBuffer == NULL) { break; } [_audioWriterInput appendSampleBuffer:audioSampleBuffer]; CFRelease(audioSampleBuffer); } while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) { CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); if (videoSampleBuffer == NULL) { break; } [_videoWriterInput appendSampleBuffer:videoSampleBuffer]; CFRelease(videoSampleBuffer); } dispatch_block_t finishWritingBlock = ^() { [_assetWriter endSessionAtSourceTime:sourceTime]; [_audioWriterInput markAsFinished]; [_videoWriterInput markAsFinished]; [_assetWriter finishWritingWithCompletionHandler:^{ if (completionBlock) { completionBlock(); } }]; }; if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) { finishWritingBlock(); } else { // We need to drain the samples from the queues before finish writing __block BOOL isAudioDone = NO; __block BOOL isVideoDone = NO; // Audio [_audioWriterInput requestMediaDataWhenReadyOnQueue:_performer.queue usingBlock:^{ if (!CMBufferQueueIsEmpty(_audioBufferQueue) && _assetWriter.status == AVAssetWriterStatusWriting) { CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); if (audioSampleBuffer) { [_audioWriterInput appendSampleBuffer:audioSampleBuffer]; CFRelease(audioSampleBuffer); } } else if (!isAudioDone) { isAudioDone = YES; } if (isAudioDone && isVideoDone) { finishWritingBlock(); } }]; // Video [_videoWriterInput requestMediaDataWhenReadyOnQueue:_performer.queue usingBlock:^{ if (!CMBufferQueueIsEmpty(_videoBufferQueue) && _assetWriter.status == AVAssetWriterStatusWriting) { CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); if (videoSampleBuffer) { [_videoWriterInput appendSampleBuffer:videoSampleBuffer]; CFRelease(videoSampleBuffer); } } else if (!isVideoDone) { isVideoDone = YES; } if (isAudioDone && isVideoDone) { finishWritingBlock(); } }]; } } - (void)cleanUp { _assetWriter = nil; _videoWriterInput = nil; _audioWriterInput = nil; _pixelBufferAdaptor = nil; } - (void)dealloc { CFRelease(_videoBufferQueue); CFRelease(_audioBufferQueue); CVPixelBufferPoolRelease(_defaultPixelBufferPool); CVPixelBufferPoolRelease(_nightPixelBufferPool); CVPixelBufferPoolRelease(_lensesPixelBufferPool); [_observeController unobserveAll]; } - (void)assetWriterStatusChanged:(NSDictionary *)change { SCTraceStart(); if (_assetWriter.status == AVAssetWriterStatusFailed) { SCTraceSignal(@"Asset writer status failed %@, error %@", change, _assetWriter.error); [_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]]; } } #pragma mark -
Private methods - (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer { SCAssertTrue([SCDeviceName isIphoneX]); const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer); const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer); const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2; const size_t croppedBufferHeight = (size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2; const size_t offsetPointX = inputBufferWidth - croppedBufferWidth; const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2; SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) && (inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) && (offsetPointY % 2 == 0) && (inputBufferWidth >= croppedBufferWidth + offsetPointX) && (inputBufferHeight >= croppedBufferHeight + offsetPointY), SCLogGeneralError(@"Invalid cropping configuration"), NULL); CVPixelBufferRef croppedPixelBuffer = NULL; CVPixelBufferPoolRef pixelBufferPool = [self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight) croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)]; if (pixelBufferPool) { CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer); if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) { SCLogGeneralError(@"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer"); return NULL; } } else { SCAssertFail(@"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, " @"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@", @(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight)); return NULL; } CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly); CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0); const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer); for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) { size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex); size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex); uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex); size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex); size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex); uint8_t *croppedPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex); // Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X). 
// However, since UV are packed together in memory, we can use offsetPointX for all planes size_t offsetPlaneBytesX = offsetPointX; size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight; inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX; size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow); for (int i = 0; i < croppedPlaneHeight; i++) { memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow); inPlaneAdress += inPlaneBytesPerRow; croppedPlaneAdress += croppedPlaneBytesPerRow; } } CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly); CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0); return croppedPixelBuffer; } - (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize { if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) { if (_defaultPixelBufferPool == NULL) { _defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; } return _defaultPixelBufferPool; } else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) { if (_nightPixelBufferPool == NULL) { _nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; } return _nightPixelBufferPool; } else { if (_lensesPixelBufferPool == NULL) { _lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; } return _lensesPixelBufferPool; } } - (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height { NSDictionary *attributes = @{ (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *) kCVPixelBufferWidthKey : @(width), (NSString *) kCVPixelBufferHeightKey : @(height) }; CVPixelBufferPoolRef pixelBufferPool = NULL; CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool); if (result != kCVReturnSuccess) { SCLogGeneralError(@"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i", result); return NULL; } return pixelBufferPool; } - (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCAssert([_performer isCurrentPerformer], @""); CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); if ([SCDeviceName isIphoneX]) { CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer]; if (croppedPixelBuffer) { [_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime]; CVPixelBufferRelease(croppedPixelBuffer); } } else { [_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime]; } } @end ================================================ FILE: ManagedCapturer/SCCapturerDefines.h ================================================ // // SCCapturerDefines.h // Snapchat // // Created by Chao Pang on 12/20/17. 
// #import typedef NS_ENUM(NSInteger, SCCapturerLightingConditionType) { SCCapturerLightingConditionTypeNormal = 0, SCCapturerLightingConditionTypeDark, SCCapturerLightingConditionTypeExtremeDark, }; typedef struct SampleBufferMetadata { int isoSpeedRating; float exposureTime; float brightness; } SampleBufferMetadata; ================================================ FILE: ManagedCapturer/SCCapturerToken.h ================================================ // // SCCapturerToken.h // Snapchat // // Created by Xishuo Liu on 3/24/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import @interface SCCapturerToken : NSObject - (instancetype)initWithIdentifier:(NSString *)identifier NS_DESIGNATED_INITIALIZER; - (instancetype)init __attribute__((unavailable("Use initWithIdentifier: instead."))); - (instancetype) new __attribute__((unavailable("Use initWithIdentifier: instead."))); @end ================================================ FILE: ManagedCapturer/SCCapturerToken.m ================================================ // // SCCapturerToken.m // Snapchat // // Created by Xishuo Liu on 3/24/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCCapturerToken.h" #import @implementation SCCapturerToken { NSString *_identifier; } - (instancetype)initWithIdentifier:(NSString *)identifier { if (self = [super init]) { _identifier = identifier.copy; } return self; } - (NSString *)debugDescription { return [NSString sc_stringWithFormat:@"%@_%@", _identifier, self]; } @end ================================================ FILE: ManagedCapturer/SCCapturerTokenProvider.h ================================================ // // Created by Aaron Levine on 10/16/17. // #import #import @class SCCapturerToken; NS_ASSUME_NONNULL_BEGIN @interface SCCapturerTokenProvider : NSObject SC_INIT_AND_NEW_UNAVAILABLE + (instancetype)providerWithToken:(SCCapturerToken *)token; - (nullable SCCapturerToken *)getTokenAndInvalidate; @end NS_ASSUME_NONNULL_END ================================================ FILE: ManagedCapturer/SCCapturerTokenProvider.m ================================================ // // Created by Aaron Levine on 10/16/17. // #import "SCCapturerTokenProvider.h" #import "SCCapturerToken.h" #import #import @implementation SCCapturerTokenProvider { SCCapturerToken *_Nullable _token; } + (instancetype)providerWithToken:(SCCapturerToken *)token { return [[self alloc] initWithToken:token]; } - (instancetype)initWithToken:(SCCapturerToken *)token { self = [super init]; if (self) { _token = token; } return self; } - (nullable SCCapturerToken *)getTokenAndInvalidate { // ensure serial access by requiring calls be on the main thread SCAssertMainThread(); let token = _token; _token = nil; return token; } @end ================================================ FILE: ManagedCapturer/SCExposureState.h ================================================ // // SCExposureState.h // Snapchat // // Created by Derek Peirce on 4/10/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import #import @interface SCExposureState : NSObject - (instancetype)initWithDevice:(AVCaptureDevice *)device; - (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device; @end ================================================ FILE: ManagedCapturer/SCExposureState.m ================================================ // // SCExposureState.m // Snapchat // // Created by Derek Peirce on 4/10/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
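//
// (A minimal usage sketch, assuming surrounding session-reconfiguration code: snapshot
// the device's exposure before a disruptive change, then restore it afterwards.)
//
//   SCExposureState *state = [[SCExposureState alloc] initWithDevice:device];
//   ... switch formats / reconfigure the session ...
//   [state applyISOAndExposureDurationToDevice:device];
//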
// #import "SCExposureState.h" #import "AVCaptureDevice+ConfigurationLock.h" #import @import AVFoundation; @implementation SCExposureState { float _ISO; CMTime _exposureDuration; } - (instancetype)initWithDevice:(AVCaptureDevice *)device { if (self = [super init]) { _ISO = device.ISO; _exposureDuration = device.exposureDuration; } return self; } - (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device { if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) { [device runTask:@"set prior exposure" withLockedConfiguration:^() { CMTime exposureDuration = CMTimeClampToRange(_exposureDuration, CMTimeRangeMake(device.activeFormat.minExposureDuration, device.activeFormat.maxExposureDuration)); [device setExposureModeCustomWithDuration:exposureDuration ISO:SC_CLAMP(_ISO, device.activeFormat.minISO, device.activeFormat.maxISO) completionHandler:nil]; }]; } } @end ================================================ FILE: ManagedCapturer/SCFileAudioCaptureSession.h ================================================ // // SCFileAudioCaptureSession.h // Snapchat // // Created by Xiaomu Wu on 2/2/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCAudioCaptureSession.h" #import @interface SCFileAudioCaptureSession : NSObject // Linear PCM is required. // To best mimic `SCAudioCaptureSession`, use an audio file recorded from it. - (void)setFileURL:(NSURL *)fileURL; @end ================================================ FILE: ManagedCapturer/SCFileAudioCaptureSession.m ================================================ // // SCFileAudioCaptureSession.m // Snapchat // // Created by Xiaomu Wu on 2/2/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCFileAudioCaptureSession.h" #import #import #import #import @import AudioToolbox; static float const kAudioBufferDurationInSeconds = 0.2; // same as SCAudioCaptureSession static char *const kSCFileAudioCaptureSessionQueueLabel = "com.snapchat.file-audio-capture-session"; @implementation SCFileAudioCaptureSession { SCQueuePerformer *_performer; SCSentinel *_sentinel; NSURL *_fileURL; AudioFileID _audioFile; // audio file AudioStreamBasicDescription _asbd; // audio format (core audio) CMAudioFormatDescriptionRef _formatDescription; // audio format (core media) SInt64 _readCurPacket; // current packet index to read UInt32 _readNumPackets; // number of packets to read every time UInt32 _readNumBytes; // number of bytes to read every time void *_readBuffer; // data buffer to hold read packets } @synthesize delegate = _delegate; #pragma mark - Public - (instancetype)init { self = [super init]; if (self) { _performer = [[SCQueuePerformer alloc] initWithLabel:kSCFileAudioCaptureSessionQueueLabel qualityOfService:QOS_CLASS_UNSPECIFIED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _sentinel = [[SCSentinel alloc] init]; } return self; } - (void)dealloc { if (_audioFile) { AudioFileClose(_audioFile); } if (_formatDescription) { CFRelease(_formatDescription); } if (_readBuffer) { free(_readBuffer); } } - (void)setFileURL:(NSURL *)fileURL { [_performer perform:^{ _fileURL = fileURL; }]; } #pragma mark - SCAudioCaptureSession - (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate // `sampleRate` ignored completionHandler:(audio_capture_session_block)completionHandler { [_performer perform:^{ BOOL succeeded = [self _setup]; int32_t sentinelValue = [_sentinel value]; if (completionHandler) { completionHandler(nil); } if (succeeded) { [_performer perform:^{ 
SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue); [self _read]; } after:kAudioBufferDurationInSeconds]; } }]; } - (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler { [_performer performAndWait:^{ [self _teardown]; if (completionHandler) { completionHandler(); } }]; } #pragma mark - Private - (BOOL)_setup { SCAssert([_performer isCurrentPerformer], @""); [_sentinel increment]; OSStatus status = noErr; status = AudioFileOpenURL((__bridge CFURLRef)_fileURL, kAudioFileReadPermission, 0, &_audioFile); if (noErr != status) { SCLogGeneralError(@"Cannot open file at URL %@, error code %d", _fileURL, (int)status); return NO; } _asbd = (AudioStreamBasicDescription){0}; UInt32 asbdSize = sizeof(_asbd); status = AudioFileGetProperty(_audioFile, kAudioFilePropertyDataFormat, &asbdSize, &_asbd); if (noErr != status) { SCLogGeneralError(@"Cannot get audio data format, error code %d", (int)status); AudioFileClose(_audioFile); _audioFile = NULL; return NO; } if (kAudioFormatLinearPCM != _asbd.mFormatID) { SCLogGeneralError(@"Linear PCM is required"); AudioFileClose(_audioFile); _audioFile = NULL; _asbd = (AudioStreamBasicDescription){0}; return NO; } UInt32 aclSize = 0; AudioChannelLayout *acl = NULL; status = AudioFileGetPropertyInfo(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, NULL); if (noErr == status) { acl = malloc(aclSize); status = AudioFileGetProperty(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, acl); if (noErr != status) { aclSize = 0; free(acl); acl = NULL; } } status = CMAudioFormatDescriptionCreate(NULL, &_asbd, aclSize, acl, 0, NULL, NULL, &_formatDescription); if (acl) { free(acl); acl = NULL; } if (noErr != status) { SCLogGeneralError(@"Cannot create format description, error code %d", (int)status); AudioFileClose(_audioFile); _audioFile = NULL; _asbd = (AudioStreamBasicDescription){0}; return NO; } _readCurPacket = 0; _readNumPackets = ceil(_asbd.mSampleRate * kAudioBufferDurationInSeconds); _readNumBytes = _asbd.mBytesPerPacket * _readNumPackets; _readBuffer = malloc(_readNumBytes); return YES; } - (void)_read { SCAssert([_performer isCurrentPerformer], @""); OSStatus status = noErr; UInt32 numBytes = _readNumBytes; UInt32 numPackets = _readNumPackets; status = AudioFileReadPacketData(_audioFile, NO, &numBytes, NULL, _readCurPacket, &numPackets, _readBuffer); if (noErr != status) { SCLogGeneralError(@"Cannot read audio data, error code %d", (int)status); return; } if (0 == numPackets) { return; } CMTime PTS = CMTimeMakeWithSeconds(_readCurPacket / _asbd.mSampleRate, 600); _readCurPacket += numPackets; CMBlockBufferRef dataBuffer = NULL; status = CMBlockBufferCreateWithMemoryBlock(NULL, NULL, numBytes, NULL, NULL, 0, numBytes, 0, &dataBuffer); if (kCMBlockBufferNoErr == status) { if (dataBuffer) { CMBlockBufferReplaceDataBytes(_readBuffer, dataBuffer, 0, numBytes); CMSampleBufferRef sampleBuffer = NULL; CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _formatDescription, numPackets, PTS, NULL, &sampleBuffer); if (sampleBuffer) { [_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); } CFRelease(dataBuffer); } } else { SCLogGeneralError(@"Cannot create data buffer, error code %d", (int)status); } int32_t sentinelValue = [_sentinel value]; [_performer perform:^{ SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue); [self _read]; } after:kAudioBufferDurationInSeconds]; } - (void)_teardown { SCAssert([_performer isCurrentPerformer], @""); 
[_sentinel increment]; if (_audioFile) { AudioFileClose(_audioFile); _audioFile = NULL; } _asbd = (AudioStreamBasicDescription){0}; if (_formatDescription) { CFRelease(_formatDescription); _formatDescription = NULL; } _readCurPacket = 0; _readNumPackets = 0; _readNumBytes = 0; if (_readBuffer) { free(_readBuffer); _readBuffer = NULL; } } @end ================================================ FILE: ManagedCapturer/SCManagedAudioStreamer.h ================================================ // // SCManagedAudioStreamer.h // Snapchat // // Created by Ricardo Sánchez-Sáez on 7/28/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import #import @interface SCManagedAudioStreamer : NSObject + (instancetype)sharedInstance; + (instancetype) new NS_UNAVAILABLE; - (instancetype)init NS_UNAVAILABLE; @end ================================================ FILE: ManagedCapturer/SCManagedAudioStreamer.m ================================================ // // SCManagedAudioStreamer.m // Snapchat // // Created by Ricardo Sánchez-Sáez on 7/28/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCManagedAudioStreamer.h" #import "SCAudioCaptureSession.h" #import #import #import #import #import #import static char *const kSCManagedAudioStreamerQueueLabel = "com.snapchat.audioStreamerQueue"; @interface SCManagedAudioStreamer () @end @implementation SCManagedAudioStreamer { SCAudioCaptureSession *_captureSession; SCAudioConfigurationToken *_audioConfiguration; SCManagedAudioDataSourceListenerAnnouncer *_announcer; SCScopedAccess *_scopedMutableAudioSession; } @synthesize performer = _performer; + (instancetype)sharedInstance { static dispatch_once_t onceToken; static SCManagedAudioStreamer *managedAudioStreamer; dispatch_once(&onceToken, ^{ managedAudioStreamer = [[SCManagedAudioStreamer alloc] initSharedInstance]; }); return managedAudioStreamer; } - (instancetype)initSharedInstance { SCTraceStart(); self = [super init]; if (self) { _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedAudioStreamerQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _announcer = [[SCManagedAudioDataSourceListenerAnnouncer alloc] init]; _captureSession = [[SCAudioCaptureSession alloc] init]; _captureSession.delegate = self; } return self; } - (BOOL)isStreaming { return _audioConfiguration != nil; } - (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration { SCTraceStart(); [_performer perform:^{ if (!self.isStreaming) { // Begin audio recording asynchronously. First we need to have the proper audio session category. 
            _audioConfiguration = [SCAudioSessionExperimentAdapter
                configureWith:configuration
                    performer:_performer
                   completion:^(NSError *error) {
                       [_captureSession
                           beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                         completionHandler:NULL];
                   }];
        }
    }];
}

- (void)stopStreaming
{
    [_performer perform:^{
        if (self.isStreaming) {
            [_captureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
            _audioConfiguration = nil;
        }
    }];
}

- (void)addListener:(id<SCManagedAudioDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedAudioDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer removeListener:listener];
}

- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession
      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer];
}

@end
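A plausible call site, sketched under the assumption that the caller conforms to the listener protocol fanned out by SCManagedAudioDataSourceListenerAnnouncer and already holds a built SCAudioConfiguration (neither type is defined in this section):

SCManagedAudioStreamer *streamer = [SCManagedAudioStreamer sharedInstance];
[streamer addListener:self]; // sample buffers then arrive via the announcer fan-out
[streamer startStreamingWithAudioConfiguration:audioConfiguration];
// ... consume CMSampleBufferRefs from the listener callback ...
[streamer stopStreaming];
[streamer removeListener:self];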
// #import "SCManagedCaptureDevice.h" #import @interface SCManagedCaptureDevice (SCManagedDeviceCapacityAnalyzer) @property (nonatomic, strong, readonly) AVCaptureDevice *device; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDevice.h ================================================ // // SCManagedCaptureDevice.h // Snapchat // // Created by Liu Liu on 4/22/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import #import #import #import extern CGFloat const kSCMaxVideoZoomFactor; extern CGFloat const kSCMinVideoZoomFactor; @class SCManagedCaptureDevice; @protocol SCManagedCaptureDeviceDelegate @optional - (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure; - (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint; - (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint; @end @interface SCManagedCaptureDevice : NSObject @property (nonatomic, weak) id delegate; // These two class methods are thread safe + (instancetype)front; + (instancetype)back; + (instancetype)dualCamera; + (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position; + (BOOL)is1080pSupported; + (BOOL)isMixCaptureSupported; + (BOOL)isNightModeSupported; + (BOOL)isEnhancedNightModeSupported; + (CGSize)defaultActiveFormatResolution; + (CGSize)nightModeActiveFormatResolution; - (BOOL)softwareZoom; - (SCManagedCaptureDevicePosition)position; - (BOOL)isAvailable; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDevice.m ================================================ // // SCManagedCaptureDevice.m // Snapchat // // Created by Liu Liu on 4/22/15. // Copyright (c) 2015 Liu Liu. All rights reserved. 
// #import "SCManagedCaptureDevice.h" #import "AVCaptureDevice+ConfigurationLock.h" #import "SCCameraTweaks.h" #import "SCCaptureCommon.h" #import "SCCaptureDeviceResolver.h" #import "SCManagedCaptureDevice+SCManagedCapturer.h" #import "SCManagedCaptureDeviceAutoExposureHandler.h" #import "SCManagedCaptureDeviceAutoFocusHandler.h" #import "SCManagedCaptureDeviceExposureHandler.h" #import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h" #import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h" #import "SCManagedCaptureDeviceFocusHandler.h" #import "SCManagedCapturer.h" #import "SCManagedDeviceCapacityAnalyzer.h" #import #import #import #import static int32_t const kSCManagedCaptureDeviceMaximumHighFrameRate = 30; static int32_t const kSCManagedCaptureDeviceMaximumLowFrameRate = 24; static float const kSCManagedCaptureDevicecSoftwareMaxZoomFactor = 8; CGFloat const kSCMaxVideoZoomFactor = 100; // the max videoZoomFactor acceptable CGFloat const kSCMinVideoZoomFactor = 1; static NSDictionary *SCBestHRSIFormatsForHeights(NSArray *desiredHeights, NSArray *formats, BOOL shouldSupportDepth) { NSMutableDictionary *bestHRSIHeights = [NSMutableDictionary dictionary]; for (NSNumber *height in desiredHeights) { bestHRSIHeights[height] = @0; } NSMutableDictionary *bestHRSIFormats = [NSMutableDictionary dictionary]; for (AVCaptureDeviceFormat *format in formats) { if (@available(ios 11.0, *)) { if (shouldSupportDepth && format.supportedDepthDataFormats.count == 0) { continue; } } if (CMFormatDescriptionGetMediaSubType(format.formatDescription) != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { continue; } CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); NSNumber *height = @(dimensions.height); NSNumber *bestHRSI = bestHRSIHeights[height]; if (bestHRSI) { CMVideoDimensions hrsi = format.highResolutionStillImageDimensions; // If we enabled HSRI, we only intersted in the ones that is good. if (hrsi.height > [bestHRSI intValue]) { bestHRSIHeights[height] = @(hrsi.height); bestHRSIFormats[height] = format; } } } return [bestHRSIFormats copy]; } static inline float SCDegreesToRadians(float theta) { return theta * (float)M_PI / 180.f; } static inline float SCRadiansToDegrees(float theta) { return theta * 180.f / (float)M_PI; } @implementation SCManagedCaptureDevice { AVCaptureDevice *_device; AVCaptureDeviceInput *_deviceInput; AVCaptureDeviceFormat *_defaultFormat; AVCaptureDeviceFormat *_nightFormat; AVCaptureDeviceFormat *_liveVideoStreamingFormat; SCManagedCaptureDevicePosition _devicePosition; // Configurations on the device, shortcut to avoid re-configurations id _exposureHandler; id _focusHandler; FBKVOController *_observeController; // For the private category methods NSError *_error; BOOL _softwareZoom; BOOL _isConnected; BOOL _flashActive; BOOL _torchActive; BOOL _liveVideoStreamingActive; float _zoomFactor; BOOL _isNightModeActive; BOOL _captureDepthData; } @synthesize fieldOfView = _fieldOfView; + (instancetype)front { SCTraceStart(); static dispatch_once_t onceToken; static SCManagedCaptureDevice *front; static dispatch_semaphore_t semaphore; dispatch_once(&onceToken, ^{ semaphore = dispatch_semaphore_create(1); }); /* You can use the tweak below to intentionally kill camera in debug. 
@implementation SCManagedCaptureDevice {
    AVCaptureDevice *_device;
    AVCaptureDeviceInput *_deviceInput;
    AVCaptureDeviceFormat *_defaultFormat;
    AVCaptureDeviceFormat *_nightFormat;
    AVCaptureDeviceFormat *_liveVideoStreamingFormat;
    SCManagedCaptureDevicePosition _devicePosition;

    // Configurations on the device, shortcut to avoid re-configurations
    id<SCManagedCaptureDeviceExposureHandler> _exposureHandler;
    id<SCManagedCaptureDeviceFocusHandler> _focusHandler;

    FBKVOController *_observeController;

    // For the private category methods
    NSError *_error;
    BOOL _softwareZoom;
    BOOL _isConnected;
    BOOL _flashActive;
    BOOL _torchActive;
    BOOL _liveVideoStreamingActive;
    float _zoomFactor;
    BOOL _isNightModeActive;
    BOOL _captureDepthData;
}

@synthesize fieldOfView = _fieldOfView;

+ (instancetype)front
{
    SCTraceStart();
    static dispatch_once_t onceToken;
    static SCManagedCaptureDevice *front;
    static dispatch_semaphore_t semaphore;
    dispatch_once(&onceToken, ^{
        semaphore = dispatch_semaphore_create(1);
    });
    /* You can use the tweak below to intentionally kill camera in debug.
    if (SCIsDebugBuild() && SCCameraTweaksKillFrontCamera()) {
        return nil;
    }
     */
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
    if (!front) {
        AVCaptureDevice *device =
            [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
        if (device) {
            front = [[SCManagedCaptureDevice alloc] initWithDevice:device
                                                    devicePosition:SCManagedCaptureDevicePositionFront];
        }
    }
    dispatch_semaphore_signal(semaphore);
    return front;
}

+ (instancetype)back
{
    SCTraceStart();
    static dispatch_once_t onceToken;
    static SCManagedCaptureDevice *back;
    static dispatch_semaphore_t semaphore;
    dispatch_once(&onceToken, ^{
        semaphore = dispatch_semaphore_create(1);
    });
    /* You can use the tweak below to intentionally kill camera in debug.
    if (SCIsDebugBuild() && SCCameraTweaksKillBackCamera()) {
        return nil;
    }
     */
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
    if (!back) {
        AVCaptureDevice *device =
            [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
        if (device) {
            back = [[SCManagedCaptureDevice alloc] initWithDevice:device
                                                   devicePosition:SCManagedCaptureDevicePositionBack];
        }
    }
    dispatch_semaphore_signal(semaphore);
    return back;
}

+ (SCManagedCaptureDevice *)dualCamera
{
    SCTraceStart();
    static dispatch_once_t onceToken;
    static SCManagedCaptureDevice *dualCamera;
    static dispatch_semaphore_t semaphore;
    dispatch_once(&onceToken, ^{
        semaphore = dispatch_semaphore_create(1);
    });
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
    if (!dualCamera) {
        AVCaptureDevice *device = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
        if (device) {
            dualCamera = [[SCManagedCaptureDevice alloc] initWithDevice:device
                                                         devicePosition:SCManagedCaptureDevicePositionBackDualCamera];
        }
    }
    dispatch_semaphore_signal(semaphore);
    return dualCamera;
}

+ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position
{
    switch (position) {
        case SCManagedCaptureDevicePositionFront:
            return [self front];
        case SCManagedCaptureDevicePositionBack:
            return [self back];
        case SCManagedCaptureDevicePositionBackDualCamera:
            return [self dualCamera];
    }
}

+ (BOOL)is1080pSupported
{
    return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
}

+ (BOOL)isMixCaptureSupported
{
    return !![self front] && !![self back];
}

+ (BOOL)isNightModeSupported
{
    return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6orNewer];
}

+ (BOOL)isEnhancedNightModeSupported
{
    if (SC_AT_LEAST_IOS_11) {
        return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer];
    }
    return NO;
}

+ (CGSize)defaultActiveFormatResolution
{
    if ([SCDeviceName isIphoneX]) {
        return CGSizeMake(kSCManagedCapturerVideoActiveFormatWidth1080p,
                          kSCManagedCapturerVideoActiveFormatHeight1080p);
    }
    return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
                      kSCManagedCapturerDefaultVideoActiveFormatHeight);
}

+ (CGSize)nightModeActiveFormatResolution
{
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) {
        return CGSizeMake(kSCManagedCapturerNightVideoHighResActiveFormatWidth,
                          kSCManagedCapturerNightVideoHighResActiveFormatHeight);
    }
    return CGSizeMake(kSCManagedCapturerNightVideoDefaultResActiveFormatWidth,
                      kSCManagedCapturerNightVideoDefaultResActiveFormatHeight);
}
- (instancetype)initWithDevice:(AVCaptureDevice *)device devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _device = device;
        _devicePosition = devicePosition;

        if (SCCameraTweaksEnableFaceDetectionFocus(devicePosition)) {
            _exposureHandler = [[SCManagedCaptureDeviceFaceDetectionAutoExposureHandler alloc]
                 initWithDevice:device
                pointOfInterest:CGPointMake(0.5, 0.5)
                managedCapturer:[SCManagedCapturer sharedInstance]];
            _focusHandler = [[SCManagedCaptureDeviceFaceDetectionAutoFocusHandler alloc]
                 initWithDevice:device
                pointOfInterest:CGPointMake(0.5, 0.5)
                managedCapturer:[SCManagedCapturer sharedInstance]];
        } else {
            _exposureHandler = [[SCManagedCaptureDeviceAutoExposureHandler alloc] initWithDevice:device
                                                                                 pointOfInterest:CGPointMake(0.5, 0.5)];
            _focusHandler = [[SCManagedCaptureDeviceAutoFocusHandler alloc] initWithDevice:device
                                                                           pointOfInterest:CGPointMake(0.5, 0.5)];
        }
        _observeController = [[FBKVOController alloc] initWithObserver:self];
        [self _setAsExposureListenerForDevice:device];
        if (SCCameraTweaksEnableExposurePointObservation()) {
            [self _observeExposurePointForDevice:device];
        }
        if (SCCameraTweaksEnableFocusPointObservation()) {
            [self _observeFocusPointForDevice:device];
        }
        _zoomFactor = 1.0;
        [self _findSupportedFormats];
    }
    return self;
}

- (SCManagedCaptureDevicePosition)position
{
    return _devicePosition;
}

#pragma mark - Setup and hook up with device

- (BOOL)setDeviceAsInput:(AVCaptureSession *)session
{
    SCTraceStart();
    AVCaptureDeviceInput *deviceInput = [self deviceInput];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    } else {
        NSString *previousSessionPreset = session.sessionPreset;
        session.sessionPreset = AVCaptureSessionPresetInputPriority;
        // Now we surely can add input
        if ([session canAddInput:deviceInput]) {
            [session addInput:deviceInput];
        } else {
            session.sessionPreset = previousSessionPreset;
            return NO;
        }
    }
    [self _enableSubjectAreaChangeMonitoring];
    [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
    if (_device.activeFormat.videoMaxZoomFactor < 1 + 1e-5) {
        _softwareZoom = YES;
    } else {
        _softwareZoom = NO;
        if (_device.videoZoomFactor != _zoomFactor) {
            // Reset the zoom factor
            [self setZoomFactor:_zoomFactor];
        }
    }
    [_exposureHandler setVisible:YES];
    [_focusHandler setVisible:YES];
    _isConnected = YES;
    return YES;
}

- (void)removeDeviceAsInput:(AVCaptureSession *)session
{
    SCTraceStart();
    if (_isConnected) {
        [session removeInput:_deviceInput];
        [_exposureHandler setVisible:NO];
        [_focusHandler setVisible:NO];
        _isConnected = NO;
    }
}

- (void)resetDeviceAsInput
{
    _deviceInput = nil;
    AVCaptureDevice *deviceFound;
    switch (_devicePosition) {
        case SCManagedCaptureDevicePositionFront:
            deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront];
            break;
        case SCManagedCaptureDevicePositionBack:
            deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];
            break;
        case SCManagedCaptureDevicePositionBackDualCamera:
            deviceFound = [[SCCaptureDeviceResolver sharedInstance] findDualCamera];
            break;
    }
    if (deviceFound) {
        _device = deviceFound;
    }
}
#pragma mark - Configurations

- (void)_findSupportedFormats
{
    NSInteger defaultHeight = [SCManagedCaptureDevice defaultActiveFormatResolution].height;
    NSInteger nightHeight = [SCManagedCaptureDevice nightModeActiveFormatResolution].height;
    NSInteger liveVideoStreamingHeight = kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;
    NSArray *heights = @[ @(nightHeight), @(defaultHeight), @(liveVideoStreamingHeight) ];
    BOOL formatsShouldSupportDepth = _devicePosition == SCManagedCaptureDevicePositionBackDualCamera;
    NSDictionary *formats = SCBestHRSIFormatsForHeights(heights, _device.formats, formatsShouldSupportDepth);
    _nightFormat = formats[@(nightHeight)];
    _defaultFormat = formats[@(defaultHeight)];
    _liveVideoStreamingFormat = formats[@(liveVideoStreamingHeight)];
}

- (AVCaptureDeviceFormat *)_bestSupportedFormat
{
    if (_isNightModeActive) {
        return _nightFormat;
    }
    if (_liveVideoStreamingActive) {
        return _liveVideoStreamingFormat;
    }
    return _defaultFormat;
}

- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session
{
    SCTraceStart();
    if (![SCManagedCaptureDevice isNightModeSupported]) {
        return;
    }
    if (_isNightModeActive == nightModeActive) {
        return;
    }
    _isNightModeActive = nightModeActive;
    [self updateActiveFormatWithSession:session];
}

- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session
{
    SCTraceStart();
    if (_liveVideoStreamingActive == liveVideoStreaming) {
        return;
    }
    _liveVideoStreamingActive = liveVideoStreaming;
    [self updateActiveFormatWithSession:session];
}

- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session
{
    SCTraceStart();
    _captureDepthData = captureDepthData;
    [self _findSupportedFormats];
    [self updateActiveFormatWithSession:session];
}

- (void)updateActiveFormatWithSession:(AVCaptureSession *)session
{
    [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480];
    if (_device.videoZoomFactor != _zoomFactor) {
        [self setZoomFactor:_zoomFactor];
    }
}

- (void)_updateActiveFormatWithSession:(AVCaptureSession *)session fallbackPreset:(NSString *)fallbackPreset
{
    AVCaptureDeviceFormat *nextFormat = [self _bestSupportedFormat];
    if (nextFormat && [session canSetSessionPreset:AVCaptureSessionPresetInputPriority]) {
        session.sessionPreset = AVCaptureSessionPresetInputPriority;
        if (nextFormat == _device.activeFormat) {
            // Need to reconfigure the frame rate even though the active format is unchanged
            [_device runTask:@"update frame rate"
                withLockedConfiguration:^() {
                    [self _updateDeviceFrameRate];
                }];
        } else {
            [_device runTask:@"update active format"
                withLockedConfiguration:^() {
                    _device.activeFormat = nextFormat;
                    [self _updateDeviceFrameRate];
                }];
        }
    } else {
        session.sessionPreset = fallbackPreset;
    }
    [self _updateFieldOfView];
}

- (void)_updateDeviceFrameRate
{
    int32_t deviceFrameRate;
    if (_liveVideoStreamingActive) {
        deviceFrameRate = kSCManagedCaptureDeviceMaximumLowFrameRate;
    } else {
        deviceFrameRate = kSCManagedCaptureDeviceMaximumHighFrameRate;
    }
    CMTime frameDuration = CMTimeMake(1, deviceFrameRate);
    if (@available(iOS 11.0, *)) {
        if (_captureDepthData) {
            // Sync the video frame rate to the max depth frame rate (24 fps)
            if (_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject) {
                frameDuration =
                    _device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject.minFrameDuration;
            }
        }
    }
    _device.activeVideoMaxFrameDuration = frameDuration;
    _device.activeVideoMinFrameDuration = frameDuration;
    if (_device.lowLightBoostSupported) {
        _device.automaticallyEnablesLowLightBoostWhenAvailable = YES;
    }
}

- (void)setZoomFactor:(float)zoomFactor
{
    SCTraceStart();
    if (_softwareZoom) {
        // Just remember the software zoom scale
        if (zoomFactor <= kSCManagedCaptureDeviceSoftwareMaxZoomFactor && zoomFactor >= 1) {
            _zoomFactor = zoomFactor;
        }
    } else {
        [_device runTask:@"set zoom factor"
            withLockedConfiguration:^() {
                if (zoomFactor <= _device.activeFormat.videoMaxZoomFactor && zoomFactor >= 1) {
                    _zoomFactor = zoomFactor;
                    if (_device.videoZoomFactor != _zoomFactor) {
                        _device.videoZoomFactor = _zoomFactor;
                    }
                }
            }];
    }
    [self _updateFieldOfView];
}
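// Worked example for the zoom-adjusted field of view computed below: with a native
// horizontal FOV of 60 degrees and _zoomFactor = 2,
//   fov' = 2 * atan(tan(60 / 2 deg) / 2) = 2 * atan(0.57735 / 2) ~= 32.2 degrees,
// i.e. a 2x zoom roughly halves the angular field of view, consistent with the
// linear-focal-length assumption noted in the method body.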
- (void)_updateFieldOfView
{
    float fieldOfView = _device.activeFormat.videoFieldOfView;
    if (_zoomFactor > 1.f) {
        // Adjust the field of view to take the zoom factor into account.
        // Note: this assumes the zoom factor linearly affects the focal length.
        fieldOfView = 2.f * SCRadiansToDegrees(atanf(tanf(SCDegreesToRadians(0.5f * fieldOfView)) / _zoomFactor));
    }
    self.fieldOfView = fieldOfView;
}

- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
    [_exposureHandler setExposurePointOfInterest:pointOfInterest fromUser:fromUser];
}

// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check _focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
    SCTraceStart();
    [_focusHandler setAutofocusPointOfInterest:pointOfInterest];
}

- (void)continuousAutofocus
{
    SCTraceStart();
    [_focusHandler continuousAutofocus];
}

- (void)setRecording:(BOOL)recording
{
    if (SCCameraTweaksSmoothAutoFocusWhileRecording() && [_device isSmoothAutoFocusSupported]) {
        [self _setSmoothFocus:recording];
    } else {
        [self _setFocusLock:recording];
    }
    [_exposureHandler setStableExposure:recording];
}

- (void)_setFocusLock:(BOOL)focusLock
{
    SCTraceStart();
    [_focusHandler setFocusLock:focusLock];
}

- (void)_setSmoothFocus:(BOOL)smoothFocus
{
    SCTraceStart();
    [_focusHandler setSmoothFocus:smoothFocus];
}

- (void)setFlashActive:(BOOL)flashActive
{
    SCTraceStart();
    if (_flashActive != flashActive) {
        if ([_device hasFlash]) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if (flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOn]) {
                [_device runTask:@"set flash active"
                    withLockedConfiguration:^() {
                        _device.flashMode = AVCaptureFlashModeOn;
                    }];
            } else if (!flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOff]) {
                [_device runTask:@"set flash off"
                    withLockedConfiguration:^() {
                        _device.flashMode = AVCaptureFlashModeOff;
                    }];
            }
#pragma clang diagnostic pop
            _flashActive = flashActive;
        } else {
            _flashActive = NO;
        }
    }
}

- (void)setTorchActive:(BOOL)torchActive
{
    SCTraceStart();
    if (_torchActive != torchActive) {
        if ([_device hasTorch]) {
            if (torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOn]) {
                [_device runTask:@"set torch active"
                    withLockedConfiguration:^() {
                        [_device setTorchMode:AVCaptureTorchModeOn];
                    }];
            } else if (!torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOff]) {
                [_device runTask:@"set torch off"
                    withLockedConfiguration:^() {
                        _device.torchMode = AVCaptureTorchModeOff;
                    }];
            }
            _torchActive = torchActive;
        } else {
            _torchActive = NO;
        }
    }
}

#pragma mark - Utilities

- (BOOL)isFlashSupported
{
    return _device.hasFlash;
}

- (BOOL)isTorchSupported
{
    return _device.hasTorch;
}
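// Worked example for the simplest branch of the conversion below
// (AVLayerVideoGravityResize): the device point of interest swaps x and y and
// reverses x, so in a 375x667 view a tap at (93.75, 333.5) maps to
//   (y / height, 1 - x / width) = (333.5 / 667, 1 - 93.75 / 375) = (0.5, 0.75).
// The aspect-fit and aspect-fill branches apply the same swap after first
// compensating for letterboxing or cropping.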
- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates
                         viewSize:(CGSize)viewSize
                     videoGravity:(NSString *)videoGravity
{
    SCTraceStart();
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGRect cleanAperture;
    AVCaptureDeviceInput *deviceInput = [self deviceInput];
    NSArray *ports = [deviceInput.ports copy];
    if ([videoGravity isEqualToString:AVLayerVideoGravityResize]) {
        // Scale, switch x and y, and reverse x
        return CGPointMake(viewCoordinates.y / viewSize.height, 1.f - (viewCoordinates.x / viewSize.width));
    }
    for (AVCaptureInputPort *port in ports) {
        if ([port mediaType] == AVMediaTypeVideo && port.formatDescription) {
            cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);
            CGSize apertureSize = cleanAperture.size;
            CGPoint point = viewCoordinates;
            CGFloat apertureRatio = apertureSize.height / apertureSize.width;
            CGFloat viewRatio = viewSize.width / viewSize.height;
            CGFloat xc = .5f;
            CGFloat yc = .5f;
            if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = viewSize.height;
                    CGFloat x2 = viewSize.height * apertureRatio;
                    CGFloat x1 = viewSize.width;
                    CGFloat blackBar = (x1 - x2) / 2;
                    // If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the
                    // default value returned (.5,.5)
                    if (point.x >= blackBar && point.x <= blackBar + x2) {
                        // Scale (accounting for the letterboxing on the left and right of the video preview),
                        // switch x and y, and reverse x
                        xc = point.y / y2;
                        yc = 1.f - ((point.x - blackBar) / x2);
                    }
                } else {
                    CGFloat y2 = viewSize.width / apertureRatio;
                    CGFloat y1 = viewSize.height;
                    CGFloat x2 = viewSize.width;
                    CGFloat blackBar = (y1 - y2) / 2;
                    // If point is inside letterboxed area, do coordinate conversion. Otherwise, don't change the
                    // default value returned (.5,.5)
                    if (point.y >= blackBar && point.y <= blackBar + y2) {
                        // Scale (accounting for the letterboxing on the top and bottom of the video preview),
                        // switch x and y, and reverse x
                        xc = ((point.y - blackBar) / y2);
                        yc = 1.f - (point.x / x2);
                    }
                }
            } else if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                // Scale, switch x and y, and reverse x
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = apertureSize.width * (viewSize.width / apertureSize.height);
                    xc = (point.y + ((y2 - viewSize.height) / 2.f)) / y2; // Account for cropped height
                    yc = (viewSize.width - point.x) / viewSize.width;
                } else {
                    CGFloat x2 = apertureSize.height * (viewSize.height / apertureSize.width);
                    yc = 1.f - ((point.x + ((x2 - viewSize.width) / 2)) / x2); // Account for cropped width
                    xc = point.y / viewSize.height;
                }
            }
            pointOfInterest = CGPointMake(xc, yc);
            break;
        }
    }
    return pointOfInterest;
}

#pragma mark - SCManagedCapturer friendly methods

- (AVCaptureDevice *)device
{
    return _device;
}

- (AVCaptureDeviceInput *)deviceInput
{
    SCTraceStart();
    if (!_deviceInput) {
        NSError *error = nil;
        _deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_device error:&error];
        if (!_deviceInput) {
            _error = [error copy];
        }
    }
    return _deviceInput;
}

- (NSError *)error
{
    return _error;
}

- (BOOL)softwareZoom
{
    return _softwareZoom;
}

- (BOOL)isConnected
{
    return _isConnected;
}

- (BOOL)flashActive
{
    return _flashActive;
}

- (BOOL)torchActive
{
    return _torchActive;
}

- (float)zoomFactor
{
    return _zoomFactor;
}

- (BOOL)isNightModeActive
{
    return _isNightModeActive;
}

- (BOOL)liveVideoStreamingActive
{
    return _liveVideoStreamingActive;
}

- (BOOL)isAvailable
{
    return [_device isConnected];
}

#pragma mark - Private methods

- (void)_enableSubjectAreaChangeMonitoring
{
    SCTraceStart();
    [_device runTask:@"enable SubjectAreaChangeMonitoring"
        withLockedConfiguration:^() {
            _device.subjectAreaChangeMonitoringEnabled = YES;
        }];
}

- (AVCaptureDeviceFormat *)activeFormat
{
    return _device.activeFormat;
}

#pragma mark - Observe -adjustingExposure

- (void)_setAsExposureListenerForDevice:(AVCaptureDevice *)device
{
    SCTraceStart();
    SCLogCoreCameraInfo(@"Set exposure adjustment KVO for device: %ld", (long)device.position);
    [_observeController observe:device
                        keyPath:@keypath(device, adjustingExposure)
                        options:NSKeyValueObservingOptionNew
                         action:@selector(_adjustingExposureChanged:)];
}
- (void)_adjustingExposureChanged:(NSDictionary *)change
{
    SCTraceStart();
    BOOL adjustingExposure = [change[NSKeyValueChangeNewKey] boolValue];
    SCLogCoreCameraInfo(@"KVO exposure changed to %d", adjustingExposure);
    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeAdjustingExposure:)]) {
        [self.delegate managedCaptureDevice:self didChangeAdjustingExposure:adjustingExposure];
    }
}

#pragma mark - Observe -exposurePointOfInterest

- (void)_observeExposurePointForDevice:(AVCaptureDevice *)device
{
    SCTraceStart();
    SCLogCoreCameraInfo(@"Set exposure point KVO for device: %ld", (long)device.position);
    [_observeController observe:device
                        keyPath:@keypath(device, exposurePointOfInterest)
                        options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                         action:@selector(_exposurePointOfInterestChanged:)];
}

- (void)_exposurePointOfInterestChanged:(NSDictionary *)change
{
    SCTraceStart();
    CGPoint exposurePoint = [change[NSKeyValueChangeNewKey] CGPointValue];
    SCLogCoreCameraInfo(@"KVO exposure point changed to %@", NSStringFromCGPoint(exposurePoint));
    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeExposurePoint:)]) {
        [self.delegate managedCaptureDevice:self didChangeExposurePoint:exposurePoint];
    }
}

#pragma mark - Observe -focusPointOfInterest

- (void)_observeFocusPointForDevice:(AVCaptureDevice *)device
{
    SCTraceStart();
    SCLogCoreCameraInfo(@"Set focus point KVO for device: %ld", (long)device.position);
    [_observeController observe:device
                        keyPath:@keypath(device, focusPointOfInterest)
                        options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                         action:@selector(_focusPointOfInterestChanged:)];
}

- (void)_focusPointOfInterestChanged:(NSDictionary *)change
{
    SCTraceStart();
    CGPoint focusPoint = [change[NSKeyValueChangeNewKey] CGPointValue];
    SCLogCoreCameraInfo(@"KVO focus point changed to %@", NSStringFromCGPoint(focusPoint));
    if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeFocusPoint:)]) {
        [self.delegate managedCaptureDevice:self didChangeFocusPoint:focusPoint];
    }
}

- (void)dealloc
{
    [_observeController unobserveAll];
}

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h
================================================
//
//  SCManagedCaptureDeviceAutoExposureHandler.h
//  Snapchat
//
//  Created by Derek Peirce on 3/21/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDeviceExposureHandler.h"

#import

@interface SCManagedCaptureDeviceAutoExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>

- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;

@end
// #import "SCManagedCaptureDeviceAutoExposureHandler.h" #import "AVCaptureDevice+ConfigurationLock.h" #import "SCManagedCaptureDeviceExposureHandler.h" #import @import AVFoundation; @implementation SCManagedCaptureDeviceAutoExposureHandler { CGPoint _exposurePointOfInterest; AVCaptureDevice *_device; } - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest { if (self = [super init]) { _device = device; _exposurePointOfInterest = pointOfInterest; } return self; } - (CGPoint)getExposurePointOfInterest { return _exposurePointOfInterest; } - (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser { SCTraceStart(); if (!CGPointEqualToPoint(pointOfInterest, _exposurePointOfInterest)) { if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] && [_device isExposurePointOfInterestSupported]) { [_device runTask:@"set exposure" withLockedConfiguration:^() { // Set exposure point before changing focus mode // Be noticed that order does matter _device.exposurePointOfInterest = pointOfInterest; _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; }]; } _exposurePointOfInterest = pointOfInterest; } } - (void)setStableExposure:(BOOL)stableExposure { } - (void)setVisible:(BOOL)visible { } @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h ================================================ // // SCManagedCaptureDeviceAutoFocusHandler.h // Snapchat // // Created by Jiyang Zhu on 3/7/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class is used to adjust focus related parameters of camera, including focus mode and focus point. #import "SCManagedCaptureDeviceFocusHandler.h" #import @interface SCManagedCaptureDeviceAutoFocusHandler : NSObject - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m ================================================ // // SCManagedCaptureDeviceAutoFocusHandler.m // Snapchat // // Created by Jiyang Zhu on 3/7/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // #import "SCManagedCaptureDeviceAutoFocusHandler.h" #import "AVCaptureDevice+ConfigurationLock.h" #import #import @import CoreGraphics; @interface SCManagedCaptureDeviceAutoFocusHandler () @property (nonatomic, assign) CGPoint focusPointOfInterest; @property (nonatomic, strong) AVCaptureDevice *device; @property (nonatomic, assign) BOOL isContinuousAutofocus; @property (nonatomic, assign) BOOL isFocusLock; @end @implementation SCManagedCaptureDeviceAutoFocusHandler - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest { if (self = [super init]) { _device = device; _focusPointOfInterest = pointOfInterest; _isContinuousAutofocus = YES; _isFocusLock = NO; } return self; } - (CGPoint)getFocusPointOfInterest { return self.focusPointOfInterest; } // called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot. // this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc), // therefore we don't have to check self.isFocusLock in this method. 
================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h
================================================
//
//  SCManagedCaptureDeviceAutoFocusHandler.h
//  Snapchat
//
//  Created by Jiyang Zhu on 3/7/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//
//  This class is used to adjust focus related parameters of camera, including focus mode and focus point.

#import "SCManagedCaptureDeviceFocusHandler.h"

#import

@interface SCManagedCaptureDeviceAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>

- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m
================================================
//
//  SCManagedCaptureDeviceAutoFocusHandler.m
//  Snapchat
//
//  Created by Jiyang Zhu on 3/7/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDeviceAutoFocusHandler.h"

#import "AVCaptureDevice+ConfigurationLock.h"

#import
#import

@import CoreGraphics;

@interface SCManagedCaptureDeviceAutoFocusHandler ()

@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL isFocusLock;

@end

@implementation SCManagedCaptureDeviceAutoFocusHandler

- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest
{
    if (self = [super init]) {
        _device = device;
        _focusPointOfInterest = pointOfInterest;
        _isContinuousAutofocus = YES;
        _isFocusLock = NO;
    }
    return self;
}

- (CGPoint)getFocusPointOfInterest
{
    return self.focusPointOfInterest;
}

// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check self.isFocusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) ||
                         self.isContinuousAutofocus);
    // Do the setup immediately if the focus lock is off.
    if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] &&
        [self.device isFocusPointOfInterestSupported]) {
        [self.device runTask:@"set autofocus"
            withLockedConfiguration:^() {
                // Set focus point before changing focus mode
                // Note that the order matters
                self.device.focusPointOfInterest = pointOfInterest;
                self.device.focusMode = AVCaptureFocusModeAutoFocus;
            }];
    }
    self.focusPointOfInterest = pointOfInterest;
    self.isContinuousAutofocus = NO;
}

- (void)continuousAutofocus
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
    if (!self.isFocusLock) {
        // Do the setup immediately if the focus lock is off.
        if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
            [self.device isFocusPointOfInterestSupported]) {
            [self.device runTask:@"set continuous autofocus"
                withLockedConfiguration:^() {
                    // Set focus point before changing focus mode
                    // Note that the order matters
                    self.device.focusPointOfInterest = CGPointMake(0.5, 0.5);
                    self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
                }];
        }
    }
    self.focusPointOfInterest = CGPointMake(0.5, 0.5);
    self.isContinuousAutofocus = YES;
}

- (void)setFocusLock:(BOOL)focusLock
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(self.isFocusLock != focusLock);
    // This is the old lock, we only do focus lock on back camera
    if (focusLock) {
        if ([self.device isFocusModeSupported:AVCaptureFocusModeLocked]) {
            [self.device runTask:@"set focus lock on"
                withLockedConfiguration:^() {
                    self.device.focusMode = AVCaptureFocusModeLocked;
                }];
        }
    } else {
        // Restore to previous autofocus configurations
        if ([self.device isFocusModeSupported:(self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
                                                                          : AVCaptureFocusModeAutoFocus)] &&
            [self.device isFocusPointOfInterestSupported]) {
            [self.device runTask:@"set focus lock off"
                withLockedConfiguration:^() {
                    self.device.focusPointOfInterest = self.focusPointOfInterest;
                    self.device.focusMode = self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus
                                                                       : AVCaptureFocusModeAutoFocus;
                }];
        }
    }
    self.isFocusLock = focusLock;
}

- (void)setSmoothFocus:(BOOL)smoothFocus
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
    [self.device runTask:@"set smooth autofocus"
        withLockedConfiguration:^() {
            [self.device setSmoothAutoFocusEnabled:smoothFocus];
        }];
}

- (void)setVisible:(BOOL)visible
{
}

@end
================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h
================================================
//
//  SCManagedCaptureDeviceDefaultZoomHandler.h
//  Snapchat
//
//  Created by Yu-Kuan Lai on 4/12/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import
#import
#import

@class SCManagedCaptureDevice;
@class SCCaptureResource;

@interface SCManagedCaptureDeviceDefaultZoomHandler : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately;
- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m
================================================
//
//  SCManagedCaptureDeviceDefaultZoomHandler.m
//  Snapchat
//
//  Created by Yu-Kuan Lai on 4/12/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerStateBuilder.h"
#import "SCMetalUtils.h"

#import
#import
#import
#import

@implementation SCManagedCaptureDeviceDefaultZoomHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        _captureResource = captureResource;
    }
    return self;
}

- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
    [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device];
}

- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer] ||
                 [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
    SCAssert(device.softwareZoom, @"Only do software zoom for software zoom device");
    SC_GUARD_ELSE_RETURN(!SCDeviceSupportsMetal());
    float zoomFactor = device.zoomFactor;
    SCLogCapturerInfo(@"Adjusting software zoom factor to: %f", zoomFactor);
    AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer;
    [[SCQueuePerformer mainQueuePerformer] perform:^{
        [CATransaction begin];
        [CATransaction setDisableActions:YES];
        // I ended up needing to change its superlayer transform to get the zoom effect
        videoPreviewLayer.superlayer.affineTransform = CGAffineTransformMakeScale(zoomFactor, zoomFactor);
        [CATransaction commit];
    }];
}

- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device
{
    SCTraceODPCompatibleStart(2);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (device) {
            SCLogCapturerInfo(@"Set zoom factor: %f -> %f", _captureResource.state.zoomFactor, zoomFactor);
            [device setZoomFactor:zoomFactor];
            BOOL zoomFactorChanged = NO;
            // If the device is our current device, send the notification, update the state.
            if (device.isConnected && device == _captureResource.device) {
                if (device.softwareZoom) {
                    [self softwareZoomWithDevice:device];
                }
                _captureResource.state = [[[SCManagedCapturerStateBuilder
                    withManagedCapturerState:_captureResource.state] setZoomFactor:zoomFactor] build];
                zoomFactorChanged = YES;
            }
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                if (zoomFactorChanged) {
                    [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                                 didChangeState:state];
                    [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           didChangeZoomFactor:state];
                }
            });
        }
    }];
}

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h
================================================
//
//  SCManagedCaptureDeviceDefaultZoomHandler_Private.h
//  Snapchat
//
//  Created by Joe Qiao on 04/01/2018.
//

#import "SCManagedCaptureDeviceDefaultZoomHandler.h"

@interface SCManagedCaptureDeviceDefaultZoomHandler ()

@property (nonatomic, weak) SCCaptureResource *captureResource;
@property (nonatomic, weak) SCManagedCaptureDevice *currentDevice;

- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h
================================================
//
//  SCManagedCaptureDeviceExposureHandler.h
//  Snapchat
//
//  Created by Derek Peirce on 3/21/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import
#import

@protocol SCManagedCaptureDeviceExposureHandler

- (CGPoint)getExposurePointOfInterest;
- (void)setStableExposure:(BOOL)stableExposure;
- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser;
- (void)setVisible:(BOOL)visible;

@end
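The protocol above is small enough that a stub conformer shows the whole contract; SCNoopExposureHandler is a hypothetical class for illustration, not part of the leak:

@interface SCNoopExposureHandler : NSObject <SCManagedCaptureDeviceExposureHandler>
@end

@implementation SCNoopExposureHandler

- (CGPoint)getExposurePointOfInterest
{
    return CGPointMake(0.5, 0.5); // center of frame, the default used throughout this directory
}

- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser
{
}

- (void)setStableExposure:(BOOL)stableExposure
{
}

- (void)setVisible:(BOOL)visible
{
}

@end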
// #import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h" #import "AVCaptureDevice+ConfigurationLock.h" #import "SCCameraTweaks.h" #import "SCManagedCaptureDeviceExposureHandler.h" #import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerListener.h" #import #import #import @import AVFoundation; @interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler () @property (nonatomic, strong) AVCaptureDevice *device; @property (nonatomic, weak) id managedCapturer; @property (nonatomic, assign) CGPoint exposurePointOfInterest; @property (nonatomic, assign) BOOL isVisible; @property (nonatomic, copy) NSDictionary *faceBoundsByFaceID; @property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource; @end @implementation SCManagedCaptureDeviceFaceDetectionAutoExposureHandler - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest managedCapturer:(id)managedCapturer { if (self = [super init]) { SCAssert(device, @"AVCaptureDevice should not be nil."); SCAssert(managedCapturer, @"id should not be nil."); _device = device; _exposurePointOfInterest = pointOfInterest; SCManagedCaptureDevicePosition position = (device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront : SCManagedCaptureDevicePositionBack); _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc] initWithDefaultPointOfInterest:pointOfInterest shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)]; _managedCapturer = managedCapturer; } return self; } - (void)dealloc { [_managedCapturer removeListener:self]; } - (CGPoint)getExposurePointOfInterest { return self.exposurePointOfInterest; } - (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser { SCTraceODPCompatibleStart(2); pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:fromUser]; [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest]; } - (void)_actuallySetExposurePointOfInterestIfNeeded:(CGPoint)pointOfInterest { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.exposurePointOfInterest)); if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] && [self.device isExposurePointOfInterestSupported]) { [self.device runTask:@"set exposure" withLockedConfiguration:^() { // Set exposure point before changing exposure mode // Be noticed that order does matter self.device.exposurePointOfInterest = pointOfInterest; self.device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; }]; } self.exposurePointOfInterest = pointOfInterest; } - (void)setStableExposure:(BOOL)stableExposure { } - (void)setVisible:(BOOL)visible { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(_isVisible != visible); _isVisible = visible; if (visible) { [self.managedCapturer addListener:self]; } else { [self.managedCapturer removeListener:self]; [self.resource reset]; } } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(self.isVisible); CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID]; [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest]; } @end ================================================ FILE: 
================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h
================================================
//
//  SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h
//  Snapchat
//
//  Created by Jiyang Zhu on 3/7/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//
//  This class is used to
//  1. adjust focus related parameters of camera, including focus mode and focus point.
//  2. receive detected face bounds, and focus to a preferred face if needed.

#import "SCManagedCaptureDeviceFocusHandler.h"

#import
#import

@protocol SCCapturer;

@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler : NSObject <SCManagedCaptureDeviceFocusHandler>

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithDevice:(AVCaptureDevice *)device
               pointOfInterest:(CGPoint)pointOfInterest
               managedCapturer:(id<SCCapturer>)managedCapturer;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m
================================================
//
//  SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m
//  Snapchat
//
//  Created by Jiyang Zhu on 3/7/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h"

#import "AVCaptureDevice+ConfigurationLock.h"
#import "SCCameraTweaks.h"
#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerListener.h"

#import
#import
#import

@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler () <SCManagedCapturerListener>

@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, weak) id<SCCapturer> managedCapturer;
@property (nonatomic, assign) CGPoint focusPointOfInterest;
@property (nonatomic, assign) BOOL isVisible;
@property (nonatomic, assign) BOOL isContinuousAutofocus;
@property (nonatomic, assign) BOOL focusLock;
@property (nonatomic, copy) NSDictionary *faceBoundsByFaceID;
@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource;

@end

@implementation SCManagedCaptureDeviceFaceDetectionAutoFocusHandler

- (instancetype)initWithDevice:(AVCaptureDevice *)device
               pointOfInterest:(CGPoint)pointOfInterest
               managedCapturer:(id<SCCapturer>)managedCapturer
{
    if (self = [super init]) {
        SCAssert(device, @"AVCaptureDevice should not be nil.");
        SCAssert(managedCapturer, @"id<SCCapturer> should not be nil.");
        _device = device;
        _focusPointOfInterest = pointOfInterest;
        SCManagedCaptureDevicePosition position =
            (device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront
                                                             : SCManagedCaptureDevicePositionBack);
        _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc]
             initWithDefaultPointOfInterest:pointOfInterest
            shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)];
        _managedCapturer = managedCapturer;
    }
    return self;
}

- (CGPoint)getFocusPointOfInterest
{
    return self.focusPointOfInterest;
}

// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot.
// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc),
// therefore we don't have to check self.focusLock in this method.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest
{
    SCTraceODPCompatibleStart(2);
    pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:YES];
    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) ||
                         self.isContinuousAutofocus);
    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
                                     withFocusMode:AVCaptureFocusModeAutoFocus
                                          taskName:@"set autofocus"];
}

- (void)continuousAutofocus
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus);
    CGPoint pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO];
    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
                                     withFocusMode:AVCaptureFocusModeContinuousAutoFocus
                                          taskName:@"set continuous autofocus"];
}

- (void)setFocusLock:(BOOL)focusLock
{
    // Disabled focus lock for face detection and focus handler.
}

- (void)setSmoothFocus:(BOOL)smoothFocus
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled);
    [self.device runTask:@"set smooth autofocus"
        withLockedConfiguration:^() {
            [self.device setSmoothAutoFocusEnabled:smoothFocus];
        }];
}

- (void)setVisible:(BOOL)visible
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(_isVisible != visible);
    self.isVisible = visible;
    if (visible) {
        [[SCManagedCapturer sharedInstance] addListener:self];
    } else {
        [[SCManagedCapturer sharedInstance] removeListener:self];
        [self.resource reset];
    }
}

- (void)_actuallySetFocusPointOfInterestIfNeeded:(CGPoint)pointOfInterest
                                   withFocusMode:(AVCaptureFocusMode)focusMode
                                        taskName:(NSString *)taskName
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) &&
                         [self.device isFocusModeSupported:focusMode] &&
                         [self.device isFocusPointOfInterestSupported]);
    [self.device runTask:taskName
        withLockedConfiguration:^() {
            // Set focus point before changing focus mode
            // Note that the order matters
            self.device.focusPointOfInterest = pointOfInterest;
            self.device.focusMode = focusMode;
        }];
    self.focusPointOfInterest = pointOfInterest;
    self.isContinuousAutofocus = (focusMode == AVCaptureFocusModeContinuousAutoFocus);
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(self.isVisible);
    CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID];
    // If pointOfInterest is equal to CGPointMake(0.5, 0.5), it means no valid face is found, so that we should reset
    // to AVCaptureFocusModeContinuousAutoFocus. Otherwise, focus on the point and set the mode as
    // AVCaptureFocusModeAutoFocus.
    // TODO(Jiyang): Refactor SCManagedCaptureFaceDetectionAdjustingPOIResource to include focusMode and exposureMode.
    AVCaptureFocusMode focusMode = CGPointEqualToPoint(pointOfInterest, CGPointMake(0.5, 0.5))
                                       ? AVCaptureFocusModeContinuousAutoFocus
                                       : AVCaptureFocusModeAutoFocus;
    [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest
                                     withFocusMode:focusMode
                                          taskName:@"set autofocus from face detection"];
}

@end
================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h
================================================
//
//  SCManagedCaptureDeviceFocusHandler.h
//  Snapchat
//
//  Created by Jiyang Zhu on 3/7/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import
#import

@protocol SCManagedCaptureDeviceFocusHandler

- (CGPoint)getFocusPointOfInterest;

/// Called when subject area changes.
- (void)continuousAutofocus;

/// Called when user taps.
- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest;

- (void)setSmoothFocus:(BOOL)smoothFocus;
- (void)setFocusLock:(BOOL)focusLock;
- (void)setVisible:(BOOL)visible;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceHandler.h
================================================
//
//  SCManagedCaptureDeviceHandler.h
//  Snapchat
//
//  Created by Jiyang Zhu on 3/8/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDevice.h"

#import
#import

@class SCCaptureResource;

@interface SCManagedCaptureDeviceHandler : NSObject <SCManagedCaptureDeviceDelegate>

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceHandler.m
================================================
//
//  SCManagedCaptureDeviceHandler.m
//  Snapchat
//
//  Created by Jiyang Zhu on 3/8/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCaptureDeviceHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerState.h"
#import "SCManagedCapturerStateBuilder.h"

#import
#import
#import
#import

@interface SCManagedCaptureDeviceHandler ()

@property (nonatomic, weak) SCCaptureResource *captureResource;

@end

@implementation SCManagedCaptureDeviceHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"SCCaptureResource should not be nil.");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SC_GUARD_ELSE_RETURN(device == _captureResource.device);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"KVO Changes adjustingExposure %d", adjustingExposure);
    [_captureResource.queuePerformer perform:^{
        _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
            setAdjustingExposure:adjustingExposure] build];
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                             didChangeAdjustingExposure:state];
        });
    }];
}

- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint
{
    SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
    SCTraceODPCompatibleStart(2);
    runOnMainThreadAsynchronously(^{
        [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                 didChangeExposurePoint:exposurePoint];
    });
}

- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint
{
    SC_GUARD_ELSE_RETURN(device == self.captureResource.device);
    SCTraceODPCompatibleStart(2);
    runOnMainThreadAsynchronously(^{
        [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                    didChangeFocusPoint:focusPoint];
    });
}

@end
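SCManagedCaptureDeviceHandler repeats the state-propagation shape used by the zoom handlers above: rebuild the immutable SCManagedCapturerState on the capture queue, copy it, and announce the copy on the main thread so listeners never observe a half-mutated state. The recurring pattern, condensed from the code in this section:

[_captureResource.queuePerformer perform:^{
    // Rebuild the immutable state with exactly one field changed.
    _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
        setAdjustingExposure:adjustingExposure] build];
    SCManagedCapturerState *state = [_captureResource.state copy];
    // Fan out on the main thread with the snapshot, not the live object.
    runOnMainThreadAsynchronously(^{
        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
    });
}];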
================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h
================================================
//
//  SCManagedCaptureDeviceLinearInterpolationZoomHandler.h
//  Snapchat
//
//  Created by Joe Qiao on 03/01/2018.
//

#import "SCManagedCaptureDeviceDefaultZoomHandler.h"

@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler

@end

================================================
FILE: ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m
================================================
//
//  SCManagedCaptureDeviceLinearInterpolationZoomHandler.m
//  Snapchat
//
//  Created by Joe Qiao on 03/01/2018.
//

#import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h"

#import "SCCameraTweaks.h"
#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
#import "SCManagedCapturerLogging.h"

#import
#import

@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler ()

@property (nonatomic, strong) CADisplayLink *displayLink;
@property (nonatomic, assign) double timestamp;
@property (nonatomic, assign) float targetFactor;
@property (nonatomic, assign) float intermediateFactor;
@property (nonatomic, assign) int trend;
@property (nonatomic, assign) float stepLength;

@end

@implementation SCManagedCaptureDeviceLinearInterpolationZoomHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super initWithCaptureResource:captureResource];
    if (self) {
        _timestamp = -1.0;
        _targetFactor = 1.0;
        _intermediateFactor = _targetFactor;
        _trend = 1;
        _stepLength = 0.0;
    }
    return self;
}

- (void)dealloc
{
    [self _invalidate];
}

- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
{
    if (self.currentDevice != device) {
        if (_displayLink) {
            // if the device changed, interrupt the smoothing process
            // and reset to the target zoom factor immediately
            [self _resetToZoomFactor:_targetFactor];
        }
        self.currentDevice = device;
        immediately = YES;
    }

    if (immediately) {
        [self _resetToZoomFactor:zoomFactor];
    } else {
        [self _addTargetZoomFactor:zoomFactor];
    }
}

#pragma mark - Configurable

// smoothen if the update time interval is greater than the threshold
- (double)_thresholdTimeIntervalToSmoothen
{
    return SCCameraTweaksSmoothZoomThresholdTime();
}

- (double)_thresholdFactorDiffToSmoothen
{
    return SCCameraTweaksSmoothZoomThresholdFactor();
}

- (int)_intermediateFactorFramesPerSecond
{
    return SCCameraTweaksSmoothZoomIntermediateFramesPerSecond();
}

- (double)_delayTolerantTime
{
    return SCCameraTweaksSmoothZoomDelayTolerantTime();
}

// minimum step length between two intermediate factors,
// the greater the better, as long as it can still provide a 'smooth' experience during the smoothing process
- (float)_minimumStepLength
{
    return SCCameraTweaksSmoothZoomMinStepLength();
}
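// Worked example for the step computation in -_updateDataWithDiff: below: with a factor
// diff of 1.2, a delay-tolerant time of 0.2 s, and 30 intermediate frames per second,
//   stepLength = max(minimumStepLength, 1.2 / (0.2 * 30)) = max(minimumStepLength, 0.2),
// so the display-link ticks in -_nextStep cover the whole difference within the
// tolerated delay, moving in the direction recorded by _trend.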
- (void)_resetToZoomFactor:(float)factor { [self _invalidate]; _timestamp = -1.0; _targetFactor = factor; _intermediateFactor = _targetFactor; [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice]; } - (BOOL)_nextStep { _timestamp = CACurrentMediaTime(); _intermediateFactor += (_trend * _stepLength); BOOL hasNext = YES; if (_trend < 0) { _intermediateFactor = MAX(_intermediateFactor, _targetFactor); } else { _intermediateFactor = MIN(_intermediateFactor, _targetFactor); } SCLogCapturerInfo(@"Smooth Zoom - [3] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor); [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice]; if (SCFloatEqual(_intermediateFactor, _targetFactor)) { // finished smoothing [self _invalidate]; hasNext = NO; } return hasNext; } - (void)_invalidate { [_displayLink invalidate]; _displayLink = nil; _trend = 1; _stepLength = 0.0; } - (void)_updateDataWithDiff:(CGFloat)diff { _trend = diff < 0.0 ? -1 : 1; _stepLength = MAX(_stepLength, MAX([self _minimumStepLength], ABS(diff) / ([self _delayTolerantTime] * [self _intermediateFactorFramesPerSecond]))); } - (BOOL)_isDuringSmoothingProcess { return (_displayLink ? YES : NO); } @end
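Both interpolation handlers in this directory inherit from SCManagedCaptureDeviceDefaultZoomHandler, so a call site only picks a smoothing strategy and drives the shared setZoomFactor:forDevice:immediately: entry point. A minimal sketch of that calling convention, assuming a hypothetical gesture-handling class (only the handler API is taken from the code above):

// Hypothetical call site: continuous pinch updates take the smoothing path,
// while a camera flip snaps to the new factor with no interpolation.
- (void)pinchDidUpdateWithZoomFactor:(CGFloat)zoomFactor
{
    [_zoomHandler setZoomFactor:zoomFactor forDevice:self.currentDevice immediately:NO];
}

- (void)didFlipToDevice:(SCManagedCaptureDevice *)device
{
    [_zoomHandler setZoomFactor:1.0 forDevice:device immediately:YES];
}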
// #import "SCManagedCaptureDeviceLockOnRecordExposureHandler.h" #import "AVCaptureDevice+ConfigurationLock.h" #import "SCExposureState.h" #import "SCManagedCaptureDeviceExposureHandler.h" #import @import AVFoundation; @implementation SCManagedCaptureDeviceLockOnRecordExposureHandler { CGPoint _exposurePointOfInterest; AVCaptureDevice *_device; // allows the exposure to change when the user taps to refocus BOOL _allowTap; SCExposureState *_exposureState; } - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest allowTap:(BOOL)allowTap { if (self = [super init]) { _device = device; _exposurePointOfInterest = pointOfInterest; _allowTap = allowTap; } return self; } - (CGPoint)getExposurePointOfInterest { return _exposurePointOfInterest; } - (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser { SCTraceStart(); BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked || _device.exposureMode == AVCaptureExposureModeCustom || _device.exposureMode == AVCaptureExposureModeAutoExpose; if (!locked || (fromUser && _allowTap)) { AVCaptureExposureMode exposureMode = (locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure); if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) { [_device runTask:@"set exposure point" withLockedConfiguration:^() { // Set exposure point before changing focus mode // Be noticed that order does matter _device.exposurePointOfInterest = pointOfInterest; _device.exposureMode = exposureMode; }]; } _exposurePointOfInterest = pointOfInterest; } } - (void)setStableExposure:(BOOL)stableExposure { AVCaptureExposureMode exposureMode = stableExposure ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure; if ([_device isExposureModeSupported:exposureMode]) { [_device runTask:@"set stable exposure" withLockedConfiguration:^() { _device.exposureMode = exposureMode; }]; } } - (void)setVisible:(BOOL)visible { if (visible) { if (_device.exposureMode == AVCaptureExposureModeLocked || _device.exposureMode == AVCaptureExposureModeCustom) { [_exposureState applyISOAndExposureDurationToDevice:_device]; } } else { _exposureState = [[SCExposureState alloc] initWithDevice:_device]; } } @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h ================================================ // // SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h // Snapchat // // Created by Yu-Kuan Lai on 4/12/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedCaptureDeviceDefaultZoomHandler.h" @interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m ================================================ // // SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m // Snapchat // // Created by Yu-Kuan Lai on 4/12/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
// https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter // #import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h" #import "SCManagedCaptureDevice.h" #import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h" #import #import static NSUInteger const kSCSavitzkyGolayWindowSize = 9; static CGFloat const kSCUpperSharpZoomThreshold = 1.15; @interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler () @property (nonatomic, strong) NSMutableArray *zoomFactorHistoryArray; @end @implementation SCManagedCaptureDeviceSavitzkyGolayZoomHandler - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { self = [super initWithCaptureResource:captureResource]; if (self) { _zoomFactorHistoryArray = [[NSMutableArray alloc] init]; } return self; } - (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately { if (self.currentDevice != device) { // reset if the device changed self.currentDevice = device; [self _resetZoomFactor:zoomFactor forDevice:self.currentDevice]; return; } if (immediately || zoomFactor == 1 || _zoomFactorHistoryArray.count == 0) { // reset if zoomFactor is 1 or this is the first data point [self _resetZoomFactor:zoomFactor forDevice:device]; return; } CGFloat lastVal = [[_zoomFactorHistoryArray lastObject] floatValue]; CGFloat upperThreshold = lastVal * kSCUpperSharpZoomThreshold; if (zoomFactor > upperThreshold) { // sharp change in zoomFactor, reset [self _resetZoomFactor:zoomFactor forDevice:device]; return; } [_zoomFactorHistoryArray addObject:@(zoomFactor)]; if ([_zoomFactorHistoryArray count] > kSCSavitzkyGolayWindowSize) { [_zoomFactorHistoryArray removeObjectAtIndex:0]; } float filteredZoomFactor = SC_CLAMP([self _savitzkyGolayFilteredZoomFactor], kSCMinVideoZoomFactor, kSCMaxVideoZoomFactor); [self _setZoomFactor:filteredZoomFactor forManagedCaptureDevice:device]; } - (CGFloat)_savitzkyGolayFilteredZoomFactor { if ([_zoomFactorHistoryArray count] == kSCSavitzkyGolayWindowSize) { // 9-point Savitzky-Golay smoothing weights (-21, 14, 39, 54, 59, 54, 39, 14, -21) / 231 CGFloat filteredZoomFactor = 59 * [_zoomFactorHistoryArray[4] floatValue] + 54 * ([_zoomFactorHistoryArray[3] floatValue] + [_zoomFactorHistoryArray[5] floatValue]) + 39 * ([_zoomFactorHistoryArray[2] floatValue] + [_zoomFactorHistoryArray[6] floatValue]) + 14 * ([_zoomFactorHistoryArray[1] floatValue] + [_zoomFactorHistoryArray[7] floatValue]) - 21 * ([_zoomFactorHistoryArray[0] floatValue] + [_zoomFactorHistoryArray[8] floatValue]); filteredZoomFactor /= 231; return filteredZoomFactor; } else { return [[_zoomFactorHistoryArray lastObject] floatValue]; // use zoomFactor directly if we have fewer than 9 samples } } - (void)_resetZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device { [_zoomFactorHistoryArray removeAllObjects]; [_zoomFactorHistoryArray addObject:@(zoomFactor)]; [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device]; } @end
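For reference, a standalone sketch of the convolution _savitzkyGolayFilteredZoomFactor performs (the helper name is illustrative, not part of the source): the nine history samples are weighted with the standard quadratic/cubic Savitzky-Golay window, and the weights sum to 231, so dividing by 231 leaves a constant input unchanged.

static CGFloat SCSavitzkyGolaySmooth9(const CGFloat w[9])
{
    // Least-squares quadratic fit over the 9-sample window, evaluated at the
    // center sample; the weights satisfy (-21 + 14 + 39 + 54) * 2 + 59 == 231.
    return (-21 * (w[0] + w[8]) + 14 * (w[1] + w[7]) + 39 * (w[2] + w[6]) +
            54 * (w[3] + w[5]) + 59 * w[4]) / 231.0;
}

Note that the fit is centered on w[4] rather than the newest sample w[8], so the smoothed zoom trails the most recent pinch reading by four samples, trading a little latency for stability.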
================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h ================================================ // // SCManagedCaptureDeviceSubjectAreaHandler.h // Snapchat // // Created by Xiaokang Liu on 19/03/2018. // // This class handles the AVCaptureDeviceSubjectAreaDidChangeNotification notification for SCManagedCapturer, // resetting the device's settings when the subject area changes. #import #import @class SCCaptureResource; @protocol SCCapturer; @interface SCManagedCaptureDeviceSubjectAreaHandler : NSObject SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER; - (void)stopObserving; - (void)startObserving; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m ================================================ // // SCManagedCaptureDeviceSubjectAreaHandler.m // Snapchat // // Created by Xiaokang Liu on 19/03/2018. // #import "SCManagedCaptureDeviceSubjectAreaHandler.h" #import "SCCameraTweaks.h" #import "SCCaptureResource.h" #import "SCCaptureWorker.h" #import "SCManagedCaptureDevice+SCManagedCapturer.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerState.h" #import #import @interface SCManagedCaptureDeviceSubjectAreaHandler () { __weak SCCaptureResource *_captureResource; } @end @implementation SCManagedCaptureDeviceSubjectAreaHandler - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { self = [super init]; if (self) { SCAssert(captureResource, @"SCCaptureResource should not be nil."); _captureResource = captureResource; } return self; } - (void)stopObserving { [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; } - (void)startObserving { [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; } #pragma mark - Private methods - (void)_subjectAreaDidChange:(NSNotification *)notification { [_captureResource.queuePerformer perform:^{ if (_captureResource.device.isConnected && !_captureResource.state.arSessionActive) { // Reset to continuous autofocus when the subject area changed [_captureResource.device continuousAutofocus]; [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; if (SCCameraTweaksEnablePortraitModeAutofocus()) { [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5) completionHandler:nil resource:_captureResource]; } } }]; } @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h ================================================ // // SCManagedCaptureDeviceThresholdExposureHandler.h // Snapchat // // Created by Derek Peirce on 4/11/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedCaptureDeviceExposureHandler.h" #import @interface SCManagedCaptureDeviceThresholdExposureHandler : NSObject - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest threshold:(CGFloat)threshold; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m ================================================ // // SCManagedCaptureDeviceThresholdExposureHandler.m // Snapchat // // Created by Derek Peirce on 4/11/17. // Copyright © 2017 Snapchat, Inc. All rights reserved.
// #import "SCManagedCaptureDeviceThresholdExposureHandler.h" #import "AVCaptureDevice+ConfigurationLock.h" #import "SCCameraTweaks.h" #import "SCExposureState.h" #import "SCManagedCaptureDeviceExposureHandler.h" #import #import @import AVFoundation; @implementation SCManagedCaptureDeviceThresholdExposureHandler { AVCaptureDevice *_device; CGPoint _exposurePointOfInterest; CGFloat _threshold; // allows the exposure to change when the user taps to refocus SCExposureState *_exposureState; FBKVOController *_kvoController; } - (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest threshold:(CGFloat)threshold { if (self = [super init]) { _device = device; _exposurePointOfInterest = pointOfInterest; _threshold = threshold; _kvoController = [FBKVOController controllerWithObserver:self]; @weakify(self); [_kvoController observe:device keyPath:NSStringFromSelector(@selector(exposureMode)) options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew block:^(id observer, id object, NSDictionary *change) { @strongify(self); AVCaptureExposureMode old = (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeOldKey] intValue]; AVCaptureExposureMode new = (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeNewKey] intValue]; if (old == AVCaptureExposureModeAutoExpose && new == AVCaptureExposureModeLocked) { // auto expose is done, go back to custom self->_exposureState = [[SCExposureState alloc] initWithDevice:self->_device]; [self->_exposureState applyISOAndExposureDurationToDevice:self->_device]; } }]; [_kvoController observe:device keyPath:NSStringFromSelector(@selector(exposureTargetOffset)) options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew block:^(id observer, id object, NSDictionary *change) { @strongify(self); if (self->_device.exposureMode == AVCaptureExposureModeCustom) { CGFloat offset = [(NSNumber *)change[NSKeyValueChangeOldKey] floatValue]; if (fabs(offset) > self->_threshold) { [self->_device runTask:@"set exposure point" withLockedConfiguration:^() { // Set exposure point before changing focus mode // Be noticed that order does matter self->_device.exposurePointOfInterest = CGPointMake(0.5, 0.5); self->_device.exposureMode = AVCaptureExposureModeAutoExpose; }]; } } }]; } return self; } - (CGPoint)getExposurePointOfInterest { return _exposurePointOfInterest; } - (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser { SCTraceStart(); BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked || _device.exposureMode == AVCaptureExposureModeCustom || _device.exposureMode == AVCaptureExposureModeAutoExpose; if (!locked || fromUser) { AVCaptureExposureMode exposureMode = (locked ? 
- (CGPoint)getExposurePointOfInterest { return _exposurePointOfInterest; } - (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser { SCTraceStart(); BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked || _device.exposureMode == AVCaptureExposureModeCustom || _device.exposureMode == AVCaptureExposureModeAutoExpose; if (!locked || fromUser) { AVCaptureExposureMode exposureMode = (locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure); if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) { [_device runTask:@"set exposure point" withLockedConfiguration:^() { // Set the exposure point before changing the exposure mode; // note that the order matters _device.exposurePointOfInterest = pointOfInterest; _device.exposureMode = exposureMode; }]; } _exposurePointOfInterest = pointOfInterest; } } - (void)setStableExposure:(BOOL)stableExposure { if (stableExposure) { _exposureState = [[SCExposureState alloc] initWithDevice:_device]; [_exposureState applyISOAndExposureDurationToDevice:_device]; } else { AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure; if ([_device isExposureModeSupported:exposureMode]) { [_device runTask:@"set exposure point" withLockedConfiguration:^() { _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; }]; } } } - (void)setVisible:(BOOL)visible { if (visible) { if (_device.exposureMode == AVCaptureExposureModeLocked || _device.exposureMode == AVCaptureExposureModeCustom) { [_exposureState applyISOAndExposureDurationToDevice:_device]; } } else { _exposureState = [[SCExposureState alloc] initWithDevice:_device]; } } @end ================================================ FILE: ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h ================================================ // // SCManagedCaptureFaceDetectionAdjustingPOIResource.h // Snapchat // // Created by Jiyang Zhu on 3/7/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // // This class keeps several properties for face detection and focus/exposure. It provides methods that help // FaceDetectionAutoFocusHandler and FaceDetectionAutoExposureHandler deal with point of interest setting events // from user taps, subject area changes, and face detection, by updating itself and returning the actual point of // interest. #import #import typedef NS_ENUM(NSInteger, SCManagedCaptureFaceDetectionAdjustingPOIMode) { SCManagedCaptureFaceDetectionAdjustingPOIModeNone = 0, SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace, SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace, }; @interface SCManagedCaptureFaceDetectionAdjustingPOIResource : NSObject @property (nonatomic, assign) CGPoint pointOfInterest; @property (nonatomic, strong) NSDictionary *faceBoundsByFaceID; @property (nonatomic, assign) SCManagedCaptureFaceDetectionAdjustingPOIMode adjustingPOIMode; @property (nonatomic, assign) BOOL shouldTargetOnFaceAutomatically; @property (nonatomic, strong) NSNumber *targetingFaceID; @property (nonatomic, assign) CGRect targetingFaceBounds; - (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically; - (void)reset; /** Update SCManagedCaptureFaceDetectionAdjustingPOIResource when a new POI adjustment comes. It will find the face that the proposedPoint belongs to and return the center of that face, if the adjustingPOIMode and fromUser meet the requirements. @param proposedPoint The point of interest that the upper level wants to set. @param fromUser Whether the setting comes from a user tap or not. @return The actual point of interest that should be applied. */ - (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser; /** Update SCManagedCaptureFaceDetectionAdjustingPOIResource when newly detected face bounds arrive.
@param faceBoundsByFaceID A dictionary. Key: FaceID as NSNumber. Value: FaceBounds as CGRect. @return The actual point of interest that should be applied. */ - (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary *)faceBoundsByFaceID; @end ================================================ FILE: ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m ================================================ // // SCManagedCaptureFaceDetectionAdjustingPOIResource.m // Snapchat // // Created by Jiyang Zhu on 3/7/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // #import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h" #import #import #import @implementation SCManagedCaptureFaceDetectionAdjustingPOIResource { CGPoint _defaultPointOfInterest; } #pragma mark - Public Methods - (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically { if (self = [super init]) { _pointOfInterest = pointOfInterest; _defaultPointOfInterest = pointOfInterest; _shouldTargetOnFaceAutomatically = shouldTargetOnFaceAutomatically; } return self; } - (void)reset { SCTraceODPCompatibleStart(2); self.adjustingPOIMode = SCManagedCaptureFaceDetectionAdjustingPOIModeNone; self.targetingFaceID = nil; self.targetingFaceBounds = CGRectZero; self.faceBoundsByFaceID = nil; self.pointOfInterest = _defaultPointOfInterest; } - (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser { SCTraceODPCompatibleStart(2); if (fromUser) { NSNumber *faceID = [self _getFaceIDOfFaceBoundsContainingPoint:proposedPoint fromFaceBounds:self.faceBoundsByFaceID]; if (faceID && [faceID integerValue] >= 0) { CGPoint point = [self _getPointOfInterestWithFaceID:faceID fromFaceBounds:self.faceBoundsByFaceID]; if ([self _isPointOfInterestValid:point]) { [self _setPointOfInterest:point targetingFaceID:faceID adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace]; } else { [self _setPointOfInterest:proposedPoint targetingFaceID:nil adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace]; } } else { [self _setPointOfInterest:proposedPoint targetingFaceID:nil adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace]; } } else { [self _setPointOfInterest:proposedPoint targetingFaceID:nil adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeNone]; } return self.pointOfInterest; } - (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); self.faceBoundsByFaceID = faceBoundsByFaceID; switch (self.adjustingPOIMode) { case SCManagedCaptureFaceDetectionAdjustingPOIModeNone: { if (self.shouldTargetOnFaceAutomatically) { [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID]; } } break; case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace: { BOOL isFocusingOnCurrentTargetingFaceSuccess = [self _focusOnFaceWithTargetFaceID:self.targetingFaceID inFaceBounds:self.faceBoundsByFaceID]; if (!isFocusingOnCurrentTargetingFaceSuccess && self.shouldTargetOnFaceAutomatically) { // If the targeted face has disappeared, and shouldTargetOnFaceAutomatically is YES, automatically target on // the next preferred face. [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID]; } } break; case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace: // The point of interest should be fixed at a non-face point where user tapped before. 
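// To summarize the three cases above: in ModeNone the resource follows the largest detected
// face automatically (when shouldTargetOnFaceAutomatically is YES); in FixedOnPointWithFace it
// keeps tracking the tapped face until that face disappears, then falls back to the preferred
// face or resets; in FixedOnPointWithoutFace it holds the tapped non-face point and ignores
// face updates entirely.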
break; } return self.pointOfInterest; } #pragma mark - Internal Methods - (BOOL)_focusOnPreferredFaceInFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); NSNumber *preferredFaceID = [self _getPreferredFaceIDFromFaceBounds:faceBoundsByFaceID]; return [self _focusOnFaceWithTargetFaceID:preferredFaceID inFaceBounds:faceBoundsByFaceID]; } - (BOOL)_focusOnFaceWithTargetFaceID:(NSNumber *)preferredFaceID inFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN_VALUE(preferredFaceID, NO); NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:preferredFaceID]; if (faceBoundsValue) { CGRect faceBounds = [faceBoundsValue CGRectValue]; CGPoint proposedPoint = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds)); if ([self _isPointOfInterestValid:proposedPoint]) { if ([self _shouldChangeToNewPoint:proposedPoint withNewFaceID:preferredFaceID newFaceBounds:faceBounds]) { [self _setPointOfInterest:proposedPoint targetingFaceID:preferredFaceID adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace]; } return YES; } } [self reset]; return NO; } - (void)_setPointOfInterest:(CGPoint)pointOfInterest targetingFaceID:(NSNumber *)targetingFaceID adjustingPOIMode:(SCManagedCaptureFaceDetectionAdjustingPOIMode)adjustingPOIMode { SCTraceODPCompatibleStart(2); self.pointOfInterest = pointOfInterest; self.targetingFaceID = targetingFaceID; if (targetingFaceID) { // If targetingFaceID exists, record the current face bounds. self.targetingFaceBounds = [[self.faceBoundsByFaceID objectForKey:targetingFaceID] CGRectValue]; } else { // Otherwise, reset targetingFaceBounds to zero. self.targetingFaceBounds = CGRectZero; } self.adjustingPOIMode = adjustingPOIMode; } - (BOOL)_isPointOfInterestValid:(CGPoint)pointOfInterest { return (pointOfInterest.x >= 0 && pointOfInterest.x <= 1 && pointOfInterest.y >= 0 && pointOfInterest.y <= 1); } - (NSNumber *)_getPreferredFaceIDFromFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil); // Find the bounds with the max area. __block NSNumber *preferredFaceID = nil; __block CGFloat maxArea = 0; [faceBoundsByFaceID enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) { CGRect faceBounds = [obj CGRectValue]; CGFloat area = CGRectGetWidth(faceBounds) * CGRectGetHeight(faceBounds); if (area > maxArea) { preferredFaceID = key; maxArea = area; } }]; return preferredFaceID; } - (CGPoint)_getPointOfInterestWithFaceID:(NSNumber *)faceID fromFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCTraceODPCompatibleStart(2); NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:faceID]; if (faceBoundsValue) { CGRect faceBounds = [faceBoundsValue CGRectValue]; CGPoint point = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds)); return point; } else { return CGPointMake(-1, -1); // An invalid point. } } /** Setting a new focus/exposure point incurs high CPU usage, so we only set a new POI when we have to. This method returns whether setting this new point is necessary. If not, there is no need to change the POI. */ - (BOOL)_shouldChangeToNewPoint:(CGPoint)newPoint withNewFaceID:(NSNumber *)newFaceID newFaceBounds:(CGRect)newFaceBounds { SCTraceODPCompatibleStart(2); BOOL shouldChange = NO; if (!newFaceID || !self.targetingFaceID || ![newFaceID isEqualToNumber:self.targetingFaceID]) { // Return YES if it is a new face. shouldChange = YES; } else if (CGRectEqualToRect(self.targetingFaceBounds, CGRectZero) || !CGRectContainsPoint(self.targetingFaceBounds, newPoint)) { // Return YES if the new point is out of the current face bounds. shouldChange = YES; } else { CGFloat currentBoundsArea = CGRectGetWidth(self.targetingFaceBounds) * CGRectGetHeight(self.targetingFaceBounds); CGFloat newBoundsArea = CGRectGetWidth(newFaceBounds) * CGRectGetHeight(newFaceBounds); if (newBoundsArea >= currentBoundsArea * 1.2 || newBoundsArea <= currentBoundsArea * 0.8) { // Return YES if the area of the new bounds is more than 20% larger or smaller than the current one. shouldChange = YES; } } return shouldChange; } - (NSNumber *)_getFaceIDOfFaceBoundsContainingPoint:(CGPoint)point fromFaceBounds:(NSDictionary *)faceBoundsByFaceID { SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil); __block NSNumber *faceID = nil; [faceBoundsByFaceID enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) { CGRect faceBounds = [obj CGRectValue]; if (CGRectContainsPoint(faceBounds, point)) { faceID = key; *stop = YES; } }]; return faceID; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewLayerController.h ================================================ // // SCManagedCapturePreviewLayerController.h // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import #import #import #import #import #import #import @protocol SCCapturer; @class LSAGLView, SCBlackCameraDetector, SCManagedCapturePreviewLayerController; @protocol SCManagedCapturePreviewLayerControllerDelegate - (SCBlackCameraDetector *)blackCameraDetectorForManagedCapturePreviewLayerController: (SCManagedCapturePreviewLayerController *)controller; - (sc_create_g2s_ticket_f)g2sTicketForManagedCapturePreviewLayerController: (SCManagedCapturePreviewLayerController *)controller; @end /** * SCManagedCapturePreviewLayerController controls the display of frames in a view. The controller has 3 * different methods for this. * AVCaptureVideoPreviewLayer: This is a feed coming straight from the camera and does not allow any * image processing or modification of the frames displayed. * LSAGLView: OpenGL based view for displaying video that is being processed (Lenses etc.) * CAMetalLayer: Metal layer drawing textures on a vertex quad for display on screen. */ @interface SCManagedCapturePreviewLayerController : NSObject @property (nonatomic, strong, readonly) UIView *view; @property (nonatomic, strong, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer; @property (nonatomic, strong, readonly) LSAGLView *videoPreviewGLView; @property (nonatomic, weak) id delegate; + (instancetype)sharedInstance; - (void)pause; - (void)resume; - (UIView *)newStandInViewWithRect:(CGRect)rect; - (void)setManagedCapturer:(id)managedCapturer; // This method returns a token that you can hold on to. As long as the token is held, // the outdated view will be kept around unless the app is backgrounded. - (NSString *)keepDisplayingOutdatedPreview; // End displaying the outdated frame with a previously issued keep token. Once no one holds // any token anymore, the outdated view is flushed. - (void)endDisplayingOutdatedPreview:(NSString *)keepToken; // Create views for Metal; this method needs to be called on the main thread. - (void)setupPreviewLayer; // Create the render pipeline state and set up shaders for Metal; this needs to be called off the main thread.
- (void)setupRenderPipeline; - (void)applicationDidEnterBackground; - (void)applicationWillEnterForeground; - (void)applicationWillResignActive; - (void)applicationDidBecomeActive; @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewLayerController.m ================================================ // // SCManagedCapturePreviewLayerController.m // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "SCManagedCapturePreviewLayerController.h" #import "SCBlackCameraDetector.h" #import "SCCameraTweaks.h" #import "SCManagedCapturePreviewView.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerListener.h" #import "SCManagedCapturerUtils.h" #import "SCMetalUtils.h" #import #import #import #import #import #import #import #import #import #import #define SCLogPreviewLayerInfo(fmt, ...) SCLogCoreCameraInfo(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) #define SCLogPreviewLayerWarning(fmt, ...) SCLogCoreCameraWarning(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) #define SCLogPreviewLayerError(fmt, ...) SCLogCoreCameraError(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) const static CGSize kSCManagedCapturePreviewDefaultRenderSize = { .width = 720, .height = 1280, }; const static CGSize kSCManagedCapturePreviewRenderSize1080p = { .width = 1080, .height = 1920, }; #if !TARGET_IPHONE_SIMULATOR static NSInteger const kSCMetalCannotAcquireDrawableLimit = 2; @interface CAMetalLayer (SCSecretFature) // Call discardContents. - (void)sc_secretFeature; @end @implementation CAMetalLayer (SCSecretFature) - (void)sc_secretFeature { // "discardContents" char buffer[] = {0x9b, 0x96, 0x8c, 0x9c, 0x9e, 0x8d, 0x9b, 0xbc, 0x90, 0x91, 0x8b, 0x9a, 0x91, 0x8b, 0x8c, 0}; unsigned long len = strlen(buffer); for (unsigned idx = 0; idx < len; ++idx) { buffer[idx] = ~buffer[idx]; } SEL selector = NSSelectorFromString([NSString stringWithUTF8String:buffer]); if ([self respondsToSelector:selector]) { NSMethodSignature *signature = [self methodSignatureForSelector:selector]; NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature]; [invocation setTarget:self]; [invocation setSelector:selector]; [invocation invoke]; } // For anyone curious, here is the actual implementation for discardContents in 10.3 (With Hopper v4, arm64) // From glance, this seems pretty safe to call. 
// void -[CAMetalLayer(CAMetalLayerPrivate) discardContents](int arg0) // { // *(r31 + 0xffffffffffffffe0) = r20; // *(0xfffffffffffffff0 + r31) = r19; // r31 = r31 + 0xffffffffffffffe0; // *(r31 + 0x10) = r29; // *(0x20 + r31) = r30; // r29 = r31 + 0x10; // r19 = *(arg0 + sign_extend_64(*(int32_t *)0x1a6300510)); // if (r19 != 0x0) { // r0 = loc_1807079dc(*0x1a7811fc8, r19); // r0 = _CAImageQueueConsumeUnconsumed(*(r19 + 0x10)); // r0 = _CAImageQueueFlush(*(r19 + 0x10)); // r29 = *(r31 + 0x10); // r30 = *(0x20 + r31); // r20 = *r31; // r19 = *(r31 + 0x10); // r31 = r31 + 0x20; // r0 = loc_1807079dc(*0x1a7811fc8, zero_extend_64(0x0)); // } else { // r29 = *(r31 + 0x10); // r30 = *(0x20 + r31); // r20 = *r31; // r19 = *(r31 + 0x10); // r31 = r31 + 0x20; // } // return; // } } @end #endif @interface SCManagedCapturePreviewLayerController () @property (nonatomic) BOOL renderSuspended; @end @implementation SCManagedCapturePreviewLayerController { SCManagedCapturePreviewView *_view; CGSize _drawableSize; SCQueuePerformer *_performer; FBKVOController *_renderingKVO; #if !TARGET_IPHONE_SIMULATOR CAMetalLayer *_metalLayer; id _commandQueue; id _renderPipelineState; CVMetalTextureCacheRef _textureCache; dispatch_semaphore_t _commandBufferSemaphore; // If the current view contains an outdated display (or any display) BOOL _containOutdatedPreview; // If we called empty outdated display already, but for some reason, hasn't emptied it yet. BOOL _requireToFlushOutdatedPreview; NSMutableSet *_tokenSet; NSUInteger _cannotAcquireDrawable; #endif } + (instancetype)sharedInstance { static dispatch_once_t onceToken; static SCManagedCapturePreviewLayerController *managedCapturePreviewLayerController; dispatch_once(&onceToken, ^{ managedCapturePreviewLayerController = [[SCManagedCapturePreviewLayerController alloc] init]; }); return managedCapturePreviewLayerController; } - (instancetype)init { self = [super init]; if (self) { #if !TARGET_IPHONE_SIMULATOR // We only allow one renders at a time (Sorry, no double / triple buffering). // It has to be created early here, otherwise integrity of other parts of the code is not // guaranteed. // TODO: I need to reason more about the initialization sequence. _commandBufferSemaphore = dispatch_semaphore_create(1); // Set _renderSuspended to be YES so that we won't render until it is fully setup. _renderSuspended = YES; _tokenSet = [NSMutableSet set]; #endif // If the screen is less than default size, we should fallback. CGFloat nativeScale = [UIScreen mainScreen].nativeScale; CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; CGSize renderSize = [SCDeviceName isIphoneX] ? 
kSCManagedCapturePreviewRenderSize1080p : kSCManagedCapturePreviewDefaultRenderSize; if (screenSize.width * nativeScale < renderSize.width) { _drawableSize = CGSizeMake(screenSize.width * nativeScale, screenSize.height * nativeScale); } else { _drawableSize = SCSizeIntegral( SCSizeCropToAspectRatio(renderSize, SCSizeGetAspectRatio(SCManagedCapturerAllScreenSize()))); } _performer = [[SCQueuePerformer alloc] initWithLabel:"SCManagedCapturePreviewLayerController" qualityOfService:QOS_CLASS_USER_INITIATED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCoreCamera]; _renderingKVO = [[FBKVOController alloc] initWithObserver:self]; [_renderingKVO observe:self keyPath:@keypath(self, renderSuspended) options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld block:^(id observer, id object, NSDictionary *change) { BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue]; BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue]; if (oldValue != newValue) { [[_delegate blackCameraDetectorForManagedCapturePreviewLayerController:self] capturePreviewDidBecomeVisible:!newValue]; } }]; } return self; } - (void)pause { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SCLogPreviewLayerInfo(@"pause Metal rendering performer waiting"); [_performer performAndWait:^() { self.renderSuspended = YES; }]; SCLogPreviewLayerInfo(@"pause Metal rendering performer finished"); #endif } - (void)resume { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SCLogPreviewLayerInfo(@"resume Metal rendering performer waiting"); [_performer performAndWait:^() { self.renderSuspended = NO; }]; SCLogPreviewLayerInfo(@"resume Metal rendering performer finished"); #endif } - (void)setupPreviewLayer { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SCAssertMainThread(); SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); if (!_metalLayer) { _metalLayer = [CAMetalLayer new]; SCLogPreviewLayerInfo(@"setup metalLayer:%@", _metalLayer); if (!_view) { // Create capture preview view and setup the metal layer [self view]; } else { [_view setupMetalLayer:_metalLayer]; } } #endif } - (UIView *)newStandInViewWithRect:(CGRect)rect { return [self.view resizableSnapshotViewFromRect:rect afterScreenUpdates:YES withCapInsets:UIEdgeInsetsZero]; } - (void)setupRenderPipeline { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); SCAssertNotMainThread(); id device = SCGetManagedCaptureMetalDevice(); id shaderLibrary = [device newDefaultLibrary]; _commandQueue = [device newCommandQueue]; MTLRenderPipelineDescriptor *renderPipelineDescriptor = [MTLRenderPipelineDescriptor new]; renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm; renderPipelineDescriptor.vertexFunction = [shaderLibrary newFunctionWithName:@"yuv_vertex_reshape"]; renderPipelineDescriptor.fragmentFunction = [shaderLibrary newFunctionWithName:@"yuv_fragment_texture"]; MTLVertexDescriptor *vertexDescriptor = [MTLVertexDescriptor vertexDescriptor]; vertexDescriptor.attributes[0].format = MTLVertexFormatFloat2; // position vertexDescriptor.attributes[0].offset = 0; vertexDescriptor.attributes[0].bufferIndex = 0; vertexDescriptor.attributes[1].format = MTLVertexFormatFloat2; // texCoords vertexDescriptor.attributes[1].offset = 2 * sizeof(float); vertexDescriptor.attributes[1].bufferIndex = 0; vertexDescriptor.layouts[0].stepRate = 1; vertexDescriptor.layouts[0].stepFunction = MTLVertexStepFunctionPerVertex; vertexDescriptor.layouts[0].stride = 4 * sizeof(float); renderPipelineDescriptor.vertexDescriptor = 
vertexDescriptor; _renderPipelineState = [device newRenderPipelineStateWithDescriptor:renderPipelineDescriptor error:nil]; CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textureCache); _metalLayer.device = device; _metalLayer.drawableSize = _drawableSize; _metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm; _metalLayer.framebufferOnly = YES; // It is default to Yes. [_performer performAndWait:^() { self.renderSuspended = NO; }]; SCLogPreviewLayerInfo(@"did setup render pipeline"); #endif } - (UIView *)view { SCTraceStart(); SCAssertMainThread(); if (!_view) { #if TARGET_IPHONE_SIMULATOR _view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds aspectRatio:SCSizeGetAspectRatio(_drawableSize) metalLayer:nil]; #else _view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds aspectRatio:SCSizeGetAspectRatio(_drawableSize) metalLayer:_metalLayer]; SCLogPreviewLayerInfo(@"created SCManagedCapturePreviewView:%@", _view); #endif } return _view; } - (void)setManagedCapturer:(id)managedCapturer { SCTraceStart(); SCLogPreviewLayerInfo(@"setManagedCapturer:%@", managedCapturer); if (SCDeviceSupportsMetal()) { [managedCapturer addSampleBufferDisplayController:self context:SCCapturerContext]; } [managedCapturer addListener:self]; } - (void)applicationDidEnterBackground { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SCAssertMainThread(); SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); SCLogPreviewLayerInfo(@"applicationDidEnterBackground waiting for performer"); [_performer performAndWait:^() { CVMetalTextureCacheFlush(_textureCache, 0); [_tokenSet removeAllObjects]; self.renderSuspended = YES; }]; SCLogPreviewLayerInfo(@"applicationDidEnterBackground signal performer finishes"); #endif } - (void)applicationWillResignActive { SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); SCTraceStart(); SCAssertMainThread(); #if !TARGET_IPHONE_SIMULATOR SCLogPreviewLayerInfo(@"pause Metal rendering"); [_performer performAndWait:^() { self.renderSuspended = YES; }]; #endif } - (void)applicationDidBecomeActive { SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); SCTraceStart(); SCAssertMainThread(); #if !TARGET_IPHONE_SIMULATOR SCLogPreviewLayerInfo(@"resume Metal rendering waiting for performer"); [_performer performAndWait:^() { self.renderSuspended = NO; }]; SCLogPreviewLayerInfo(@"resume Metal rendering performer finished"); #endif } - (void)applicationWillEnterForeground { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); SCAssertMainThread(); SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); SCLogPreviewLayerInfo(@"applicationWillEnterForeground waiting for performer"); [_performer performAndWait:^() { self.renderSuspended = NO; if (_containOutdatedPreview && _tokenSet.count == 0) { [self _flushOutdatedPreview]; } }]; SCLogPreviewLayerInfo(@"applicationWillEnterForeground performer finished"); #endif } - (NSString *)keepDisplayingOutdatedPreview { SCTraceStart(); NSString *token = [NSData randomBase64EncodedStringOfLength:8]; #if !TARGET_IPHONE_SIMULATOR SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview waiting for performer"); [_performer performAndWait:^() { [_tokenSet addObject:token]; }]; SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview performer finished"); #endif return token; } - (void)endDisplayingOutdatedPreview:(NSString *)keepToken { #if !TARGET_IPHONE_SIMULATOR SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); // I simply use a lock for this. 
If it becomes a bottleneck, I can figure something else out. SCTraceStart(); SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview waiting for performer"); [_performer performAndWait:^() { [_tokenSet removeObject:keepToken]; if (_tokenSet.count == 0 && _requireToFlushOutdatedPreview && _containOutdatedPreview && !_renderSuspended) { [self _flushOutdatedPreview]; } }]; SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview performer finished"); #endif } #pragma mark - SCManagedSampleBufferDisplayController - (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer { #if !TARGET_IPHONE_SIMULATOR // Just drop the frame if it is rendering. SC_GUARD_ELSE_RUN_AND_RETURN_VALUE(dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) == 0, SCLogPreviewLayerInfo(@"waiting for commandBufferSemaphore signaled"), ); // Just drop the frame, simple. [_performer performAndWait:^() { if (_renderSuspended) { SCLogGeneralInfo(@"Preview rendering suspends and current sample buffer is dropped"); dispatch_semaphore_signal(_commandBufferSemaphore); return; } @autoreleasepool { const BOOL isFirstPreviewFrame = !_containOutdatedPreview; if (isFirstPreviewFrame) { // Signal that we received the first frame (otherwise this will be YES already). SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); sc_create_g2s_ticket_f func = [_delegate g2sTicketForManagedCapturePreviewLayerController:self]; SCG2SActivateManiphestTicketQueueWithTicketCreationFunction(func); } CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); size_t pixelWidth = CVPixelBufferGetWidth(imageBuffer); size_t pixelHeight = CVPixelBufferGetHeight(imageBuffer); id yTexture = SCMetalTextureFromPixelBuffer(imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache); id cbCrTexture = SCMetalTextureFromPixelBuffer(imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache); CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); SC_GUARD_ELSE_RUN_AND_RETURN(yTexture && cbCrTexture, dispatch_semaphore_signal(_commandBufferSemaphore)); id commandBuffer = _commandQueue.commandBuffer; id drawable = _metalLayer.nextDrawable; if (!drawable) { // Count how many times we cannot acquire a drawable. ++_cannotAcquireDrawable; if (_cannotAcquireDrawable >= kSCMetalCannotAcquireDrawableLimit) { // Call [_metalLayer discardContents] to flush the CAImageQueue SCLogGeneralInfo(@"Cannot acquire drawable, reboot Metal .."); [_metalLayer sc_secretFeature]; } dispatch_semaphore_signal(_commandBufferSemaphore); return; } _cannotAcquireDrawable = 0; // Reset to 0 once we can acquire a drawable. MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new]; renderPassDescriptor.colorAttachments[0].texture = drawable.texture; id renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; [renderEncoder setRenderPipelineState:_renderPipelineState]; [renderEncoder setFragmentTexture:yTexture atIndex:0]; [renderEncoder setFragmentTexture:cbCrTexture atIndex:1]; // TODO: Prob this out of the image buffer. // 90-degree clockwise-rotated texture coordinates. // Also do aspect fill.
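// The branch below builds an aspect-fill quad in normalized device coordinates for the
// 90-degree-rotated buffer: the buffer's height maps onto the drawable's width and vice versa,
// so the x extent of the quad comes from normalizedHeight. Worked example (illustrative
// numbers): a 640x480 buffer on a 1080x1920 drawable gives 640 * 1080 < 1920 * 480, so the
// else-branch runs and normalizedHeight = 480 * (1920 / 640) / 1080 = 1.33 with
// normalizedWidth = 1.0; the quad overflows the [-1, 1] clip range horizontally, cropping the
// excess width as aspect fill requires.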
float normalizedHeight, normalizedWidth; if (pixelWidth * _drawableSize.width > _drawableSize.height * pixelHeight) { normalizedHeight = 1.0; normalizedWidth = pixelWidth * (_drawableSize.width / pixelHeight) / _drawableSize.height; } else { normalizedHeight = pixelHeight * (_drawableSize.height / pixelWidth) / _drawableSize.width; normalizedWidth = 1.0; } const float vertices[] = { -normalizedHeight, -normalizedWidth, 1, 1, // lower left -> upper right normalizedHeight, -normalizedWidth, 1, 0, // lower right -> lower right -normalizedHeight, normalizedWidth, 0, 1, // upper left -> upper left normalizedHeight, normalizedWidth, 0, 0, // upper right -> lower left }; [renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0]; [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4]; [renderEncoder endEncoding]; // I need to set a minimum duration for the drawable. // There is a bug on iOS 10.3: if I present as soon as I can, the GPU is kept // at 30fps even when you swipe between views, which causes undesirable visual jarring. // By setting a minimum duration, even an incredibly small one (I tried 10ms, and 60fps works here), // the OS seems to adjust the frame rate much better when swiping. // This is a new method in iOS 10.3. if ([commandBuffer respondsToSelector:@selector(presentDrawable:afterMinimumDuration:)]) { [(id)commandBuffer presentDrawable:drawable afterMinimumDuration:(1.0 / 60)]; } else { [commandBuffer presentDrawable:drawable]; } [commandBuffer addCompletedHandler:^(id commandBuffer) { dispatch_semaphore_signal(_commandBufferSemaphore); }]; if (isFirstPreviewFrame) { if ([drawable respondsToSelector:@selector(addPresentedHandler:)] && [drawable respondsToSelector:@selector(presentedTime)]) { [(id)drawable addPresentedHandler:^(id presentedDrawable) { SCGhostToSnappableSignalDidRenderFirstPreviewFrame([(id)presentedDrawable presentedTime]); }]; } else { [commandBuffer addCompletedHandler:^(id commandBuffer) { // Using CACurrentMediaTime to approximate. SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); }]; } } // We enqueued a sample buffer to display, therefore the view contains an outdated display (to be cleaned up). _containOutdatedPreview = YES; [commandBuffer commit]; } }]; #endif } - (void)flushOutdatedPreview { SCTraceStart(); #if !TARGET_IPHONE_SIMULATOR // This method cannot drop frames (otherwise we will have residual on the screen). SCLogPreviewLayerInfo(@"flushOutdatedPreview waiting for performer"); [_performer performAndWait:^() { _requireToFlushOutdatedPreview = YES; SC_GUARD_ELSE_RETURN(!_renderSuspended); // Have to make sure we have no token left before returning. SC_GUARD_ELSE_RETURN(_tokenSet.count == 0); [self _flushOutdatedPreview]; }]; SCLogPreviewLayerInfo(@"flushOutdatedPreview performer finished"); #endif } - (void)_flushOutdatedPreview { SCTraceStart(); SCAssertPerformer(_performer); #if !TARGET_IPHONE_SIMULATOR SCLogPreviewLayerInfo(@"flushOutdatedPreview containOutdatedPreview:%d", _containOutdatedPreview); // I don't care if this has renderSuspended or not, assuming I did the right thing. // Emptied, no need to do this any more on foregrounding.
SC_GUARD_ELSE_RETURN(_containOutdatedPreview); _containOutdatedPreview = NO; _requireToFlushOutdatedPreview = NO; [_metalLayer sc_secretFeature]; #endif } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer { SCTraceStart(); SCAssertMainThread(); // Force to load the view [self view]; _view.videoPreviewLayer = videoPreviewLayer; SCLogPreviewLayerInfo(@"didChangeVideoPreviewLayer:%@", videoPreviewLayer); } - (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView { SCTraceStart(); SCAssertMainThread(); // Force to load the view [self view]; _view.videoPreviewGLView = videoPreviewGLView; SCLogPreviewLayerInfo(@"didChangeVideoPreviewGLView:%@", videoPreviewGLView); } @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewView.h ================================================ // // SCManagedCapturePreviewView.h // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import #import @class LSAGLView; @interface SCManagedCapturePreviewView : UIView - (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE; - (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer; // This method is called only once in case the metalLayer is nil previously. - (void)setupMetalLayer:(CALayer *)metalLayer; @property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer; @property (nonatomic, strong) LSAGLView *videoPreviewGLView; @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewView.m ================================================ // // SCManagedCapturePreviewView.m // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
// #import "SCManagedCapturePreviewView.h" #import "SCCameraTweaks.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCManagedCapturePreviewViewDebugView.h" #import "SCMetalUtils.h" #import #import #import #import @implementation SCManagedCapturePreviewView { CGFloat _aspectRatio; CALayer *_containerLayer; CALayer *_metalLayer; SCManagedCapturePreviewViewDebugView *_debugView; } - (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer { SCTraceStart(); SCAssertMainThread(); self = [super initWithFrame:frame]; if (self) { _aspectRatio = aspectRatio; if (SCDeviceSupportsMetal()) { [CATransaction begin]; [CATransaction setDisableActions:YES]; _metalLayer = metalLayer; _metalLayer.frame = [self _layerFrame]; [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]]; [CATransaction commit]; } else { _containerLayer = [[CALayer alloc] init]; _containerLayer.frame = [self _layerFrame]; // Using a container layer such that the software zooming is happening on this layer [self.layer insertSublayer:_containerLayer below:[self.layer sublayers][0]]; } if ([self _shouldShowDebugView]) { _debugView = [[SCManagedCapturePreviewViewDebugView alloc] init]; [self addSubview:_debugView]; } } return self; } - (void)_layoutVideoPreviewLayer { SCAssertMainThread(); [CATransaction begin]; [CATransaction setDisableActions:YES]; if (SCDeviceSupportsMetal()) { _metalLayer.frame = [self _layerFrame]; } else { if (_videoPreviewLayer) { SCLogGeneralInfo(@"container layer frame %@, video preview layer frame %@", NSStringFromCGRect(_containerLayer.frame), NSStringFromCGRect(_videoPreviewLayer.frame)); } // Using bounds because we don't really care about the position at this point. _containerLayer.frame = [self _layerFrame]; _videoPreviewLayer.frame = _containerLayer.bounds; _videoPreviewLayer.position = CGPointMake(CGRectGetWidth(_containerLayer.bounds) * 0.5, CGRectGetHeight(_containerLayer.bounds) * 0.5); } [CATransaction commit]; } - (void)_layoutVideoPreviewGLView { SCCAssertMainThread(); _videoPreviewGLView.frame = [self _layerFrame]; } - (CGRect)_layerFrame { CGRect frame = SCRectMakeWithCenterAndSize( SCRectGetMid(self.bounds), SCSizeIntegral(SCSizeExpandToAspectRatio(self.bounds.size, _aspectRatio))); CGFloat x = frame.origin.x; x = isnan(x) ? 0.0 : (isfinite(x) ? x : INFINITY); CGFloat y = frame.origin.y; y = isnan(y) ? 0.0 : (isfinite(y) ? y : INFINITY); CGFloat width = frame.size.width; width = isnan(width) ? 0.0 : (isfinite(width) ? width : INFINITY); CGFloat height = frame.size.height; height = isnan(height) ? 0.0 : (isfinite(height) ? 
height : INFINITY); return CGRectMake(x, y, width, height); } - (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer { SCAssertMainThread(); if (_videoPreviewLayer != videoPreviewLayer) { [_videoPreviewLayer removeFromSuperlayer]; _videoPreviewLayer = videoPreviewLayer; [_containerLayer addSublayer:_videoPreviewLayer]; [self _layoutVideoPreviewLayer]; } } - (void)setupMetalLayer:(CALayer *)metalLayer { SCAssert(!_metalLayer, @"_metalLayer should be nil."); SCAssert(metalLayer, @"metalLayer must exists."); SCAssertMainThread(); _metalLayer = metalLayer; [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]]; [self _layoutVideoPreviewLayer]; } - (void)setVideoPreviewGLView:(LSAGLView *)videoPreviewGLView { SCAssertMainThread(); if (_videoPreviewGLView != videoPreviewGLView) { [_videoPreviewGLView removeFromSuperview]; _videoPreviewGLView = videoPreviewGLView; [self addSubview:_videoPreviewGLView]; [self _layoutVideoPreviewGLView]; } } #pragma mark - Overridden methods - (void)layoutSubviews { SCAssertMainThread(); [super layoutSubviews]; [self _layoutVideoPreviewLayer]; [self _layoutVideoPreviewGLView]; [self _layoutDebugViewIfNeeded]; } - (void)setHidden:(BOOL)hidden { SCAssertMainThread(); [super setHidden:hidden]; if (hidden) { SCLogGeneralInfo(@"[SCManagedCapturePreviewView] - isHidden is being set to YES"); } } #pragma mark - Debug View - (BOOL)_shouldShowDebugView { // Only show debug view in internal builds and tweak settings are turned on. return SCIsInternalBuild() && (SCCameraTweaksEnableFocusPointObservation() || SCCameraTweaksEnableExposurePointObservation()); } - (void)_layoutDebugViewIfNeeded { SCAssertMainThread(); SC_GUARD_ELSE_RETURN([self _shouldShowDebugView]); _debugView.frame = self.bounds; [self bringSubviewToFront:_debugView]; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewViewDebugView.h ================================================ // // SCManagedCapturePreviewViewDebugView.h // Snapchat // // Created by Jiyang Zhu on 1/19/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. // #import #import @interface SCManagedCapturePreviewViewDebugView : UIView @end ================================================ FILE: ManagedCapturer/SCManagedCapturePreviewViewDebugView.m ================================================ // // SCManagedCapturePreviewViewDebugView.m // Snapchat // // Created by Jiyang Zhu on 1/19/18. // Copyright © 2018 Snapchat, Inc. All rights reserved. 
// #import "SCManagedCapturePreviewViewDebugView.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerListener.h" #import #import #import @import CoreText; static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth = 1.0; static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairWidth = 20.0; @interface SCManagedCapturePreviewViewDebugView () @property (assign, nonatomic) CGPoint focusPoint; @property (assign, nonatomic) CGPoint exposurePoint; @property (strong, nonatomic) NSDictionary *faceBoundsByFaceID; @end @implementation SCManagedCapturePreviewViewDebugView - (instancetype)initWithFrame:(CGRect)frame { self = [super initWithFrame:frame]; if (self) { self.userInteractionEnabled = NO; self.backgroundColor = [UIColor clearColor]; _focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; _exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; [[SCManagedCapturer sharedInstance] addListener:self]; } return self; } - (void)drawRect:(CGRect)rect { CGContextRef context = UIGraphicsGetCurrentContext(); if (self.focusPoint.x > 0 || self.focusPoint.y > 0) { [self _drawCrossHairAtPoint:self.focusPoint inContext:context withColor:[UIColor greenColor] isXShaped:YES]; } if (self.exposurePoint.x > 0 || self.exposurePoint.y > 0) { [self _drawCrossHairAtPoint:self.exposurePoint inContext:context withColor:[UIColor yellowColor] isXShaped:NO]; } if (self.faceBoundsByFaceID.count > 0) { [self.faceBoundsByFaceID enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) { CGRect faceRect = [obj CGRectValue]; NSInteger faceID = [key integerValue]; [self _drawRectangle:faceRect text:[NSString sc_stringWithFormat:@"ID: %@", key] inContext:context withColor:[UIColor colorWithRed:((faceID % 3) == 0) green:((faceID % 3) == 1) blue:((faceID % 3) == 2) alpha:1.0]]; }]; } } - (void)dealloc { [[SCManagedCapturer sharedInstance] removeListener:self]; } /** Draw a crosshair with center point, context, color and shape. @param isXShaped "X" or "+" */ - (void)_drawCrossHairAtPoint:(CGPoint)center inContext:(CGContextRef)context withColor:(UIColor *)color isXShaped:(BOOL)isXShaped { CGFloat width = kSCManagedCapturePreviewViewDebugViewCrossHairWidth; CGContextSetStrokeColorWithColor(context, color.CGColor); CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth); CGContextBeginPath(context); if (isXShaped) { CGContextMoveToPoint(context, center.x - width / 2, center.y - width / 2); CGContextAddLineToPoint(context, center.x + width / 2, center.y + width / 2); CGContextMoveToPoint(context, center.x + width / 2, center.y - width / 2); CGContextAddLineToPoint(context, center.x - width / 2, center.y + width / 2); } else { CGContextMoveToPoint(context, center.x - width / 2, center.y); CGContextAddLineToPoint(context, center.x + width / 2, center.y); CGContextMoveToPoint(context, center.x, center.y - width / 2); CGContextAddLineToPoint(context, center.x, center.y + width / 2); } CGContextStrokePath(context); } /** Draw a rectangle, with a text on the top left. 
*/ - (void)_drawRectangle:(CGRect)rect text:(NSString *)text inContext:(CGContextRef)context withColor:(UIColor *)color { CGContextSetStrokeColorWithColor(context, color.CGColor); CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth); CGContextBeginPath(context); CGContextMoveToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect)); CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMaxY(rect)); CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMaxY(rect)); CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMinY(rect)); CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect)); NSMutableParagraphStyle *textStyle = [[NSMutableParagraphStyle alloc] init]; textStyle.alignment = NSTextAlignmentLeft; NSDictionary *attributes = @{ NSFontAttributeName : [UIFont boldSystemFontOfSize:16], NSForegroundColorAttributeName : color, NSParagraphStyleAttributeName : textStyle }; [text drawInRect:rect withAttributes:attributes]; CGContextStrokePath(context); } - (CGPoint)_convertPointOfInterest:(CGPoint)point { SCAssertMainThread(); CGPoint convertedPoint = CGPointMake((1 - point.y) * CGRectGetWidth(self.bounds), point.x * CGRectGetHeight(self.bounds)); if ([[SCManagedCapturer sharedInstance] isVideoMirrored]) { convertedPoint.x = CGRectGetWidth(self.bounds) - convertedPoint.x; } return convertedPoint; } - (NSDictionary *)_convertFaceBounds:(NSDictionary *)faceBoundsByFaceID { SCAssertMainThread(); NSMutableDictionary *convertedFaceBoundsByFaceID = [NSMutableDictionary dictionaryWithCapacity:faceBoundsByFaceID.count]; for (NSNumber *key in faceBoundsByFaceID.allKeys) { CGRect faceBounds = [[faceBoundsByFaceID objectForKey:key] CGRectValue]; CGRect convertedBounds = CGRectMake(CGRectGetMinY(faceBounds) * CGRectGetWidth(self.bounds), CGRectGetMinX(faceBounds) * CGRectGetHeight(self.bounds), CGRectGetHeight(faceBounds) * CGRectGetWidth(self.bounds), CGRectGetWidth(faceBounds) * CGRectGetHeight(self.bounds)); if (![[SCManagedCapturer sharedInstance] isVideoMirrored]) { convertedBounds.origin.x = CGRectGetWidth(self.bounds) - CGRectGetMaxX(convertedBounds); } [convertedFaceBoundsByFaceID setObject:[NSValue valueWithCGRect:convertedBounds] forKey:key]; } return convertedFaceBoundsByFaceID; } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint { runOnMainThreadAsynchronouslyIfNecessary(^{ self.exposurePoint = [self _convertPointOfInterest:exposurePoint]; [self setNeedsDisplay]; }); } - (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint { runOnMainThreadAsynchronouslyIfNecessary(^{ self.focusPoint = [self _convertPointOfInterest:focusPoint]; [self setNeedsDisplay]; }); } - (void)managedCapturer:(id)managedCapturer didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID { runOnMainThreadAsynchronouslyIfNecessary(^{ self.faceBoundsByFaceID = [self _convertFaceBounds:faceBoundsByFaceID]; [self setNeedsDisplay]; }); } - (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state { runOnMainThreadAsynchronouslyIfNecessary(^{ self.faceBoundsByFaceID = nil; self.focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; self.exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; [self setNeedsDisplay]; }); } @end ================================================ FILE: ManagedCapturer/SCManagedCaptureSession.h ================================================ // 
//  SCManagedCaptureSession.h
//  Snapchat
//
//  Created by Derek Wang on 02/03/2018.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import

/**
 `SCManagedCaptureSession` is a wrapper class around `AVCaptureSession`. The purpose of this class is to provide
 additional functionality on top of `AVCaptureSession`. For example, for black camera detection we need to monitor
 when certain methods are called. Another example is that we can treat it as a more stable version of
 `AVCaptureSession` by moving some `AVCaptureSession`-fixing logic into this class, so that it provides reliable
 interfaces to the outside. That would be the next step. It also tries to mimic `AVCaptureSession` by implementing
 some of its methods. The original methods on `AVCaptureSession` should not be used anymore.
 */

@class SCBlackCameraDetector;

NS_ASSUME_NONNULL_BEGIN

@interface SCManagedCaptureSession : NSObject

/** Expose avSession property */
@property (nonatomic, strong, readonly) AVCaptureSession *avSession;

/** Expose avSession isRunning property for convenience. */
@property (nonatomic, readonly, assign) BOOL isRunning;

/**
 Wrap the [AVCaptureSession startRunning] method and monitor it. [AVCaptureSession startRunning] should not be
 called directly.
 */
- (void)startRunning;

/**
 Wrap the [AVCaptureSession stopRunning] method and monitor it. [AVCaptureSession stopRunning] should not be
 called directly.
 */
- (void)stopRunning;

/** Wrap [AVCaptureSession beginConfiguration] and monitor the beginConfiguration call. */
- (void)beginConfiguration;

/** Wrap [AVCaptureSession commitConfiguration] and monitor the commitConfiguration call. */
- (void)commitConfiguration;

/**
 Configure the internal AVCaptureSession with a block.
 @param block configuration block, run between beginConfiguration and commitConfiguration.
 */
- (void)performConfiguration:(void (^)(void))block;

- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER;

SC_INIT_AND_NEW_UNAVAILABLE

@end

NS_ASSUME_NONNULL_END
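A minimal call-site sketch for the wrapper above (illustrative only; the `detector` instance and the configuration body are hypothetical). `performConfiguration:` brackets the block in `beginConfiguration`/`commitConfiguration` so the black camera detector observes the commit, and running state changes must also go through the wrapper rather than the raw `AVCaptureSession`:

SCManagedCaptureSession *session = [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:detector];
[session performConfiguration:^{
    // Mutate inputs/outputs on session.avSession here; the changes are applied
    // atomically when commitConfiguration runs after this block returns.
}];
[session startRunning]; // monitored; do not call [session.avSession startRunning] directly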
// #import "SCManagedCaptureSession.h" #import "SCBlackCameraDetector.h" #import @interface SCManagedCaptureSession () { SCBlackCameraDetector *_blackCameraDetector; } @end @implementation SCManagedCaptureSession - (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector { self = [super init]; if (self) { _avSession = [[AVCaptureSession alloc] init]; _blackCameraDetector = detector; } return self; } - (void)startRunning { SCTraceODPCompatibleStart(2); [_blackCameraDetector sessionWillCallStartRunning]; [_avSession startRunning]; [_blackCameraDetector sessionDidCallStartRunning]; } - (void)stopRunning { SCTraceODPCompatibleStart(2); [_blackCameraDetector sessionWillCallStopRunning]; [_avSession stopRunning]; [_blackCameraDetector sessionDidCallStopRunning]; } - (void)performConfiguration:(nonnull void (^)(void))block { SC_GUARD_ELSE_RETURN(block); [self beginConfiguration]; block(); [self commitConfiguration]; } - (void)beginConfiguration { [_avSession beginConfiguration]; } - (void)commitConfiguration { SCTraceODPCompatibleStart(2); [_blackCameraDetector sessionWillCommitConfiguration]; [_avSession commitConfiguration]; [_blackCameraDetector sessionDidCommitConfiguration]; } - (BOOL)isRunning { return _avSession.isRunning; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturer.h ================================================ // SCManagedCapturer.h // Snapchat // // Created by Liu Liu on 4/20/15. #import "SCCapturer.h" #import "SCManagedCapturerListener.h" #import "SCManagedCapturerUtils.h" #import /* SCManagedCapturer is a shell class. Its job is to provide an singleton instance which follows protocol of SCManagedCapturerImpl. The reason we use this pattern is because we are building SCManagedCapturerV2. This setup offers possbility for us to code V2 without breaking the existing app, and can test the new implementation via Tweak. */ @interface SCManagedCapturer : NSObject + (id)sharedInstance; @end ================================================ FILE: ManagedCapturer/SCManagedCapturer.m ================================================ // // SCManagedCapturer.m // Snapchat // // Created by Lin Jia on 9/28/17. // #import "SCManagedCapturer.h" #import "SCCameraTweaks.h" #import "SCCaptureCore.h" #import "SCManagedCapturerV1.h" @implementation SCManagedCapturer + (id)sharedInstance { static dispatch_once_t onceToken; static id managedCapturer; dispatch_once(&onceToken, ^{ managedCapturer = [[SCCaptureCore alloc] init]; }); return managedCapturer; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturerARSessionHandler.h ================================================ // // SCManagedCapturerARSessionHandler.h // Snapchat // // Created by Xiaokang Liu on 16/03/2018. // // This class is used to handle the AVCaptureSession event when ARSession is enabled. // The stopARSessionRunning will be blocked till the AVCaptureSessionDidStopRunningNotification event has been received // successfully, // after then we can restart AVCaptureSession gracefully. 
#import <Foundation/Foundation.h>
#import

@class SCCaptureResource;

@interface SCManagedCapturerARSessionHandler : NSObject

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER;

- (void)stopObserving;

- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0);

@end

================================================
FILE: ManagedCapturer/SCManagedCapturerARSessionHandler.m
================================================
//
//  SCManagedCapturerARSessionHandler.m
//  Snapchat
//
//  Created by Xiaokang Liu on 16/03/2018.
//

#import "SCManagedCapturerARSessionHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureSession.h"

#import
#import
#import

@import ARKit;

static CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2;

@interface SCManagedCapturerARSessionHandler () {
    SCCaptureResource *__weak _captureResource;
    dispatch_semaphore_t _arSessionShutdownSemaphore;
}
@end

@implementation SCManagedCapturerARSessionHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
        _arSessionShutdownSemaphore = dispatch_semaphore_create(0);
    }
    return self;
}

- (void)stopObserving
{
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStopRunningNotification
                                                  object:nil];
}

- (void)stopARSessionRunning
{
    SCAssertPerformer(_captureResource.queuePerformer);
    SCAssert(SC_AT_LEAST_IOS_11, @"Should only be called on iOS 11+");
    if (@available(iOS 11.0, *)) {
        // ARSession stops its internal AVCaptureSession asynchronously. We listen for its callback and only restart
        // our own capture session once it has finished shutting down, so the two sessions don't conflict.
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_completeARSessionShutdown:)
                                                     name:AVCaptureSessionDidStopRunningNotification
                                                   object:nil];
        [_captureResource.arSession pause];
        dispatch_semaphore_wait(
            _arSessionShutdownSemaphore,
            dispatch_time(DISPATCH_TIME_NOW,
                          (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC)));
    }
}

- (void)_completeARSessionShutdown:(NSNotification *)note
{
    // This notification is only registered IMMEDIATELY before ARKit shutdown.
    // Explicitly guard that the notification object IS NOT the main session's.
    SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]);
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVCaptureSessionDidStopRunningNotification
                                                  object:nil];
    dispatch_semaphore_signal(_arSessionShutdownSemaphore);
}

@end

================================================
FILE: ManagedCapturer/SCManagedCapturerListener.h
================================================
//#!announcer.rb
//
//  SCManagedCapturerListener
//  Snapchat
//
//  Created by Liu Liu on 4/23/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//
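As a usage sketch, a minimal conforming listener that follows the preview-layer guidance documented further down in this protocol might look like the following (`SCPreviewLayerHost` and its `hostView` are hypothetical names, not part of the codebase):

@interface SCPreviewLayerHost : NSObject <SCManagedCapturerListener>
@property (nonatomic, strong) UIView *hostView; // hypothetical view that displays the preview
@end

@implementation SCPreviewLayerHost
- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
    // Invoked on the main queue. Attach the fresh layer (a real implementation
    // would also detach the previous one) so no stale layer with residual
    // images stays in the hierarchy.
    [self.hostView.layer addSublayer:videoPreviewLayer];
    videoPreviewLayer.frame = self.hostView.layer.bounds;
}
@end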
// #import "SCCapturer.h" #import "SCManagedCaptureDevice.h" #import "SCManagedRecordedVideo.h" #import "SCVideoCaptureSessionInfo.h" #import #import #import @class SCManagedCapturer; @class SCManagedCapturerState; @class LSAGLView; @class SCManagedCapturerSampleMetadata; @protocol SCManagedCapturerListener @optional // All these calbacks are invoked on main queue // Start / stop / reset - (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state; // Change state methods - (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state; - (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state; // The video preview layer is not maintained as a state, therefore, its change is not related to the state of // the camera at all, listener show only manage the setup of the videoPreviewLayer. // Since the AVCaptureVideoPreviewLayer can only attach to one AVCaptureSession per app, it is recommended you // have a view and controller which manages the video preview layer, and for upper layer, only manage that view // or view controller, which maintains the pointer consistency. The video preview layer is required to recreate // every now and then because otherwise we will have cases that the old video preview layer may contain // residual images. 
- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer; - (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView; // Video recording-related methods - (void)managedCapturer:(id)managedCapturer didBeginVideoRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session; - (void)managedCapturer:(id)managedCapturer didBeginAudioRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session; - (void)managedCapturer:(id)managedCapturer willFinishRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session recordedVideoFuture:(SCFuture> *)recordedVideoFuture videoSize:(CGSize)videoSize placeholderImage:(UIImage *)placeholderImage; - (void)managedCapturer:(id)managedCapturer didFinishRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session recordedVideo:(SCManagedRecordedVideo *)recordedVideo; - (void)managedCapturer:(id)managedCapturer didFailRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session error:(NSError *)error; - (void)managedCapturer:(id)managedCapturer didCancelRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session; - (void)managedCapturer:(id)managedCapturer didGetError:(NSError *)error forType:(SCManagedVideoCapturerInfoType)type session:(SCVideoCaptureSessionInfo)session; - (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session; - (void)managedCapturer:(id)managedCapturer didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; // Photo methods - (void)managedCapturer:(id)managedCapturer willCapturePhoto:(SCManagedCapturerState *)state sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; - (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state; - (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state; - (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state; // Face detection - (void)managedCapturer:(id)managedCapturer didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID; - (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint; - (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint; @end ================================================ FILE: ManagedCapturer/SCManagedCapturerListenerAnnouncer.h ================================================ // Generated by the announcer.rb DO NOT EDIT!! #import "SCManagedCapturerListener.h" #import @interface SCManagedCapturerListenerAnnouncer : NSObject - (BOOL)addListener:(id)listener; - (void)removeListener:(id)listener; @end ================================================ FILE: ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm ================================================ // Generated by the announcer.rb DO NOT EDIT!! 
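The generated announcer below fans callbacks out to its listeners through a copy-on-write snapshot: mutations serialize on a mutex and publish a freshly built vector via atomic_store, while every announcement method merely atomic_loads the current snapshot and iterates it outside the lock, so listeners can be added or removed while a callback is in flight. The pattern, distilled (an illustrative simplification, not the generated code; the real mutators also filter out listeners whose weak references have nilled out):

// Writer side (add/remove): copy under the lock, then publish the new vector.
lock_guard<mutex> lock(_mutex);
auto next = make_shared<vector<__weak id<SCManagedCapturerListener>>>(*_listeners);
next->push_back(listener);
atomic_store(&self->_listeners, next);

// Reader side (announce): snapshot without the lock, iterate, forward.
auto snapshot = atomic_load(&self->_listeners);
if (snapshot) {
    for (id listener : *snapshot) {
        // forward the callback if the listener implements the optional method
    }
}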
#import "SCManagedCapturerListenerAnnouncer.h" #include using std::lock_guard; using std::mutex; #include using std::find; using std::make_shared; using std::shared_ptr; using std::vector; @implementation SCManagedCapturerListenerAnnouncer { mutex _mutex; shared_ptr>> _listeners; } - (NSString *)description { auto listeners = atomic_load(&self->_listeners); NSMutableString *desc = [NSMutableString string]; [desc appendFormat:@": [", self]; for (int i = 0; i < listeners->size(); ++i) { [desc appendFormat:@"%@", (*listeners)[i]]; if (i != listeners->size() - 1) { [desc appendString:@", "]; } } [desc appendString:@"]"]; return desc; } - (BOOL)addListener:(id)listener { lock_guard lock(_mutex); auto listeners = make_shared>>(); if (_listeners != nil) { // The listener we want to add already exists if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { return NO; } for (auto &one : *_listeners) { if (one != nil) { listeners->push_back(one); } } listeners->push_back(listener); atomic_store(&self->_listeners, listeners); } else { listeners->push_back(listener); atomic_store(&self->_listeners, listeners); } return YES; } - (void)removeListener:(id)listener { lock_guard lock(_mutex); if (_listeners == nil) { return; } // If the only item in the listener list is the one we want to remove, store it back to nil again if (_listeners->size() == 1 && (*_listeners)[0] == listener) { atomic_store(&self->_listeners, shared_ptr>>()); return; } auto listeners = make_shared>>(); for (auto &one : *_listeners) { if (one != nil && one != listener) { listeners->push_back(one); } } atomic_store(&self->_listeners, listeners); } - (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) { [listener managedCapturer:managedCapturer didStartRunning:state]; } } } } - (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) { [listener managedCapturer:managedCapturer didStopRunning:state]; } } } } - (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) { [listener managedCapturer:managedCapturer didResetFromRuntimeError:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { [listener managedCapturer:managedCapturer didChangeState:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { [listener managedCapturer:managedCapturer didChangeNightModeActive:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state { 
auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) { [listener managedCapturer:managedCapturer didChangePortraitModeActive:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { [listener managedCapturer:managedCapturer didChangeFlashActive:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { [listener managedCapturer:managedCapturer didChangeLensesActive:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) { [listener managedCapturer:managedCapturer didChangeARSessionActive:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { [listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { [listener managedCapturer:managedCapturer didChangeZoomFactor:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { [listener managedCapturer:managedCapturer didChangeLowLightCondition:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { [listener managedCapturer:managedCapturer didChangeAdjustingExposure:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { [listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener 
respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { [listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { [listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView]; } } } } - (void)managedCapturer:(id)managedCapturer didBeginVideoRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) { [listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session]; } } } } - (void)managedCapturer:(id)managedCapturer didBeginAudioRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) { [listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session]; } } } } - (void)managedCapturer:(id)managedCapturer willFinishRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session recordedVideoFuture:(SCFuture> *)recordedVideoFuture videoSize:(CGSize)videoSize placeholderImage:(UIImage *)placeholderImage { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer: willFinishRecording: session: recordedVideoFuture: videoSize: placeholderImage:)]) { [listener managedCapturer:managedCapturer willFinishRecording:state session:session recordedVideoFuture:recordedVideoFuture videoSize:videoSize placeholderImage:placeholderImage]; } } } } - (void)managedCapturer:(id)managedCapturer didFinishRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session recordedVideo:(SCManagedRecordedVideo *)recordedVideo { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) { [listener managedCapturer:managedCapturer didFinishRecording:state session:session recordedVideo:recordedVideo]; } } } } - (void)managedCapturer:(id)managedCapturer didFailRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session error:(NSError *)error { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) { [listener managedCapturer:managedCapturer didFailRecording:state session:session error:error]; } } } } - (void)managedCapturer:(id)managedCapturer didCancelRecording:(SCManagedCapturerState *)state session:(SCVideoCaptureSessionInfo)session { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) { [listener managedCapturer:managedCapturer didCancelRecording:state session:session]; } } } } - 
(void)managedCapturer:(id)managedCapturer
            didGetError:(NSError *)error
                forType:(SCManagedVideoCapturerInfoType)type
                session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) {
                [listener managedCapturer:managedCapturer didGetError:error forType:type session:session];
            }
        }
    }
}

- (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) {
                [listener managedCapturerDidCallLenseResume:managedCapturer session:session];
            }
        }
    }
}

- (void)managedCapturer:(id)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) {
                [listener managedCapturer:managedCapturer
                    didAppendVideoSampleBuffer:sampleBuffer
                                sampleMetadata:sampleMetadata];
            }
        }
    }
}

- (void)managedCapturer:(id)managedCapturer
       willCapturePhoto:(SCManagedCapturerState *)state
         sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) {
                [listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata];
            }
        }
    }
}

- (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) {
                [listener managedCapturer:managedCapturer didCapturePhoto:state];
            }
        }
    }
}

- (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:isUnderDeviceMotion:)]) {
                return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state];
            }
        }
    }
    return NO;
}

- (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:shouldProcessFileInput:)]) {
                return [listener managedCapturer:managedCapturer shouldProcessFileInput:state];
            }
        }
    }
    return NO;
}

- (void)managedCapturer:(id)managedCapturer didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) {
                [listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID];
            }
        }
    }
}

- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint
{
    auto listeners = atomic_load(&self->_listeners);
    if (listeners) {
        for (id listener : *listeners) {
            if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) {
[listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint]; } } } } - (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) { [listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint]; } } } } @end ================================================ FILE: ManagedCapturer/SCManagedCapturerSampleMetadata.h ================================================ // // SCRecordingMetadata.h // Snapchat // #import #import #import NS_ASSUME_NONNULL_BEGIN @interface SCManagedCapturerSampleMetadata : NSObject SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER; @property (nonatomic, readonly) CMTime presentationTimestamp; @property (nonatomic, readonly) float fieldOfView; @end NS_ASSUME_NONNULL_END ================================================ FILE: ManagedCapturer/SCManagedCapturerSampleMetadata.m ================================================ // // SCRecordingMetadata.m // Snapchat // #import "SCManagedCapturerSampleMetadata.h" NS_ASSUME_NONNULL_BEGIN @implementation SCManagedCapturerSampleMetadata - (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView { self = [super init]; if (self) { _presentationTimestamp = presentationTimestamp; _fieldOfView = fieldOfView; } return self; } @end NS_ASSUME_NONNULL_END ================================================ FILE: ManagedCapturer/SCManagedCapturerState.h ================================================ // 49126048c3d19dd5b676b8d39844cf133833b67a // Generated by the value-object.rb DO NOT EDIT!! 
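SCManagedCapturerState, declared next, is the immutable value object threaded through all the listener callbacks above. A listener typically just reads the fields it cares about; a sketch (the `zoomLabel` property is hypothetical):

- (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state
{
    // State objects are immutable snapshots, so this read is safe on the main
    // thread even though the state was built on the capture queue.
    self.zoomLabel.text = [NSString stringWithFormat:@"%.1fx", state.zoomFactor];
}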
#import "SCManagedCaptureDevice.h" #import #import #import @protocol SCManagedCapturerState @property (nonatomic, assign, readonly) BOOL isRunning; @property (nonatomic, assign, readonly) BOOL isNightModeActive; @property (nonatomic, assign, readonly) BOOL isPortraitModeActive; @property (nonatomic, assign, readonly) BOOL lowLightCondition; @property (nonatomic, assign, readonly) BOOL adjustingExposure; @property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; @property (nonatomic, assign, readonly) CGFloat zoomFactor; @property (nonatomic, assign, readonly) BOOL flashSupported; @property (nonatomic, assign, readonly) BOOL torchSupported; @property (nonatomic, assign, readonly) BOOL flashActive; @property (nonatomic, assign, readonly) BOOL torchActive; @property (nonatomic, assign, readonly) BOOL lensesActive; @property (nonatomic, assign, readonly) BOOL arSessionActive; @property (nonatomic, assign, readonly) BOOL liveVideoStreaming; @property (nonatomic, assign, readonly) BOOL lensProcessorReady; @end @interface SCManagedCapturerState : NSObject @property (nonatomic, assign, readonly) BOOL isRunning; @property (nonatomic, assign, readonly) BOOL isNightModeActive; @property (nonatomic, assign, readonly) BOOL isPortraitModeActive; @property (nonatomic, assign, readonly) BOOL lowLightCondition; @property (nonatomic, assign, readonly) BOOL adjustingExposure; @property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; @property (nonatomic, assign, readonly) CGFloat zoomFactor; @property (nonatomic, assign, readonly) BOOL flashSupported; @property (nonatomic, assign, readonly) BOOL torchSupported; @property (nonatomic, assign, readonly) BOOL flashActive; @property (nonatomic, assign, readonly) BOOL torchActive; @property (nonatomic, assign, readonly) BOOL lensesActive; @property (nonatomic, assign, readonly) BOOL arSessionActive; @property (nonatomic, assign, readonly) BOOL liveVideoStreaming; @property (nonatomic, assign, readonly) BOOL lensProcessorReady; - (instancetype)initWithIsRunning:(BOOL)isRunning isNightModeActive:(BOOL)isNightModeActive isPortraitModeActive:(BOOL)isPortraitModeActive lowLightCondition:(BOOL)lowLightCondition adjustingExposure:(BOOL)adjustingExposure devicePosition:(SCManagedCaptureDevicePosition)devicePosition zoomFactor:(CGFloat)zoomFactor flashSupported:(BOOL)flashSupported torchSupported:(BOOL)torchSupported flashActive:(BOOL)flashActive torchActive:(BOOL)torchActive lensesActive:(BOOL)lensesActive arSessionActive:(BOOL)arSessionActive liveVideoStreaming:(BOOL)liveVideoStreaming lensProcessorReady:(BOOL)lensProcessorReady; @end ================================================ FILE: ManagedCapturer/SCManagedCapturerState.m ================================================ // 49126048c3d19dd5b676b8d39844cf133833b67a // Generated by the value-object.rb DO NOT EDIT!! 
#import "SCManagedCapturerState.h" #import #import @implementation SCManagedCapturerState static ptrdiff_t sSCManagedCapturerStateOffsets[0]; static BOOL sSCManagedCapturerStateHasOffsets; - (instancetype)initWithIsRunning:(BOOL)isRunning isNightModeActive:(BOOL)isNightModeActive isPortraitModeActive:(BOOL)isPortraitModeActive lowLightCondition:(BOOL)lowLightCondition adjustingExposure:(BOOL)adjustingExposure devicePosition:(SCManagedCaptureDevicePosition)devicePosition zoomFactor:(CGFloat)zoomFactor flashSupported:(BOOL)flashSupported torchSupported:(BOOL)torchSupported flashActive:(BOOL)flashActive torchActive:(BOOL)torchActive lensesActive:(BOOL)lensesActive arSessionActive:(BOOL)arSessionActive liveVideoStreaming:(BOOL)liveVideoStreaming lensProcessorReady:(BOOL)lensProcessorReady { self = [super init]; if (self) { _isRunning = isRunning; _isNightModeActive = isNightModeActive; _isPortraitModeActive = isPortraitModeActive; _lowLightCondition = lowLightCondition; _adjustingExposure = adjustingExposure; _devicePosition = devicePosition; _zoomFactor = zoomFactor; _flashSupported = flashSupported; _torchSupported = torchSupported; _flashActive = flashActive; _torchActive = torchActive; _lensesActive = lensesActive; _arSessionActive = arSessionActive; _liveVideoStreaming = liveVideoStreaming; _lensProcessorReady = lensProcessorReady; } return self; } #pragma mark - NSCopying - (instancetype)copyWithZone:(NSZone *)zone { // Immutable object, bypass copy return self; } #pragma mark - NSCoding - (instancetype)initWithCoder:(NSCoder *)aDecoder { self = [super init]; if (self) { _isRunning = [aDecoder decodeBoolForKey:@"isRunning"]; _isNightModeActive = [aDecoder decodeBoolForKey:@"isNightModeActive"]; _isPortraitModeActive = [aDecoder decodeBoolForKey:@"isPortraitModeActive"]; _lowLightCondition = [aDecoder decodeBoolForKey:@"lowLightCondition"]; _adjustingExposure = [aDecoder decodeBoolForKey:@"adjustingExposure"]; _devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@"devicePosition"]; _zoomFactor = [aDecoder decodeFloatForKey:@"zoomFactor"]; _flashSupported = [aDecoder decodeBoolForKey:@"flashSupported"]; _torchSupported = [aDecoder decodeBoolForKey:@"torchSupported"]; _flashActive = [aDecoder decodeBoolForKey:@"flashActive"]; _torchActive = [aDecoder decodeBoolForKey:@"torchActive"]; _lensesActive = [aDecoder decodeBoolForKey:@"lensesActive"]; _arSessionActive = [aDecoder decodeBoolForKey:@"arSessionActive"]; _liveVideoStreaming = [aDecoder decodeBoolForKey:@"liveVideoStreaming"]; _lensProcessorReady = [aDecoder decodeBoolForKey:@"lensProcessorReady"]; } return self; } - (void)encodeWithCoder:(NSCoder *)aCoder { [aCoder encodeBool:_isRunning forKey:@"isRunning"]; [aCoder encodeBool:_isNightModeActive forKey:@"isNightModeActive"]; [aCoder encodeBool:_isPortraitModeActive forKey:@"isPortraitModeActive"]; [aCoder encodeBool:_lowLightCondition forKey:@"lowLightCondition"]; [aCoder encodeBool:_adjustingExposure forKey:@"adjustingExposure"]; [aCoder encodeInteger:(NSInteger)_devicePosition forKey:@"devicePosition"]; [aCoder encodeFloat:_zoomFactor forKey:@"zoomFactor"]; [aCoder encodeBool:_flashSupported forKey:@"flashSupported"]; [aCoder encodeBool:_torchSupported forKey:@"torchSupported"]; [aCoder encodeBool:_flashActive forKey:@"flashActive"]; [aCoder encodeBool:_torchActive forKey:@"torchActive"]; [aCoder encodeBool:_lensesActive forKey:@"lensesActive"]; [aCoder encodeBool:_arSessionActive forKey:@"arSessionActive"]; [aCoder encodeBool:_liveVideoStreaming 
forKey:@"liveVideoStreaming"]; [aCoder encodeBool:_lensProcessorReady forKey:@"lensProcessorReady"]; } #pragma mark - FasterCoding - (BOOL)preferFasterCoding { return YES; } - (void)encodeWithFasterCoder:(id)fasterCoder { [fasterCoder encodeBool:_adjustingExposure]; [fasterCoder encodeBool:_arSessionActive]; [fasterCoder encodeSInt32:_devicePosition]; [fasterCoder encodeBool:_flashActive]; [fasterCoder encodeBool:_flashSupported]; [fasterCoder encodeBool:_isNightModeActive]; [fasterCoder encodeBool:_isPortraitModeActive]; [fasterCoder encodeBool:_isRunning]; [fasterCoder encodeBool:_lensProcessorReady]; [fasterCoder encodeBool:_lensesActive]; [fasterCoder encodeBool:_liveVideoStreaming]; [fasterCoder encodeBool:_lowLightCondition]; [fasterCoder encodeBool:_torchActive]; [fasterCoder encodeBool:_torchSupported]; [fasterCoder encodeFloat64:_zoomFactor]; } - (void)decodeWithFasterDecoder:(id)fasterDecoder { _adjustingExposure = (BOOL)[fasterDecoder decodeBool]; _arSessionActive = (BOOL)[fasterDecoder decodeBool]; _devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32]; _flashActive = (BOOL)[fasterDecoder decodeBool]; _flashSupported = (BOOL)[fasterDecoder decodeBool]; _isNightModeActive = (BOOL)[fasterDecoder decodeBool]; _isPortraitModeActive = (BOOL)[fasterDecoder decodeBool]; _isRunning = (BOOL)[fasterDecoder decodeBool]; _lensProcessorReady = (BOOL)[fasterDecoder decodeBool]; _lensesActive = (BOOL)[fasterDecoder decodeBool]; _liveVideoStreaming = (BOOL)[fasterDecoder decodeBool]; _lowLightCondition = (BOOL)[fasterDecoder decodeBool]; _torchActive = (BOOL)[fasterDecoder decodeBool]; _torchSupported = (BOOL)[fasterDecoder decodeBool]; _zoomFactor = (CGFloat)[fasterDecoder decodeFloat64]; } - (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key { switch (key) { case 15633755733674300ULL: _adjustingExposure = (BOOL)val; break; case 11461798188076803ULL: _arSessionActive = (BOOL)val; break; case 12833337784991002ULL: _flashActive = (BOOL)val; break; case 51252237764061994ULL: _flashSupported = (BOOL)val; break; case 1498048848502287ULL: _isNightModeActive = (BOOL)val; break; case 56151582267629469ULL: _isPortraitModeActive = (BOOL)val; break; case 12346172623874083ULL: _isRunning = (BOOL)val; break; case 67168377441917657ULL: _lensProcessorReady = (BOOL)val; break; case 5791542045168142ULL: _lensesActive = (BOOL)val; break; case 28486888710545224ULL: _liveVideoStreaming = (BOOL)val; break; case 24071673583499455ULL: _lowLightCondition = (BOOL)val; break; case 40774429934225315ULL: _torchActive = (BOOL)val; break; case 41333098301057670ULL: _torchSupported = (BOOL)val; break; } } - (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key { switch (key) { case 66264093189780655ULL: _devicePosition = (SCManagedCaptureDevicePosition)val; break; } } - (void)setFloat64:(double)val forUInt64Key:(uint64_t)key { switch (key) { case 61340640993537628ULL: _zoomFactor = (CGFloat)val; break; } } + (uint64_t)fasterCodingVersion { return 10319810232046341562ULL; } + (uint64_t *)fasterCodingKeys { static uint64_t keys[] = { 15 /* Total */, FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32), FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool), 
FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool), FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64), }; return keys; } #pragma mark - isEqual - (BOOL)isEqual:(id)object { if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) { return NO; } SCManagedCapturerState *other = (SCManagedCapturerState *)object; if (other->_isRunning != _isRunning) { return NO; } if (other->_isNightModeActive != _isNightModeActive) { return NO; } if (other->_isPortraitModeActive != _isPortraitModeActive) { return NO; } if (other->_lowLightCondition != _lowLightCondition) { return NO; } if (other->_adjustingExposure != _adjustingExposure) { return NO; } if (other->_devicePosition != _devicePosition) { return NO; } if (other->_zoomFactor != _zoomFactor) { return NO; } if (other->_flashSupported != _flashSupported) { return NO; } if (other->_torchSupported != _torchSupported) { return NO; } if (other->_flashActive != _flashActive) { return NO; } if (other->_torchActive != _torchActive) { return NO; } if (other->_lensesActive != _lensesActive) { return NO; } if (other->_arSessionActive != _arSessionActive) { return NO; } if (other->_liveVideoStreaming != _liveVideoStreaming) { return NO; } if (other->_lensProcessorReady != _lensProcessorReady) { return NO; } return YES; } - (NSUInteger)hash { NSUInteger subhashes[] = { (NSUInteger)_isRunning, (NSUInteger)_isNightModeActive, (NSUInteger)_isPortraitModeActive, (NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure, (NSUInteger)_devicePosition, (NSUInteger)_zoomFactor, (NSUInteger)_flashSupported, (NSUInteger)_torchSupported, (NSUInteger)_flashActive, (NSUInteger)_torchActive, (NSUInteger)_lensesActive, (NSUInteger)_arSessionActive, (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady}; NSUInteger result = subhashes[0]; for (int i = 1; i < 15; i++) { unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); base = (~base) + (base << 18); base ^= (base >> 31); base *= 21; base ^= (base >> 11); base += (base << 6); base ^= (base >> 22); result = (NSUInteger)base; } return result; } #pragma mark - Print description in console: lldb> po #{variable name} - (NSString *)description { NSMutableString *desc = [NSMutableString string]; [desc appendString:@"{\n"]; [desc appendFormat:@"\tisRunning:%@\n", [@(_isRunning) description]]; [desc appendFormat:@"\tisNightModeActive:%@\n", [@(_isNightModeActive) description]]; [desc appendFormat:@"\tisPortraitModeActive:%@\n", [@(_isPortraitModeActive) description]]; [desc appendFormat:@"\tlowLightCondition:%@\n", [@(_lowLightCondition) description]]; [desc appendFormat:@"\tadjustingExposure:%@\n", [@(_adjustingExposure) description]]; [desc appendFormat:@"\tdevicePosition:%@\n", [@(_devicePosition) description]]; [desc appendFormat:@"\tzoomFactor:%@\n", [@(_zoomFactor) description]]; [desc appendFormat:@"\tflashSupported:%@\n", [@(_flashSupported) description]]; [desc appendFormat:@"\ttorchSupported:%@\n", [@(_torchSupported) description]]; [desc appendFormat:@"\tflashActive:%@\n", [@(_flashActive) description]]; [desc appendFormat:@"\ttorchActive:%@\n", [@(_torchActive) description]]; [desc appendFormat:@"\tlensesActive:%@\n", [@(_lensesActive) 
description]]; [desc appendFormat:@"\tarSessionActive:%@\n", [@(_arSessionActive) description]]; [desc appendFormat:@"\tliveVideoStreaming:%@\n", [@(_liveVideoStreaming) description]]; [desc appendFormat:@"\tlensProcessorReady:%@\n", [@(_lensProcessorReady) description]]; [desc appendString:@"}\n"]; return [desc copy]; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturerState.value ================================================ #import #import "SCManagedCaptureDevice.h" interface SCManagedCapturerState BOOL isRunning BOOL isNightModeActive BOOL isPortraitModeActive BOOL lowLightCondition BOOL adjustingExposure enum SCManagedCaptureDevicePosition devicePosition CGFloat zoomFactor BOOL flashSupported BOOL torchSupported BOOL flashActive BOOL torchActive BOOL lensesActive BOOL arSessionActive BOOL liveVideoStreaming BOOL lensProcessorReady end ================================================ FILE: ManagedCapturer/SCManagedCapturerStateBuilder.h ================================================ // 49126048c3d19dd5b676b8d39844cf133833b67a // Generated by the value-object.rb DO NOT EDIT!! #import "SCManagedCapturerState.h" #import #import @interface SCManagedCapturerStateBuilder : NSObject + (instancetype)withManagedCapturerState:(id)managedCapturerState; - (SCManagedCapturerState *)build; - (instancetype)setIsRunning:(BOOL)isRunning; - (instancetype)setIsNightModeActive:(BOOL)isNightModeActive; - (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive; - (instancetype)setLowLightCondition:(BOOL)lowLightCondition; - (instancetype)setAdjustingExposure:(BOOL)adjustingExposure; - (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition; - (instancetype)setZoomFactor:(CGFloat)zoomFactor; - (instancetype)setFlashSupported:(BOOL)flashSupported; - (instancetype)setTorchSupported:(BOOL)torchSupported; - (instancetype)setFlashActive:(BOOL)flashActive; - (instancetype)setTorchActive:(BOOL)torchActive; - (instancetype)setLensesActive:(BOOL)lensesActive; - (instancetype)setArSessionActive:(BOOL)arSessionActive; - (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming; - (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady; @end ================================================ FILE: ManagedCapturer/SCManagedCapturerStateBuilder.m ================================================ // 49126048c3d19dd5b676b8d39844cf133833b67a // Generated by the value-object.rb DO NOT EDIT!! 
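Typical builder usage, per the interface above: seed the builder from an existing immutable state, override a field or two, and build a new snapshot (`current` is a hypothetical existing SCManagedCapturerState):

SCManagedCapturerState *next = [[[[SCManagedCapturerStateBuilder withManagedCapturerState:current]
    setTorchActive:YES]
    setZoomFactor:2.0]
    build];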
#import "SCManagedCapturerStateBuilder.h" #import #import @implementation SCManagedCapturerStateBuilder { BOOL _isRunning; BOOL _isNightModeActive; BOOL _isPortraitModeActive; BOOL _lowLightCondition; BOOL _adjustingExposure; SCManagedCaptureDevicePosition _devicePosition; CGFloat _zoomFactor; BOOL _flashSupported; BOOL _torchSupported; BOOL _flashActive; BOOL _torchActive; BOOL _lensesActive; BOOL _arSessionActive; BOOL _liveVideoStreaming; BOOL _lensProcessorReady; } + (instancetype)withManagedCapturerState:(id)managedCapturerState { SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init]; builder->_isRunning = managedCapturerState.isRunning; builder->_isNightModeActive = managedCapturerState.isNightModeActive; builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive; builder->_lowLightCondition = managedCapturerState.lowLightCondition; builder->_adjustingExposure = managedCapturerState.adjustingExposure; builder->_devicePosition = managedCapturerState.devicePosition; builder->_zoomFactor = managedCapturerState.zoomFactor; builder->_flashSupported = managedCapturerState.flashSupported; builder->_torchSupported = managedCapturerState.torchSupported; builder->_flashActive = managedCapturerState.flashActive; builder->_torchActive = managedCapturerState.torchActive; builder->_lensesActive = managedCapturerState.lensesActive; builder->_arSessionActive = managedCapturerState.arSessionActive; builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming; builder->_lensProcessorReady = managedCapturerState.lensProcessorReady; return builder; } - (SCManagedCapturerState *)build { return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning isNightModeActive:_isNightModeActive isPortraitModeActive:_isPortraitModeActive lowLightCondition:_lowLightCondition adjustingExposure:_adjustingExposure devicePosition:_devicePosition zoomFactor:_zoomFactor flashSupported:_flashSupported torchSupported:_torchSupported flashActive:_flashActive torchActive:_torchActive lensesActive:_lensesActive arSessionActive:_arSessionActive liveVideoStreaming:_liveVideoStreaming lensProcessorReady:_lensProcessorReady]; } - (instancetype)setIsRunning:(BOOL)isRunning { _isRunning = isRunning; return self; } - (instancetype)setIsNightModeActive:(BOOL)isNightModeActive { _isNightModeActive = isNightModeActive; return self; } - (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive { _isPortraitModeActive = isPortraitModeActive; return self; } - (instancetype)setLowLightCondition:(BOOL)lowLightCondition { _lowLightCondition = lowLightCondition; return self; } - (instancetype)setAdjustingExposure:(BOOL)adjustingExposure { _adjustingExposure = adjustingExposure; return self; } - (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition { _devicePosition = devicePosition; return self; } - (instancetype)setZoomFactor:(CGFloat)zoomFactor { _zoomFactor = zoomFactor; return self; } - (instancetype)setFlashSupported:(BOOL)flashSupported { _flashSupported = flashSupported; return self; } - (instancetype)setTorchSupported:(BOOL)torchSupported { _torchSupported = torchSupported; return self; } - (instancetype)setFlashActive:(BOOL)flashActive { _flashActive = flashActive; return self; } - (instancetype)setTorchActive:(BOOL)torchActive { _torchActive = torchActive; return self; } - (instancetype)setLensesActive:(BOOL)lensesActive { _lensesActive = lensesActive; return self; } - (instancetype)setArSessionActive:(BOOL)arSessionActive { 
_arSessionActive = arSessionActive; return self; } - (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming { _liveVideoStreaming = liveVideoStreaming; return self; } - (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady { _lensProcessorReady = lensProcessorReady; return self; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturerUtils.h ================================================ // // SCManagedCapturerUtils.h // Snapchat // // Created by Chao Pang on 10/4/17. // #import #import #import SC_EXTERN_C_BEGIN extern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio; extern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void); extern CGSize SCManagedCapturerAllScreenSize(void); extern CGSize SCAsyncImageCapturePlaceholderViewSize(void); extern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio); extern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio); extern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight); extern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio); extern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight); extern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio); SC_EXTERN_C_END ================================================ FILE: ManagedCapturer/SCManagedCapturerUtils.m ================================================ // // SCManagedCapturerUtils.m // Snapchat // // Created by Chao Pang on 10/4/17. // #import "SCManagedCapturerUtils.h" #import "SCCaptureCommon.h" #import #import #import #import // This is to calculate the crop ratio for generating the image shown in Preview page // Check https://snapchat.quip.com/lU3kAoDxaAFG for our design. const CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0); CGFloat SCManagedCapturedImageAndVideoAspectRatio(void) { static dispatch_once_t onceToken; static CGFloat aspectRatio; dispatch_once(&onceToken, ^{ CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; aspectRatio = SCSizeGetAspectRatio( CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)); }); return aspectRatio; } CGSize SCManagedCapturerAllScreenSize(void) { static CGSize size; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; // This logic is complicated because we need to handle iPhone X properly. // See https://snapchat.quip.com/lU3kAoDxaAFG for our design. 
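// Worked example (illustrative numbers for an iPhone X-class portrait screen):
// screenSize = (375, 812), safe-area top/bottom insets = (44, 34), visual top
// inset = 44. The preview content is 375 x (812 - 44 - 34) = 375 x 734, and
// stretching it to the camera screen's 812 - 44 = 768pt of height needs a
// width of roundf(375 * 768 / 734) = 392pt, so ~17pt of width bleeds off the
// sides of the screen.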
UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; // This really is just some coordinate computations: // We know in preview, our size is (screenWidth, screenHeight - topInset - bottomInset) // We know that when the preview image is in the camera screen, the height is screenHeight - visualTopInset, // thus, we need to figure out in camera screen, what's the bleed-over width should be // (screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset) size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) / (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), screenSize.height); }); return size; } CGSize SCAsyncImageCapturePlaceholderViewSize(void) { static CGSize size; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width / (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), screenSize.height - visualSafeInsets.top); }); return size; } CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio) { SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @""); switch (orientation) { case UIImageOrientationLeft: case UIImageOrientationRight: case UIImageOrientationLeftMirrored: case UIImageOrientationRightMirrored: return 1.0 / aspectRatio; default: return aspectRatio; } } UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio) { if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) { CGImageRef croppedImageRef = SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio); UIImage *croppedImage = [UIImage imageWithCGImage:croppedImageRef scale:image.scale orientation:image.imageOrientation]; CGImageRelease(croppedImageRef); return croppedImage; } else { return image; } } void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight) { SCCAssert(outputWidth != NULL && outputHeight != NULL, @""); aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); if (inputWidth > roundf(inputHeight * aspectRatio)) { *outputHeight = inputHeight; *outputWidth = roundf(*outputHeight * aspectRatio); } else { *outputWidth = inputWidth; *outputHeight = roundf(*outputWidth / aspectRatio); } } BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio) { if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) { return NO; } aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); size_t width = CGImageGetWidth(image); size_t height = CGImageGetHeight(image); return (width != roundf(height * aspectRatio)); } CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight) { if ([SCDeviceName isIphoneX]) { // X is pushed all the way over to crop out top section but none of bottom CGFloat x = (imageWidth - croppedWidth); // Crop y symmetrically. 
CGFloat y = roundf((imageHeight - croppedHeight) / 2.0);
        return CGRectMake(x, y, croppedWidth, croppedHeight);
    }
    return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight);
}

CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio)
{
    SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @"");
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    size_t croppedWidth, croppedHeight;
    if ([SCDeviceName isIphoneX]) {
        size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio);
        size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio);
        SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth,
                                     &croppedHeight);
    } else {
        SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight);
    }
    CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight);
    return CGImageCreateWithImageInRect(image, cropRect);
}

================================================
FILE: ManagedCapturer/SCManagedCapturerV1.h
================================================
//
//  SCManagedCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 4/20/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCaptureCommon.h"
#import "SCCapturer.h"

#import #import #import

/**
 * Manage AVCaptureSession with SCManagedCapturerV1
 *
 * In phantom, there are a lot of places we use AVCaptureSession. However, since for each app only one session
 * can run at the same time, we need some kind of management for the capture session.
 *
 * SCManagedCapturerV1 manages the state of the capture session in the following ways:
 *
 * All operations in SCManagedCapturerV1 are handled on a serial queue to ensure their ordering. All callbacks (either
 * on the listener or the completion handler) are delivered on the main thread. The state of SCManagedCapturerV1 is
 * conveniently maintained in an SCManagedCapturerState object, which is immutable and can be passed across threads.
 * It maintains a consistent, if possibly delayed, view of the capture session (thus, the state delivered on the main
 * thread may say the current active device is the back camera while, on the serial queue, the active device has
 * already switched to the front camera. This is OK because state.devicePosition was the back camera, with all its
 * setup, at that time. Note that it is impossible to have an on-time view of the state across threads without
 * blocking each other).
 *
 * For the main use cases, you set up the capturer, add the preview layer, and then call capture still image
 * or record video, and SCManagedCapturerV1 will do the rest (make sure it actually captures the image / video,
 * recover from errors, and set up our more advanced image / video post-processing).
 *
 * The key classes that drive the recording flow are SCManagedVideoStreamer and SCManagedVideoFileStreamer, which
 * conform to SCManagedVideoDataSource. They stream images to consumers conforming to
 * SCManagedVideoDataSourceListener, such as SCManagedLensesProcessor, SCManagedDeviceCapacityAnalyzer,
 * SCManagedVideoScanner and ultimately SCManagedVideoCapturer and SCManagedStillImageCapturer, which record the
 * final output.
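 *
 * A minimal call sequence for that main use case might look like the sketch below
 * (the context string and nil completion handlers are illustrative placeholders):
 *
 *   SCManagedCapturerV1 *capturer = [SCManagedCapturerV1 sharedInstance];
 *   [capturer setupWithDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
 *                                 completionHandler:nil
 *                                           context:@"example"];
 *   SCCapturerToken *token =
 *       [capturer startRunningAsynchronouslyWithCompletionHandler:nil context:@"example"];
 *   // ... capture still images / record video ...
 *   [capturer stopRunningAsynchronously:token completionHandler:nil context:@"example"];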
* */ @class SCCaptureResource; extern NSString *const kSCLensesTweaksDidChangeFileInput; @interface SCManagedCapturerV1 : NSObject + (SCManagedCapturerV1 *)sharedInstance; /* The following APIs are reserved to be only used for SCCaptureCore aka managedCapturerV2. */ - (instancetype)initWithResource:(SCCaptureResource *)resource; @end ================================================ FILE: ManagedCapturer/SCManagedCapturerV1.m ================================================ // // SCManagedCapturer.m // Snapchat // // Created by Liu Liu on 4/20/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCManagedCapturerV1.h" #import "SCManagedCapturerV1_Private.h" #import "ARConfiguration+SCConfiguration.h" #import "NSURL+Asset.h" #import "SCBlackCameraDetector.h" #import "SCBlackCameraNoOutputDetector.h" #import "SCCameraTweaks.h" #import "SCCaptureResource.h" #import "SCCaptureSessionFixer.h" #import "SCCaptureUninitializedState.h" #import "SCCaptureWorker.h" #import "SCCapturerToken.h" #import "SCManagedAudioStreamer.h" #import "SCManagedCaptureDevice+SCManagedCapturer.h" #import "SCManagedCaptureDeviceDefaultZoomHandler.h" #import "SCManagedCaptureDeviceHandler.h" #import "SCManagedCaptureDeviceSubjectAreaHandler.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCManagedCaptureSession.h" #import "SCManagedCapturerARImageCaptureProvider.h" #import "SCManagedCapturerGLViewManagerAPI.h" #import "SCManagedCapturerLSAComponentTrackerAPI.h" #import "SCManagedCapturerLensAPI.h" #import "SCManagedCapturerListenerAnnouncer.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerSampleMetadata.h" #import "SCManagedCapturerState.h" #import "SCManagedCapturerStateBuilder.h" #import "SCManagedDeviceCapacityAnalyzer.h" #import "SCManagedDroppedFramesReporter.h" #import "SCManagedFrameHealthChecker.h" #import "SCManagedFrontFlashController.h" #import "SCManagedStillImageCapturer.h" #import "SCManagedStillImageCapturerHandler.h" #import "SCManagedVideoARDataSource.h" #import "SCManagedVideoCapturer.h" #import "SCManagedVideoFileStreamer.h" #import "SCManagedVideoFrameSampler.h" #import "SCManagedVideoScanner.h" #import "SCManagedVideoStreamReporter.h" #import "SCManagedVideoStreamer.h" #import "SCMetalUtils.h" #import "SCProcessingPipeline.h" #import "SCProcessingPipelineBuilder.h" #import "SCScanConfiguration.h" #import "SCSingleFrameStreamCapturer.h" #import "SCSnapCreationTriggers.h" #import "SCTimedTask.h" #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import #import @import ARKit; static NSUInteger const kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession = 22; static CGFloat const kSCManagedCapturerFixInconsistencyARSessionDelayThreshold = 2; static CGFloat const kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold = 5; static NSTimeInterval const kMinFixAVSessionRunningInterval = 1; // Interval to run _fixAVSessionIfNecessary static NSTimeInterval const kMinFixSessionRuntimeErrorInterval = 1; // Min interval that RuntimeError calls _startNewSession static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain"; NSString *const kSCLensesTweaksDidChangeFileInput = @"kSCLensesTweaksDidChangeFileInput"; @implementation SCManagedCapturerV1 { // No ivars for CapturerV1 please, they should be in resource. 
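    // (The resource injection below is what lets V2 share this engine: per the
    // header note, SCCaptureCore presumably builds its own SCCaptureResource and
    // hands it to -initWithResource:, so V1 and V2 drive the same capture state.)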
SCCaptureResource *_captureResource; } + (SCManagedCapturerV1 *)sharedInstance { static dispatch_once_t onceToken; static SCManagedCapturerV1 *managedCapturerV1; dispatch_once(&onceToken, ^{ managedCapturerV1 = [[SCManagedCapturerV1 alloc] init]; }); return managedCapturerV1; } - (instancetype)init { SCTraceStart(); SCAssertMainThread(); SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource]; return [self initWithResource:resource]; } - (instancetype)initWithResource:(SCCaptureResource *)resource { SCTraceODPCompatibleStart(2); SCAssertMainThread(); self = [super init]; if (self) { // Assuming I am not in background. I can be more defensive here and fetch the app state. // But to avoid potential problems, won't do that until later. SCLogCapturerInfo(@"======================= cool startup ======================="); // Initialization of capture resource should be done in worker to be shared between V1 and V2. _captureResource = resource; _captureResource.handleAVSessionStatusChange = @selector(_handleAVSessionStatusChange:); _captureResource.sessionRuntimeError = @selector(_sessionRuntimeError:); _captureResource.livenessConsistency = @selector(_livenessConsistency:); _captureResource.deviceSubjectAreaHandler = [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:_captureResource]; _captureResource.snapCreationTriggers = [SCSnapCreationTriggers new]; if (SCIsMasterBuild()) { // We call _sessionRuntimeError to reset _captureResource.videoDataSource if input changes [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_sessionRuntimeError:) name:kSCLensesTweaksDidChangeFileInput object:nil]; } } return self; } - (SCBlackCameraDetector *)blackCameraDetector { return _captureResource.blackCameraDetector; } - (void)recreateAVCaptureSession { SCTraceODPCompatibleStart(2); [self _startRunningWithNewCaptureSessionIfNecessary]; } - (void)_handleAVSessionStatusChange:(NSDictionary *)change { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground); BOOL wasRunning = [change[NSKeyValueChangeOldKey] boolValue]; BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue]; SCLogCapturerInfo(@"avSession running status changed: %@ -> %@", wasRunning ? @"running" : @"stopped", isRunning ? 
@"running" : @"stopped");
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:isRunning];
    if (_captureResource.isRecreateSessionFixScheduled) {
        SCLogCapturerInfo(@"Scheduled AVCaptureSession recreation, return");
        return;
    }
    if (wasRunning != isRunning) {
        runOnMainThreadAsynchronously(^{
            if (isRunning) {
                [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];
            } else {
                [_captureResource.announcer managedCapturer:self didStopRunning:_captureResource.state];
            }
        });
    }
    if (!isRunning) {
        [_captureResource.queuePerformer perform:^{
            [self _fixAVSessionIfNecessary];
        }];
    } else {
        if (!SCDeviceSupportsMetal()) {
            [self _fixNonMetalSessionPreviewInconsistency];
        }
    }
}

- (void)_fixAVSessionIfNecessary
{
    SCTraceODPCompatibleStart(2);
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground);
    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                     uniqueId:@""
                                     stepName:@"startConsistencyCheckAndFix"];
    NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate];
    if (timeNow - _captureResource.lastFixSessionTimestamp < kMinFixAVSessionRunningInterval) {
        SCLogCoreCameraInfo(@"Fixing session in less than %f, skip", kMinFixAVSessionRunningInterval);
        return;
    }
    _captureResource.lastFixSessionTimestamp = timeNow;
    if (!_captureResource.managedSession.isRunning) {
        SCTraceStartSection("Fix AVSession")
        {
            _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession++;
            SCGhostToSnappableSignalCameraFixInconsistency();
            if (_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession <=
                kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) {
                SCLogCapturerInfo(@"Fixing AVSession");
                [_captureResource.managedSession startRunning];
                SCLogCapturerInfo(@"Fixed AVSession, success : %@", @(_captureResource.managedSession.isRunning));
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                                 uniqueId:@""
                                                 stepName:@"finishCaptureSessionFix"];
            } else {
                // Start running with a new capture session if fixing the inconsistency did not succeed
                SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***");
                [self _startRunningWithNewCaptureSessionIfNecessary];
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession
                                                 uniqueId:@""
                                                 stepName:@"finishNewCaptureSessionCreation"];
            }
        }
        SCTraceEndSection();
        [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraFixAVCaptureSession
                                           uniqueId:@""
                                         parameters:@{
                                             @"success" : @(_captureResource.managedSession.isRunning),
                                             @"count" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession)
                                         }];
    } else {
        _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
        [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraFixAVCaptureSession uniqueId:@""];
    }
    if (_captureResource.managedSession.isRunning) {
        // If it is fixed, we signal that we received the first frame.
        SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
        // For the non-Metal preview render, we need to make sure the preview is not hidden
        if (!SCDeviceSupportsMetal()) {
            [self _fixNonMetalSessionPreviewInconsistency];
        }
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state];
            // To approximate this did-render time; it is not accurate.
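            // (Retry cadence sketch: _fixAVSessionIfNecessary is throttled by
            // kMinFixAVSessionRunningInterval and reschedules itself with after:1
            // below while the session stays down, so a stuck session gets roughly
            // one -startRunning attempt per second until the recreate fallback.)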
SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
        });
    } else {
        [_captureResource.queuePerformer perform:^{
            [self _fixAVSessionIfNecessary];
        } after:1];
    }
    [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning];
}

- (void)_fixNonMetalSessionPreviewInconsistency
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning);
    if ((!_captureResource.videoPreviewLayer.hidden) != _captureResource.managedSession.isRunning) {
        SCTraceStartSection("Fix non-Metal VideoPreviewLayer");
        {
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            [SCCaptureWorker setupVideoPreviewLayer:_captureResource];
            [CATransaction commit];
        }
        SCTraceEndSection();
    }
}

- (SCCaptureResource *)captureResource
{
    SCTraceODPCompatibleStart(2);
    return _captureResource;
}

- (id)lensProcessingCore
{
    SCTraceODPCompatibleStart(2);
    @weakify(self);
    return (id)[[SCLazyLoadingProxy alloc] initWithInitializationBlock:^id {
        @strongify(self);
        SCReportErrorIf(!self.captureResource.state.lensProcessorReady, @"[Lenses] Lens processing core is not ready");
        return self.captureResource.lensProcessingCore;
    }];
}

- (SCVideoCaptureSessionInfo)activeSession
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker activeSession:_captureResource];
}

- (BOOL)isLensApplied
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker isLensApplied:_captureResource];
}

- (BOOL)isVideoMirrored
{
    SCTraceODPCompatibleStart(2);
    return [SCCaptureWorker isVideoMirrored:_captureResource];
}

#pragma mark - Setup, Start & Stop

- (void)_updateHRSIEnabled
{
    SCTraceODPCompatibleStart(2);
    // Since night mode is low-res, we set high-resolution still image output when night mode is enabled.
    // SoftwareZoom requires a higher resolution image to get a better zooming result too.
    // We also want a higher resolution on newer devices.
    BOOL is1080pSupported = [SCManagedCaptureDevice is1080pSupported];
    BOOL shouldHRSIEnabled =
        (_captureResource.device.isNightModeActive || _captureResource.device.softwareZoom || is1080pSupported);
    SCLogCapturerInfo(@"Setting HRSIEnabled to: %d. 
isNightModeActive:%d softwareZoom:%d is1080pSupported:%d", shouldHRSIEnabled, _captureResource.device.isNightModeActive, _captureResource.device.softwareZoom, is1080pSupported); [_captureResource.stillImageCapturer setHighResolutionStillImageOutputEnabled:shouldHRSIEnabled]; } - (void)_updateStillImageStabilizationEnabled { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Enabling still image stabilization"); [_captureResource.stillImageCapturer enableStillImageStabilization]; } - (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition); SCTraceResumeToken token = SCTraceCapture(); [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer]; [_captureResource.queuePerformer perform:^{ SCTraceResume(token); [self setupWithDevicePosition:devicePosition completionHandler:completionHandler]; }]; } - (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler { SCTraceODPCompatibleStart(2); SCAssertPerformer(_captureResource.queuePerformer); [SCCaptureWorker setupWithCaptureResource:_captureResource devicePosition:devicePosition]; [self addListener:_captureResource.stillImageCapturer]; [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; [self addListener:_captureResource.lensProcessingCore]; [self _updateHRSIEnabled]; [self _updateStillImageStabilizationEnabled]; [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; if (!SCDeviceSupportsMetal()) { [SCCaptureWorker makeVideoPreviewLayer:_captureResource]; } // I need to do this setup now. Thus, it is off the main thread. This also means my preview layer controller is // entangled with the capturer. 
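    // (Ordering note: the session, device input, still-image capturer and
    // listeners are configured on the serial queue first; only after the render
    // pipeline below is wired up does the status flip to
    // SCManagedCapturerStatusReady, with did-change callbacks then firing on the
    // main thread.)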
[[SCManagedCapturePreviewLayerController sharedInstance] setupRenderPipeline]; [[SCManagedCapturePreviewLayerController sharedInstance] setManagedCapturer:self]; _captureResource.status = SCManagedCapturerStatusReady; SCManagedCapturerState *state = [_captureResource.state copy]; AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; runOnMainThreadAsynchronously(^{ SCLogCapturerInfo(@"Did setup with devicePosition:%lu", (unsigned long)devicePosition); [_captureResource.announcer managedCapturer:self didChangeState:state]; [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; if (!SCDeviceSupportsMetal()) { [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; } if (completionHandler) { completionHandler(); } }); } - (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ _captureResource.sampleBufferDisplayController = sampleBufferDisplayController; [_captureResource.videoDataSource addSampleBufferDisplayController:sampleBufferDisplayController]; }]; } - (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCTraceResumeToken resumeToken = SCTraceCapture(); [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); [_captureResource.queuePerformer perform:^{ SCTraceResume(resumeToken); [SCCaptureWorker startRunningWithCaptureResource:_captureResource token:token completionHandler:completionHandler]; // After startRunning, we need to make sure _fixAVSessionIfNecessary start running. // The problem: with the new KVO fix strategy, it may happen that AVCaptureSession is in stopped state, thus no // KVO callback is triggered. // And calling startRunningAsynchronouslyWithCompletionHandler has no effect because SCManagedCapturerStatus is // in SCManagedCapturerStatusRunning state [self _fixAVSessionIfNecessary]; }]; return token; } - (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertPerformer(_captureResource.queuePerformer); SCLogCapturerInfo(@"Stop running. token:%@ context:%@", token, context); return [SCCaptureWorker stopRunningWithCaptureResource:_captureResource token:token completionHandler:completionHandler]; } - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Stop running asynchronously. 
token:%@ context:%@", token, context); SCTraceResumeToken resumeToken = SCTraceCapture(); [_captureResource.queuePerformer perform:^{ SCTraceResume(resumeToken); [SCCaptureWorker stopRunningWithCaptureResource:_captureResource token:token completionHandler:completionHandler]; }]; } - (void)stopRunningAsynchronously:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler after:(NSTimeInterval)delay context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Stop running asynchronously. token:%@ delay:%f", token, delay); NSTimeInterval startTime = CACurrentMediaTime(); [_captureResource.queuePerformer perform:^{ NSTimeInterval elapsedTime = CACurrentMediaTime() - startTime; [_captureResource.queuePerformer perform:^{ SCTraceStart(); // If we haven't started a new running sequence yet, stop running now [SCCaptureWorker stopRunningWithCaptureResource:_captureResource token:token completionHandler:completionHandler]; } after:MAX(delay - elapsedTime, 0)]; }]; } - (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Start streaming asynchronously"); [_captureResource.queuePerformer perform:^{ SCTraceStart(); [SCCaptureWorker startStreaming:_captureResource]; if (completionHandler) { runOnMainThreadAsynchronously(completionHandler); } }]; } #pragma mark - Recording / Capture - (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ [SCCaptureWorker captureStillImageWithCaptureResource:_captureResource aspectRatio:aspectRatio captureSessionID:captureSessionID shouldCaptureFromVideo:[self _shouldCaptureImageFromVideo] completionHandler:completionHandler context:context]; }]; } - (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); SCLogCapturerInfo(@"Start capturing single video frame"); _captureResource.frameCap = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^void(UIImage *image) { [_captureResource.queuePerformer perform:^{ [_captureResource.videoDataSource removeListener:_captureResource.frameCap]; _captureResource.frameCap = nil; }]; runOnMainThreadAsynchronously(^{ [_captureResource.device setTorchActive:NO]; SCLogCapturerInfo(@"End capturing single video frame"); completionHandler(image); }); }]; BOOL waitForTorch = NO; if (!_captureResource.state.torchActive) { if (_captureResource.state.flashActive) { waitForTorch = YES; [_captureResource.device setTorchActive:YES]; } } [_captureResource.queuePerformer perform:^{ [_captureResource.videoDataSource addListener:_captureResource.frameCap]; [SCCaptureWorker startStreaming:_captureResource]; } after:(waitForTorch ? 
0.5 : 0)]; }]; } - (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context audioConfiguration:(SCAudioConfiguration *)configuration { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCLogCapturerInfo(@"prepare for recording"); [_captureResource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration]; }]; } - (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ [SCCaptureWorker startRecordingWithCaptureResource:_captureResource outputSettings:outputSettings audioConfiguration:configuration maxDuration:maxDuration fileURL:fileURL captureSessionID:captureSessionID completionHandler:completionHandler]; }]; } - (void)stopRecordingAsynchronouslyWithContext:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ [SCCaptureWorker stopRecordingWithCaptureResource:_captureResource]; }]; } - (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ [SCCaptureWorker cancelRecordingWithCaptureResource:_captureResource]; }]; } - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); [SCCaptureWorker startScanWithScanConfiguration:configuration resource:_captureResource]; }]; } - (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:_captureResource]; }]; } - (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); // Previously _captureResource.videoFrameSampler was conditionally created when setting up, but if this method is // called it is a // safe assumption the client wants it to run instead of failing silently, so always create // _captureResource.videoFrameSampler if (!_captureResource.videoFrameSampler) { _captureResource.videoFrameSampler = [SCManagedVideoFrameSampler new]; [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; } SCLogCapturerInfo(@"Sampling next frame"); [_captureResource.videoFrameSampler sampleNextFrame:completionHandler]; } - (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Adding timed task:%@", task); [_captureResource.queuePerformer perform:^{ [_captureResource.videoCapturer addTimedTask:task]; }]; } - (void)clearTimedTasksWithContext:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ [_captureResource.videoCapturer clearTimedTasks]; }]; } #pragma mark - Utilities - (void)convertViewCoordinates:(CGPoint)viewCoordinates completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler context:(NSString *)context { 
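    // (A tap at the exact center of the view always maps to pointOfInterest
    // (0.5, 0.5); for off-center taps the device conversion has to compensate
    // for the frame regions that AVLayerVideoGravityResizeAspectFill pushes
    // off-screen, which is why viewSize and videoGravity are passed along.)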
SCTraceODPCompatibleStart(2); SCAssert(completionHandler, @"completionHandler shouldn't be nil"); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (SCDeviceSupportsMetal()) { CGSize viewSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; CGPoint pointOfInterest = [_captureResource.device convertViewCoordinates:viewCoordinates viewSize:viewSize videoGravity:AVLayerVideoGravityResizeAspectFill]; runOnMainThreadAsynchronously(^{ completionHandler(pointOfInterest); }); } else { CGSize viewSize = _captureResource.videoPreviewLayer.bounds.size; CGPoint pointOfInterest = [_captureResource.device convertViewCoordinates:viewCoordinates viewSize:viewSize videoGravity:_captureResource.videoPreviewLayer.videoGravity]; runOnMainThreadAsynchronously(^{ completionHandler(pointOfInterest); }); } }]; } - (void)detectLensCategoryOnNextFrame:(CGPoint)point lenses:(NSArray *)lenses completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssert(completion, @"completionHandler shouldn't be nil"); SCAssertMainThread(); [_captureResource.queuePerformer perform:^{ SCTraceStart(); SCLogCapturerInfo(@"Detecting lens category on next frame. point:%@, lenses:%@", NSStringFromCGPoint(point), [lenses valueForKey:NSStringFromSelector(@selector(lensId))]); [_captureResource.lensProcessingCore detectLensCategoryOnNextFrame:point videoOrientation:_captureResource.videoDataSource.videoOrientation lenses:lenses completion:^(SCLensCategory *_Nullable category, NSInteger categoriesCount) { runOnMainThreadAsynchronously(^{ if (completion) { completion(category, categoriesCount); } }); }]; }]; } #pragma mark - Configurations - (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Setting device position asynchronously to: %lu", (unsigned long)devicePosition); [_captureResource.queuePerformer perform:^{ SCTraceStart(); BOOL devicePositionChanged = NO; BOOL nightModeChanged = NO; BOOL portraitModeChanged = NO; BOOL zoomFactorChanged = NO; BOOL flashSupportedOrTorchSupportedChanged = NO; SCManagedCapturerState *state = [_captureResource.state copy]; if (_captureResource.state.devicePosition != devicePosition) { SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; if (device) { if (!device.delegate) { device.delegate = _captureResource.captureDeviceHandler; } SCManagedCaptureDevice *prevDevice = _captureResource.device; [SCCaptureWorker turnARSessionOff:_captureResource]; BOOL isStreaming = _captureResource.videoDataSource.isStreaming; if (!SCDeviceSupportsMetal()) { if (isStreaming) { [_captureResource.videoDataSource stopStreaming]; } } SCLogCapturerInfo(@"Set device position beginConfiguration"); [_captureResource.videoDataSource beginConfiguration]; [_captureResource.managedSession beginConfiguration]; // Turn off flash for the current device in case it is active [_captureResource.device setTorchActive:NO]; if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { _captureResource.frontFlashController.torchActive = NO; } [_captureResource.deviceCapacityAnalyzer removeFocusListener]; [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; _captureResource.device = device; BOOL deviceSet = [_captureResource.device 
setDeviceAsInput:_captureResource.managedSession.avSession]; // If we are toggling while recording, set the night mode back to not // active if (_captureResource.videoRecording) { [self _setNightModeActive:NO]; } // Sync night mode, torch and flash state with the current device devicePositionChanged = (_captureResource.state.devicePosition != devicePosition); nightModeChanged = (_captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive); portraitModeChanged = devicePositionChanged && (devicePosition == SCManagedCaptureDevicePositionBackDualCamera || _captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera); zoomFactorChanged = (_captureResource.state.zoomFactor != _captureResource.device.zoomFactor); if (zoomFactorChanged && _captureResource.device.softwareZoom) { [SCCaptureWorker softwareZoomWithDevice:_captureResource.device resource:_captureResource]; } if (_captureResource.state.flashActive != _captureResource.device.flashActive) { // preserve flashActive across devices _captureResource.device.flashActive = _captureResource.state.flashActive; } if (_captureResource.state.liveVideoStreaming != device.liveVideoStreamingActive) { // preserve liveVideoStreaming state across devices [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming session:_captureResource.managedSession.avSession]; } if (devicePosition == SCManagedCaptureDevicePositionBackDualCamera && _captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive) { // preserve nightMode when switching from back camera to back dual camera [self _setNightModeActive:_captureResource.state.isNightModeActive]; } flashSupportedOrTorchSupportedChanged = (_captureResource.state.flashSupported != _captureResource.device.isFlashSupported || _captureResource.state.torchSupported != _captureResource.device.isTorchSupported); SCLogCapturerInfo(@"Set device position: %lu -> %lu, night mode: %d -> %d, zoom " @"factor: %f -> %f, flash supported: %d -> %d, torch supported: %d -> %d", (unsigned long)_captureResource.state.devicePosition, (unsigned long)devicePosition, _captureResource.state.isNightModeActive, _captureResource.device.isNightModeActive, _captureResource.state.zoomFactor, _captureResource.device.zoomFactor, _captureResource.state.flashSupported, _captureResource.device.isFlashSupported, _captureResource.state.torchSupported, _captureResource.device.isTorchSupported); _captureResource.state = [[[[[[[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setDevicePosition:devicePosition] setIsNightModeActive:_captureResource.device.isNightModeActive] setZoomFactor:_captureResource.device.zoomFactor] setFlashSupported:_captureResource.device.isFlashSupported] setTorchSupported:_captureResource.device.isTorchSupported] setIsPortraitModeActive:devicePosition == SCManagedCaptureDevicePositionBackDualCamera] build]; [self _updateHRSIEnabled]; [self _updateStillImageStabilizationEnabled]; // This needs to be done after we have finished configure everything // for session otherwise we // may set it up without hooking up the video input yet, and will set // wrong parameter for the // output. 
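    // (Example of the ordering hazard described above: if -setDevicePosition:
    // ran before the new AVCaptureDeviceInput was attached, the data source
    // could derive orientation/mirroring from the old camera, and front-camera
    // frames could come out unmirrored.)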
[_captureResource.videoDataSource setDevicePosition:devicePosition]; if (@available(ios 11.0, *)) { if (portraitModeChanged) { [_captureResource.videoDataSource setDepthCaptureEnabled:_captureResource.state.isPortraitModeActive]; [_captureResource.device setCaptureDepthData:_captureResource.state.isPortraitModeActive session:_captureResource.managedSession.avSession]; [_captureResource.stillImageCapturer setPortraitModeCaptureEnabled:_captureResource.state.isPortraitModeActive]; if (_captureResource.state.isPortraitModeActive) { SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; processingPipelineBuilder.portraitModeEnabled = YES; SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); [_captureResource.videoDataSource addProcessingPipeline:pipeline]; } else { [_captureResource.videoDataSource removeProcessingPipeline]; } } } [_captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:_captureResource.device]; [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; [_captureResource.managedSession commitConfiguration]; [_captureResource.videoDataSource commitConfiguration]; // Checks if the flash is activated and if so switches the flash along // with the camera view. Setting device's torch mode has to be called after -[AVCaptureSession // commitConfiguration], otherwise flash may be not working, especially for iPhone 8/8 Plus. if (_captureResource.state.torchActive || (_captureResource.state.flashActive && _captureResource.videoRecording)) { [_captureResource.device setTorchActive:YES]; if (devicePosition == SCManagedCaptureDevicePositionFront) { _captureResource.frontFlashController.torchActive = YES; } } SCLogCapturerInfo(@"Set device position commitConfiguration"); [_captureResource.droppedFramesReporter didChangeCaptureDevicePosition]; if (!SCDeviceSupportsMetal()) { if (isStreaming) { [SCCaptureWorker startStreaming:_captureResource]; } } NSArray *inputs = _captureResource.managedSession.avSession.inputs; if (!deviceSet) { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:@"setDeviceForInput failed"]; } else if (inputs.count == 0) { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:@"no input"]; } else if (inputs.count > 1) { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:[NSString sc_stringWithFormat:@"multiple inputs: %@", inputs]]; } else { AVCaptureDeviceInput *input = [inputs firstObject]; AVCaptureDevice *resultDevice = input.device; if (resultDevice == prevDevice.device) { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:@"stayed on previous device"]; } else if (resultDevice != _captureResource.device.device) { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:[NSString sc_stringWithFormat:@"unknown input device: %@", resultDevice]]; } } } else { [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition to:devicePosition reason:@"no device"]; } } else { SCLogCapturerInfo(@"Device position did not change"); if (_captureResource.device.position != _captureResource.state.devicePosition) { [self _logFailureSetDevicePositionFrom:state.devicePosition to:devicePosition reason:@"state position set incorrectly"]; } } BOOL stateChanged = ![_captureResource.state 
isEqual:state]; state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ if (stateChanged) { [_captureResource.announcer managedCapturer:self didChangeState:state]; } if (devicePositionChanged) { [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; } if (nightModeChanged) { [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; } if (portraitModeChanged) { [_captureResource.announcer managedCapturer:self didChangePortraitModeActive:state]; } if (zoomFactorChanged) { [_captureResource.announcer managedCapturer:self didChangeZoomFactor:state]; } if (flashSupportedOrTorchSupportedChanged) { [_captureResource.announcer managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; } if (completionHandler) { completionHandler(); } }); }]; } - (void)_logFailureSetDevicePositionFrom:(SCManagedCaptureDevicePosition)start to:(SCManagedCaptureDevicePosition)end reason:(NSString *)reason { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Device position change failed: %@", reason); [[SCLogger sharedInstance] logEvent:kSCCameraMetricsCameraFlipFailure parameters:@{ @"start" : @(start), @"end" : @(end), @"reason" : reason, }]; } - (void)setFlashActive:(BOOL)flashActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); BOOL flashActiveOrFrontFlashEnabledChanged = NO; if (_captureResource.state.flashActive != flashActive) { [_captureResource.device setFlashActive:flashActive]; SCLogCapturerInfo(@"Set flash active: %d -> %d", _captureResource.state.flashActive, flashActive); _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setFlashActive:flashActive] build]; flashActiveOrFrontFlashEnabledChanged = YES; } SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ if (flashActiveOrFrontFlashEnabledChanged) { [_captureResource.announcer managedCapturer:self didChangeState:state]; [_captureResource.announcer managedCapturer:self didChangeFlashActive:state]; } if (completionHandler) { completionHandler(); } }); }]; } - (void)setLensesActive:(BOOL)lensesActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [self _setLensesActive:lensesActive liveVideoStreaming:NO filterFactory:nil completionHandler:completionHandler context:context]; } - (void)setLensesActive:(BOOL)lensesActive filterFactory:(SCLookseryFilterFactory *)filterFactory completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [self _setLensesActive:lensesActive liveVideoStreaming:NO filterFactory:filterFactory completionHandler:completionHandler context:context]; } - (void)setLensesInTalkActive:(BOOL)lensesActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { // Talk requires liveVideoStreaming to be turned on BOOL liveVideoStreaming = lensesActive; dispatch_block_t activationBlock = ^{ [self _setLensesActive:lensesActive liveVideoStreaming:liveVideoStreaming filterFactory:nil completionHandler:completionHandler context:context]; }; @weakify(self); [_captureResource.queuePerformer perform:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); // If lenses are enabled in TV3 and it was enabled not from TV3. We have to turn off lenses off at first. 
BOOL shouldTurnOffBeforeActivation = liveVideoStreaming && !self->_captureResource.state.liveVideoStreaming &&
                                             self->_captureResource.state.lensesActive;
        if (shouldTurnOffBeforeActivation) {
            [self _setLensesActive:NO
                liveVideoStreaming:NO
                     filterFactory:nil
                 completionHandler:activationBlock
                           context:context];
        } else {
            activationBlock();
        }
    }];
}

- (void)_setLensesActive:(BOOL)lensesActive
      liveVideoStreaming:(BOOL)liveVideoStreaming
           filterFactory:(SCLookseryFilterFactory *)filterFactory
       completionHandler:(dispatch_block_t)completionHandler
                 context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting lenses active to: %d", lensesActive);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        BOOL lensesActiveChanged = NO;
        if (_captureResource.state.lensesActive != lensesActive) {
            SCLogCapturerInfo(@"Set lenses active: %d -> %d", _captureResource.state.lensesActive, lensesActive);
            _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
                setLensesActive:lensesActive] build];
            // Update capturer settings (orientation and resolution) after changing state, because the
            // _setLiveVideoStreaming logic depends on it
            [self _setLiveVideoStreaming:liveVideoStreaming];
            [SCCaptureWorker turnARSessionOff:_captureResource];
            // Only enable sample buffer display when lenses are not active.
            [_captureResource.videoDataSource setSampleBufferDisplayEnabled:!lensesActive];
            [_captureResource.debugInfoDict setObject:!lensesActive ? @"True" : @"False"
                                               forKey:@"sampleBufferDisplayEnabled"];
            lensesActiveChanged = YES;
            [_captureResource.lensProcessingCore setAspectRatio:_captureResource.state.liveVideoStreaming];
            [_captureResource.lensProcessingCore setLensesActive:_captureResource.state.lensesActive
                                                videoOrientation:_captureResource.videoDataSource.videoOrientation
                                                   filterFactory:filterFactory];
            BOOL modifySource = _captureResource.state.liveVideoStreaming || _captureResource.videoRecording;
            [_captureResource.lensProcessingCore setModifySource:modifySource];
            [_captureResource.lensProcessingCore setShouldMuteAllSounds:_captureResource.state.liveVideoStreaming];
            if (_captureResource.fileInputDecider.shouldProcessFileInput) {
                [_captureResource.lensProcessingCore setLensesActive:YES
                                                    videoOrientation:_captureResource.videoDataSource.videoOrientation
                                                       filterFactory:filterFactory];
            }
            [_captureResource.videoDataSource
                setVideoStabilizationEnabledIfSupported:!_captureResource.state.lensesActive];
            if (SCIsMasterBuild()) {
                // Check that the connection configuration is correct
                if (_captureResource.state.lensesActive &&
                    _captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                    for (AVCaptureOutput *output in _captureResource.managedSession.avSession.outputs) {
                        if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) {
                            AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
                            SCAssert(connection.videoMirrored &&
                                         connection.videoOrientation ==
                                             (!_captureResource.state.liveVideoStreaming
                                                  ? AVCaptureVideoOrientationLandscapeRight
                                                  : AVCaptureVideoOrientationPortrait),
                                     @"Connection configuration is not correct");
                        }
                    }
                }
            }
        }
        dispatch_block_t viewChangeHandler = ^{
            SCManagedCapturerState *state = [_captureResource.state copy]; // always update to the latest state
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:self didChangeState:state];
                [_captureResource.announcer managedCapturer:self didChangeLensesActive:state];
                [_captureResource.videoPreviewGLViewManager setLensesActive:state.lensesActive];
                if (completionHandler) {
                    completionHandler();
                }
            });
        };
        if (lensesActiveChanged && !lensesActive && SCDeviceSupportsMetal()) {
            // If we are turning lenses off and have sample buffer display on,
            // we need to wait until a new frame is presented in the sample buffer
            // before dismissing the Lenses' OpenGL view.
            [_captureResource.videoDataSource waitUntilSampleBufferDisplayed:_captureResource.queuePerformer.queue
                                                           completionHandler:viewChangeHandler];
        } else {
            viewChangeHandler();
        }
    }];
}

- (void)_setLiveVideoStreaming:(BOOL)liveVideoStreaming
{
    SCAssertPerformer(_captureResource.queuePerformer);
    BOOL enableLiveVideoStreaming = liveVideoStreaming;
    if (!_captureResource.state.lensesActive && liveVideoStreaming) {
        SCLogLensesError(@"LiveVideoStreaming is not allowed when lenses are turned off");
        enableLiveVideoStreaming = NO;
    }
    SC_GUARD_ELSE_RETURN(enableLiveVideoStreaming != _captureResource.state.liveVideoStreaming);
    // We disable blackCameraNoOutputDetector while live video streaming.
    // In case there is some black camera when doing a video call, we will consider re-enabling it.
    [self _setBlackCameraNoOutputDetectorEnabled:!liveVideoStreaming];
    if (!_captureResource.device.isConnected) {
        SCLogCapturerError(@"Can't perform configuration for live video streaming");
    }
    SCLogCapturerInfo(@"Set live video streaming: %d -> %d", _captureResource.state.liveVideoStreaming,
                      enableLiveVideoStreaming);
    _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state]
        setLiveVideoStreaming:enableLiveVideoStreaming] build];
    BOOL isStreaming = _captureResource.videoDataSource.isStreaming;
    if (isStreaming) {
        [_captureResource.videoDataSource stopStreaming];
    }
    SCLogCapturerInfo(@"Set live video streaming beginConfiguration");
    [_captureResource.managedSession performConfiguration:^{
        [_captureResource.videoDataSource beginConfiguration];
        // If video chat is active we should use portrait orientation, otherwise landscape right
        [_captureResource.videoDataSource setVideoOrientation:_captureResource.state.liveVideoStreaming ?
AVCaptureVideoOrientationPortrait : AVCaptureVideoOrientationLandscapeRight]; [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming session:_captureResource.managedSession.avSession]; [_captureResource.videoDataSource commitConfiguration]; }]; SCLogCapturerInfo(@"Set live video streaming commitConfiguration"); if (isStreaming) { [_captureResource.videoDataSource startStreaming]; } } - (void)_setBlackCameraNoOutputDetectorEnabled:(BOOL)enabled { if (enabled) { [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; } else { [self removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; [_captureResource.videoDataSource removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; } } - (void)setTorchActiveAsynchronously:(BOOL)torchActive completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Setting torch active asynchronously to: %d", torchActive); [_captureResource.queuePerformer perform:^{ SCTraceStart(); BOOL torchActiveChanged = NO; if (_captureResource.state.torchActive != torchActive) { [_captureResource.device setTorchActive:torchActive]; if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { _captureResource.frontFlashController.torchActive = torchActive; } SCLogCapturerInfo(@"Set torch active: %d -> %d", _captureResource.state.torchActive, torchActive); _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setTorchActive:torchActive] build]; torchActiveChanged = YES; } SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ if (torchActiveChanged) { [_captureResource.announcer managedCapturer:self didChangeState:state]; } if (completionHandler) { completionHandler(); } }); }]; } - (void)setNightModeActiveAsynchronously:(BOOL)active completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); // Only do the configuration if current device is connected if (_captureResource.device.isConnected) { SCLogCapturerInfo(@"Set night mode beginConfiguration"); [_captureResource.managedSession performConfiguration:^{ [self _setNightModeActive:active]; [self _updateHRSIEnabled]; [self _updateStillImageStabilizationEnabled]; }]; SCLogCapturerInfo(@"Set night mode commitConfiguration"); } BOOL nightModeChanged = (_captureResource.state.isNightModeActive != active); if (nightModeChanged) { SCLogCapturerInfo(@"Set night mode active: %d -> %d", _captureResource.state.isNightModeActive, active); _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setIsNightModeActive:active] build]; } SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ if (nightModeChanged) { [_captureResource.announcer managedCapturer:self didChangeState:state]; [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; } if (completionHandler) { completionHandler(); } }); }]; } - (void)_setNightModeActive:(BOOL)active { SCTraceODPCompatibleStart(2); [_captureResource.device setNightModeActive:active session:_captureResource.managedSession.avSession]; if ([SCManagedCaptureDevice 
isEnhancedNightModeSupported]) { [self _toggleSoftwareNightmode:active]; } } - (void)_toggleSoftwareNightmode:(BOOL)active { SCTraceODPCompatibleStart(2); if (active) { SCLogCapturerInfo(@"Set enhanced night mode active"); SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; processingPipelineBuilder.enhancedNightMode = YES; SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); [_captureResource.videoDataSource addProcessingPipeline:pipeline]; } else { SCLogCapturerInfo(@"Removing processing pipeline"); [_captureResource.videoDataSource removeProcessingPipeline]; } } - (BOOL)_shouldCaptureImageFromVideo { SCTraceODPCompatibleStart(2); BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer]; return isIphone5Series && !_captureResource.state.flashActive && ![self isLensApplied]; } - (void)lockZoomWithContext:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertMainThread(); SCLogCapturerInfo(@"Lock zoom"); _captureResource.allowsZoom = NO; } - (void)unlockZoomWithContext:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertMainThread(); SCLogCapturerInfo(@"Unlock zoom"); // Don't let anyone unlock the zoom while ARKit is active. When ARKit shuts down, it'll unlock it. SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); _captureResource.allowsZoom = YES; } - (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertMainThread(); SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); SCLogCapturerInfo(@"Setting zoom factor to: %f", zoomFactor); [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:_captureResource.device immediately:NO]; } - (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor devicePosition:(SCManagedCaptureDevicePosition)devicePosition context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertMainThread(); SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); SCLogCapturerInfo(@"Setting zoom factor to: %f devicePosition:%lu", zoomFactor, (unsigned long)devicePosition); SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:device immediately:YES]; } - (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.device.isConnected) { CGPoint exposurePoint; if ([self isVideoMirrored]) { exposurePoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); } else { exposurePoint = pointOfInterest; } if (_captureResource.device.softwareZoom) { // Fix for the zooming factor [_captureResource.device setExposurePointOfInterest:CGPointMake( (exposurePoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, (exposurePoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5) fromUser:fromUser]; } else { [_captureResource.device setExposurePointOfInterest:exposurePoint fromUser:fromUser]; } } if (completionHandler) { runOnMainThreadAsynchronously(completionHandler); } }]; } - (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); 
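    // (Worked example of the software-zoom remap used below: with
    // softwareZoom == 2, a tap at normalized (0.75, 0.5) becomes
    // ((0.75 - 0.5) / 2 + 0.5, (0.5 - 0.5) / 2 + 0.5) == (0.625, 0.5);
    // points are pulled toward the center to undo the digital crop.)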
[_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.device.isConnected) { CGPoint focusPoint; if ([self isVideoMirrored]) { focusPoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); } else { focusPoint = pointOfInterest; } if (_captureResource.device.softwareZoom) { // Fix for the zooming factor [_captureResource.device setAutofocusPointOfInterest:CGPointMake( (focusPoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, (focusPoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)]; } else { [_captureResource.device setAutofocusPointOfInterest:focusPoint]; } } if (completionHandler) { runOnMainThreadAsynchronously(completionHandler); } }]; } - (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:pointOfInterest completionHandler:completionHandler resource:_captureResource]; } - (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.device.isConnected) { [_captureResource.device continuousAutofocus]; [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; if (SCCameraTweaksEnablePortraitModeAutofocus()) { [self setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5) completionHandler:nil context:context]; } } if (completionHandler) { runOnMainThreadAsynchronously(completionHandler); } }]; } #pragma mark - Add / Remove Listener - (void)addListener:(id)listener { SCTraceODPCompatibleStart(2); // Only do the make sure thing if I added it to announcer fresh. 
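    // (Sketch of the listener contract: a freshly added listener immediately
    // receives the current values, so a hypothetical flash button could do
    //     [[SCManagedCapturerV1 sharedInstance] addListener:self];
    // and rely on -managedCapturer:didChangeFlashActive: firing once with the
    // present state instead of polling.)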
SC_GUARD_ELSE_RETURN([_captureResource.announcer addListener:listener]); // After added the listener, make sure we called all these methods with its // initial values [_captureResource.queuePerformer perform:^{ SCManagedCapturerState *state = [_captureResource.state copy]; AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; LSAGLView *videoPreviewGLView = _captureResource.videoPreviewGLViewManager.view; runOnMainThreadAsynchronously(^{ SCTraceStart(); if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { [listener managedCapturer:self didChangeState:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { [listener managedCapturer:self didChangeCaptureDevicePosition:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { [listener managedCapturer:self didChangeNightModeActive:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { [listener managedCapturer:self didChangeFlashActive:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { [listener managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { [listener managedCapturer:self didChangeZoomFactor:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { [listener managedCapturer:self didChangeLowLightCondition:state]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { [listener managedCapturer:self didChangeAdjustingExposure:state]; } if (!SCDeviceSupportsMetal()) { if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { [listener managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; } } if (videoPreviewGLView && [listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { [listener managedCapturer:self didChangeVideoPreviewGLView:videoPreviewGLView]; } if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { [listener managedCapturer:self didChangeLensesActive:state]; } }); }]; } - (void)removeListener:(id)listener { SCTraceODPCompatibleStart(2); [_captureResource.announcer removeListener:listener]; } - (void)addVideoDataSourceListener:(id)listener { SCTraceODPCompatibleStart(2); [_captureResource.videoDataSource addListener:listener]; } - (void)removeVideoDataSourceListener:(id)listener { SCTraceODPCompatibleStart(2); [_captureResource.videoDataSource removeListener:listener]; } - (void)addDeviceCapacityAnalyzerListener:(id)listener { SCTraceODPCompatibleStart(2); [_captureResource.deviceCapacityAnalyzer addListener:listener]; } - (void)removeDeviceCapacityAnalyzerListener:(id)listener { SCTraceODPCompatibleStart(2); [_captureResource.deviceCapacityAnalyzer removeListener:listener]; } #pragma mark - Debug - (NSString *)debugInfo { SCTraceODPCompatibleStart(2); NSMutableString *info = [NSMutableString new]; [info appendString:@"==== SCManagedCapturer tokens ====\n"]; [_captureResource.tokenSet enumerateObjectsUsingBlock:^(SCCapturerToken *_Nonnull token, BOOL *_Nonnull stop) { [info appendFormat:@"%@\n", token.debugDescription]; }]; return info.copy; } - (NSString *)description { return [self debugDescription]; } - (NSString *)debugDescription { return [NSString 
sc_stringWithFormat:@"SCManagedCapturer state:\n%@\nVideo streamer info:\n%@", _captureResource.state.debugDescription, _captureResource.videoDataSource.description]; } - (CMTime)firstWrittenAudioBufferDelay { SCTraceODPCompatibleStart(2); return [SCCaptureWorker firstWrittenAudioBufferDelay:_captureResource]; } - (BOOL)audioQueueStarted { SCTraceODPCompatibleStart(2); return [SCCaptureWorker audioQueueStarted:_captureResource]; } #pragma mark - SCTimeProfilable + (SCTimeProfilerContext)context { return SCTimeProfilerContextCamera; } // We disable and re-enable the liveness timer when entering background and foreground - (void)applicationDidEnterBackground { SCTraceODPCompatibleStart(2); [SCCaptureWorker destroyLivenessConsistencyTimer:_captureResource]; // Hide the view when in background. if (!SCDeviceSupportsMetal()) { [_captureResource.queuePerformer perform:^{ _captureResource.appInBackground = YES; [CATransaction begin]; [CATransaction setDisableActions:YES]; _captureResource.videoPreviewLayer.hidden = YES; [CATransaction commit]; }]; } else { [_captureResource.queuePerformer perform:^{ _captureResource.appInBackground = YES; // If it is running, stop the streaming. if (_captureResource.status == SCManagedCapturerStatusRunning) { [_captureResource.videoDataSource stopStreaming]; } }]; } [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidEnterBackground]; } - (void)applicationWillEnterForeground { SCTraceODPCompatibleStart(2); if (!SCDeviceSupportsMetal()) { [_captureResource.queuePerformer perform:^{ SCTraceStart(); _captureResource.appInBackground = NO; if (!SCDeviceSupportsMetal()) { [self _fixNonMetalSessionPreviewInconsistency]; } // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but that needs verification. if (SC_AT_LEAST_IOS_10) { [self _runningConsistencyCheckAndFix]; // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground. _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; [self _fixAVSessionIfNecessary]; } }]; } else { [_captureResource.queuePerformer perform:^{ SCTraceStart(); _captureResource.appInBackground = NO; if (_captureResource.status == SCManagedCapturerStatusRunning) { [_captureResource.videoDataSource startStreaming]; } // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but that needs verification. if (SC_AT_LEAST_IOS_10) { [self _runningConsistencyCheckAndFix]; // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground. _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; [self _fixAVSessionIfNecessary]; } }]; } [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillEnterForeground]; } - (void)applicationWillResignActive { SCTraceODPCompatibleStart(2); [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillResignActive]; [_captureResource.queuePerformer perform:^{ [self _pauseCaptureSessionKVOCheck]; }]; } - (void)applicationDidBecomeActive { SCTraceODPCompatibleStart(2); [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidBecomeActive]; [_captureResource.queuePerformer perform:^{ SCTraceStart(); // Since we foreground it, do the running consistency check immediately. // Reset number of retries for fixing status inconsistency _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; [self _runningConsistencyCheckAndFix]; if (!SC_AT_LEAST_IOS_10) { // For OS version < iOS 10, try to fix AVCaptureSession after app becomes active.
_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; [self _fixAVSessionIfNecessary]; } [self _resumeCaptureSessionKVOCheck]; if (_captureResource.status == SCManagedCapturerStatusRunning) { // Reschedule the timer if we don't have it already runOnMainThreadAsynchronously(^{ SCTraceStart(); [SCCaptureWorker setupLivenessConsistencyTimerIfForeground:_captureResource]; }); } }]; } - (void)_runningConsistencyCheckAndFix { SCTraceODPCompatibleStart(2); // Don't enforce consistency on the simulator, as it'll constantly false-positive and restart the session. SC_GUARD_ELSE_RETURN(![SCDeviceName isSimulator]); if (_captureResource.state.arSessionActive) { [self _runningARSessionConsistencyCheckAndFix]; } else { [self _runningAVCaptureSessionConsistencyCheckAndFix]; } } - (void)_runningARSessionConsistencyCheckAndFix { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); SCAssert(_captureResource.state.arSessionActive, @""); if (@available(iOS 11.0, *)) { // Occasionally the capture session will get into a weird "stuck" state. If this happens, we'll see that the timestamp for the most recent frame is behind the current time. Pausing the session for a moment and restarting attempts to jog it loose. NSTimeInterval timeSinceLastFrame = CACurrentMediaTime() - _captureResource.arSession.currentFrame.timestamp; BOOL reset = NO; if (_captureResource.arSession.currentFrame.camera.trackingStateReason == ARTrackingStateReasonInitializing) { if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold) { SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (possible hung init), fix now ***"); reset = YES; } } else if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) { SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (init complete), fix now ***"); reset = YES; } if (reset) { [SCCaptureWorker turnARSessionOff:_captureResource]; [SCCaptureWorker turnARSessionOn:_captureResource]; } } } - (void)_runningAVCaptureSessionConsistencyCheckAndFix { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); SCAssert(!_captureResource.state.arSessionActive, @""); [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@"" stepName:@"startConsistencyCheckAndFix"]; // If the video preview layer's hidden status is out of sync with the session's running status, fix that now. Also, we don't care that much if the status is not running.
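// Summary of the fix below: the recovery escalates in two stages. First, while
// numRetriesFixInconsistencyWithCurrentSession is still within
// kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession, we simply
// re-issue -startRunning on the existing managed session (rebuilding the video
// preview layer on non-Metal devices). Only once that retry budget is exhausted
// do we tear down and recreate the whole capture session via
// _startRunningWithNewCaptureSession.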
if (!SCDeviceSupportsMetal()) { [self _fixNonMetalSessionPreviewInconsistency]; } // Skip the liveness consistency check if we are in the background if (_captureResource.appInBackground) { SCLogCapturerInfo(@"*** Skipped liveness consistency check, as we are in the background ***"); return; } if (_captureResource.status == SCManagedCapturerStatusRunning && !_captureResource.managedSession.isRunning) { SCGhostToSnappableSignalCameraFixInconsistency(); SCLogCapturerInfo(@"*** Found status inconsistency for running, fix now ***"); _captureResource.numRetriesFixInconsistencyWithCurrentSession++; if (_captureResource.numRetriesFixInconsistencyWithCurrentSession <= kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) { SCTraceStartSection("Fix non-running session") { if (!SCDeviceSupportsMetal()) { [CATransaction begin]; [CATransaction setDisableActions:YES]; [_captureResource.managedSession startRunning]; [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; [CATransaction commit]; } else { [_captureResource.managedSession startRunning]; } } SCTraceEndSection(); } else { SCTraceStartSection("Create new capturer session") { // Start running with a new capture session if the inconsistency fix does not succeed after kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession retries SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***"); [self _startRunningWithNewCaptureSession]; } SCTraceEndSection(); } BOOL sessionIsRunning = _captureResource.managedSession.isRunning; if (sessionIsRunning && !SCDeviceSupportsMetal()) { // If it is fixed, we signal that we received the first frame. SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); runOnMainThreadAsynchronously(^{ // This approximates the did-render time; it is not accurate. SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); }); } SCLogCapturerInfo(@"*** Applied inconsistency fix, running state : %@ ***", sessionIsRunning ? @"YES" : @"NO"); if (_captureResource.managedSession.isRunning) { [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@"" stepName:@"finishConsistencyCheckAndFix"]; [[SCLogger sharedInstance] logTimedEventEnd:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@"" parameters:@{ @"count" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession) }]; } } else { [[SCLogger sharedInstance] cancelLogTimedEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@""]; // Reset number of retries for fixing status inconsistency _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; } [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning]; } - (void)mediaServicesWereReset { SCTraceODPCompatibleStart(2); [self mediaServicesWereLost]; [_captureResource.queuePerformer perform:^{ /* If the current state requires the ARSession, restart it. Explicitly flip the arSessionActive flag so that `turnSessionOn` thinks it can reset itself. */ if (_captureResource.state.arSessionActive) { _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setArSessionActive:NO] build]; [SCCaptureWorker turnARSessionOn:_captureResource]; } }]; } - (void)mediaServicesWereLost { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ if (!_captureResource.state.arSessionActive && !_captureResource.managedSession.isRunning) { /* If the session is running we will trigger _sessionRuntimeError: so nothing else is needed here.
*/ [_captureResource.videoCapturer.outputURL reloadAssetKeys]; } }]; } - (void)_livenessConsistency:(NSTimer *)timer { SCTraceODPCompatibleStart(2); SCAssertMainThread(); // We can directly check the application state because this timer is scheduled on the main thread. if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) { [_captureResource.queuePerformer perform:^{ [self _runningConsistencyCheckAndFix]; }]; } } - (void)_sessionRuntimeError:(NSNotification *)notification { SCTraceODPCompatibleStart(2); NSError *sessionError = notification.userInfo[AVCaptureSessionErrorKey]; SCLogCapturerError(@"Encountered runtime error for capture session %@", sessionError); NSString *errorString = [sessionError.description stringByReplacingOccurrencesOfString:@" " withString:@"_"].uppercaseString ?: @"UNKNOWN_ERROR"; [[SCUserTraceLogger shared] logUserTraceEvent:[NSString sc_stringWithFormat:@"AVCAPTURESESSION_RUNTIME_ERROR_%@", errorString]]; if (sessionError.code == AVErrorMediaServicesWereReset) { // If it is an AVErrorMediaServicesWereReset error, we can just call startRunning, which is much more lightweight [_captureResource.queuePerformer perform:^{ if (!SCDeviceSupportsMetal()) { [CATransaction begin]; [CATransaction setDisableActions:YES]; [_captureResource.managedSession startRunning]; [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; [CATransaction commit]; } else { [_captureResource.managedSession startRunning]; } }]; } else { if (_captureResource.isRecreateSessionFixScheduled) { SCLogCoreCameraInfo(@"Fixing session runtime error is scheduled, skip"); return; } _captureResource.isRecreateSessionFixScheduled = YES; NSTimeInterval delay = 0; NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate]; if (timeNow - _captureResource.lastSessionRuntimeErrorTime < kMinFixSessionRuntimeErrorInterval) { SCLogCoreCameraInfo(@"Fixing runtime error session in less than %f, delay", kMinFixSessionRuntimeErrorInterval); delay = kMinFixSessionRuntimeErrorInterval; } _captureResource.lastSessionRuntimeErrorTime = timeNow; [_captureResource.queuePerformer perform:^{ SCTraceStart(); // Occasionally _captureResource.avSession will throw out an error when shutting down. If this happens while ARKit is starting up, _startRunningWithNewCaptureSession will throw a wrench in ARSession startup and freeze the image. SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); // Need to reset the flag before _startRunningWithNewCaptureSession _captureResource.isRecreateSessionFixScheduled = NO; [self _startRunningWithNewCaptureSession]; [self _fixAVSessionIfNecessary]; } after:delay]; } [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsRuntimeError parameters:@{ @"error" : sessionError == nil ? @"Unknown error" : sessionError.description, } secretParameters:nil metrics:nil]; } - (void)_startRunningWithNewCaptureSessionIfNecessary { SCTraceODPCompatibleStart(2); if (_captureResource.isRecreateSessionFixScheduled) { SCLogCapturerInfo(@"Session recreation is scheduled, return"); return; } _captureResource.isRecreateSessionFixScheduled = YES; [_captureResource.queuePerformer perform:^{ // Need to reset the flag before _startRunningWithNewCaptureSession _captureResource.isRecreateSessionFixScheduled = NO; [self _startRunningWithNewCaptureSession]; }]; } - (void)_startRunningWithNewCaptureSession { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); SCLogCapturerInfo(@"Start running with new capture session.
isRecording:%d isStreaming:%d status:%lu", _captureResource.videoRecording, _captureResource.videoDataSource.isStreaming, (unsigned long)_captureResource.status); // Mark the start of recreating session [_captureResource.blackCameraDetector sessionWillRecreate]; // Light weight fix gating BOOL lightWeightFix = SCCameraTweaksSessionLightWeightFixEnabled() || SCCameraTweaksBlackCameraRecoveryEnabled(); if (!lightWeightFix) { [_captureResource.deviceCapacityAnalyzer removeListener:_captureResource.stillImageCapturer]; [self removeListener:_captureResource.stillImageCapturer]; [_captureResource.videoDataSource removeListener:_captureResource.lensProcessingCore.capturerListener]; [_captureResource.videoDataSource removeListener:_captureResource.deviceCapacityAnalyzer]; [_captureResource.videoDataSource removeListener:_captureResource.stillImageCapturer]; if (SCIsMasterBuild()) { [_captureResource.videoDataSource removeListener:_captureResource.videoStreamReporter]; } [_captureResource.videoDataSource removeListener:_captureResource.videoScanner]; [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; [_captureResource.videoDataSource removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; } [_captureResource.videoCapturer.outputURL reloadAssetKeys]; BOOL isStreaming = _captureResource.videoDataSource.isStreaming; if (_captureResource.videoRecording) { // Stop video recording prematurely [self stopRecordingAsynchronouslyWithContext:SCCapturerContext]; NSError *error = [NSError errorWithDomain:kSCManagedCapturerErrorDomain description: [NSString sc_stringWithFormat:@"Interrupt video recording to start new session. %@", @{ @"isAVSessionRunning" : @(_captureResource.managedSession.isRunning), @"numRetriesFixInconsistency" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession), @"numRetriesFixAVCaptureSession" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession), @"lastSessionRuntimeErrorTime" : @(_captureResource.lastSessionRuntimeErrorTime), }] code:-1]; [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoRecordingInterrupted parameters:@{ @"error" : error.description } secretParameters:nil metrics:nil]; } @try { if (@available(iOS 11.0, *)) { [_captureResource.arSession pause]; if (!lightWeightFix) { [_captureResource.videoDataSource removeListener:_captureResource.arImageCapturer]; } } [_captureResource.managedSession stopRunning]; [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; } @catch (NSException *exception) { SCLogCapturerError(@"Encountered Exception %@", exception); } @finally { // Nil out device inputs from both devices [[SCManagedCaptureDevice front] resetDeviceAsInput]; [[SCManagedCaptureDevice back] resetDeviceAsInput]; } if (!SCDeviceSupportsMetal()) { // Redo the video preview to mitigate https://ph.sc-corp.net/T42584 [SCCaptureWorker redoVideoPreviewLayer:_captureResource]; } #if !TARGET_IPHONE_SIMULATOR if (@available(iOS 11.0, *)) { _captureResource.arSession = [[ARSession alloc] init]; _captureResource.arImageCapturer = [_captureResource.arImageCaptureProvider arImageCapturerWith:_captureResource.queuePerformer lensProcessingCore:_captureResource.lensProcessingCore]; } [self _resetAVCaptureSession]; #endif [_captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO]; [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession]; if 
(_captureResource.fileInputDecider.shouldProcessFileInput) { // Keep the same logic, always create new VideoDataSource [self _setupNewVideoFileDataSource]; } else { if (!lightWeightFix) { [self _setupNewVideoDataSource]; } else { [self _setupVideoDataSourceWithNewSession]; } } if (_captureResource.status == SCManagedCapturerStatusRunning) { if (!SCDeviceSupportsMetal()) { [CATransaction begin]; [CATransaction setDisableActions:YES]; // Set the session to be the new session before start running. _captureResource.videoPreviewLayer.session = _captureResource.managedSession.avSession; if (!_captureResource.appInBackground) { [_captureResource.managedSession startRunning]; } [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; [CATransaction commit]; } else { if (!_captureResource.appInBackground) { [_captureResource.managedSession startRunning]; } } } // Since this start and stop happens in one block, we don't have to worry // about streamingSequence issues if (isStreaming) { [_captureResource.videoDataSource startStreaming]; } SCManagedCapturerState *state = [_captureResource.state copy]; AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:self didResetFromRuntimeError:state]; if (!SCDeviceSupportsMetal()) { [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; } }); // Mark the end of recreating session [_captureResource.blackCameraDetector sessionDidRecreate]; } /** * Heavy-weight session fixing approach: recreating everything */ - (void)_setupNewVideoDataSource { if (@available(iOS 11.0, *)) { _captureResource.videoDataSource = [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession arSession:_captureResource.arSession devicePosition:_captureResource.state.devicePosition]; [_captureResource.videoDataSource addListener:_captureResource.arImageCapturer]; if (_captureResource.state.isPortraitModeActive) { [_captureResource.videoDataSource setDepthCaptureEnabled:YES]; SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; processingPipelineBuilder.portraitModeEnabled = YES; SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; [_captureResource.videoDataSource addProcessingPipeline:pipeline]; } } else { _captureResource.videoDataSource = [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession devicePosition:_captureResource.state.devicePosition]; } [self _setupVideoDataSourceListeners]; } - (void)_setupNewVideoFileDataSource { _captureResource.videoDataSource = [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:_captureResource.fileInputDecider.fileURL]; [_captureResource.lensProcessingCore setLensesActive:YES videoOrientation:_captureResource.videoDataSource.videoOrientation filterFactory:nil]; runOnMainThreadAsynchronously(^{ [_captureResource.videoPreviewGLViewManager prepareViewIfNecessary]; }); [self _setupVideoDataSourceListeners]; } /** * Light-weight session fixing approach: recreating AVCaptureSession / AVCaptureOutput, and bind it to the new session */ - (void)_setupVideoDataSourceWithNewSession { if (@available(iOS 11.0, *)) { SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; [streamer setupWithSession:_captureResource.managedSession.avSession devicePosition:_captureResource.state.devicePosition]; [streamer 
setupWithARSession:_captureResource.arSession]; } else { SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; [streamer setupWithSession:_captureResource.managedSession.avSession devicePosition:_captureResource.state.devicePosition]; } [_captureResource.stillImageCapturer setupWithSession:_captureResource.managedSession.avSession]; } - (void)_setupVideoDataSourceListeners { if (_captureResource.videoFrameSampler) { [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; } [_captureResource.videoDataSource addSampleBufferDisplayController:_captureResource.sampleBufferDisplayController]; [_captureResource.videoDataSource addListener:_captureResource.lensProcessingCore.capturerListener]; [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; if (SCIsMasterBuild()) { [_captureResource.videoDataSource addListener:_captureResource.videoStreamReporter]; } [_captureResource.videoDataSource addListener:_captureResource.videoScanner]; [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; _captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:_captureResource]; [_captureResource.deviceCapacityAnalyzer addListener:_captureResource.stillImageCapturer]; [_captureResource.videoDataSource addListener:_captureResource.stillImageCapturer]; [self addListener:_captureResource.stillImageCapturer]; } - (void)_resetAVCaptureSession { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; // lazily initialize _captureResource.kvoController on background thread if (!_captureResource.kvoController) { _captureResource.kvoController = [[FBKVOController alloc] initWithObserver:self]; } [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; _captureResource.managedSession = [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:_captureResource.blackCameraDetector]; [_captureResource.kvoController observe:_captureResource.managedSession.avSession keyPath:@keypath(_captureResource.managedSession.avSession, running) options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld action:_captureResource.handleAVSessionStatusChange]; } - (void)_pauseCaptureSessionKVOCheck { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; } - (void)_resumeCaptureSessionKVOCheck { SCTraceODPCompatibleStart(2); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); [_captureResource.kvoController observe:_captureResource.managedSession.avSession keyPath:@keypath(_captureResource.managedSession.avSession, running) options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld action:_captureResource.handleAVSessionStatusChange]; } - (id)currentVideoDataSource { SCTraceODPCompatibleStart(2); return _captureResource.videoDataSource; } - (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback { SCTraceODPCompatibleStart(2); [_captureResource.queuePerformer perform:^{ // Front and back should be available if user has no restriction on camera. 
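// A minimal usage sketch (hypothetical call site, not in the original source),
// assuming the capturer is reached through its shared instance:
//
//     [[SCManagedCapturer sharedInstance]
//         checkRestrictedCamera:^(BOOL front, BOOL back, AVAuthorizationStatus status) {
//             // Runs on the main thread. If neither camera is available while
//             // status is AVAuthorizationStatusAuthorized, the camera is most
//             // likely blocked by a device-level restriction rather than a
//             // denied permission prompt.
//         }];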
BOOL front = [[SCManagedCaptureDevice front] isAvailable]; BOOL back = [[SCManagedCaptureDevice back] isAvailable]; AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; runOnMainThreadAsynchronously(^{ callback(front, back, status); }); }]; } - (SCSnapCreationTriggers *)snapCreationTriggers { return _captureResource.snapCreationTriggers; } - (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector deviceMotionProvider:(id)deviceMotionProvider fileInputDecider:(id)fileInputDecider arImageCaptureProvider:(id)arImageCaptureProvider glviewManager:(id)glViewManager lensAPIProvider:(id)lensAPIProvider lsaComponentTracker:(id)lsaComponentTracker managedCapturerPreviewLayerControllerDelegate: (id)previewLayerControllerDelegate { _captureResource.blackCameraDetector = blackCameraDetector; _captureResource.deviceMotionProvider = deviceMotionProvider; _captureResource.fileInputDecider = fileInputDecider; _captureResource.arImageCaptureProvider = arImageCaptureProvider; _captureResource.videoPreviewGLViewManager = glViewManager; [_captureResource.videoPreviewGLViewManager configureWithCaptureResource:_captureResource]; _captureResource.lensAPIProvider = lensAPIProvider; _captureResource.lsaTrackingComponentHandler = lsaComponentTracker; [_captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_captureResource]; _captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate; [SCManagedCapturePreviewLayerController sharedInstance].delegate = previewLayerControllerDelegate; } @end ================================================ FILE: ManagedCapturer/SCManagedCapturerV1_Private.h ================================================ // // SCManagedCapturerV1_Private.h // Snapchat // // Created by Jingtian Yang on 20/12/2017. // #import "SCManagedCapturerV1.h" @interface SCManagedCapturerV1 () - (SCCaptureResource *)captureResource; - (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler; - (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context; @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h ================================================ // // SCManagedDeviceCapacityAnalyzer.h // Snapchat // // Created by Liu Liu on 5/1/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCManagedDeviceCapacityAnalyzerListener.h" #import #import @class SCManagedCaptureDevice; @protocol SCPerforming; extern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh; @interface SCManagedDeviceCapacityAnalyzer : NSObject @property (nonatomic, assign) BOOL lowLightConditionEnabled; - (instancetype)initWithPerformer:(id)performer; - (void)addListener:(id)listener; - (void)removeListener:(id)listener; - (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice; - (void)removeFocusListener; @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m ================================================ // // SCManagedDeviceCapacityAnalyzer.m // Snapchat // // Created by Liu Liu on 5/1/15. // Copyright (c) 2015 Liu Liu. All rights reserved. 
// #import "SCManagedDeviceCapacityAnalyzer.h" #import "SCCameraSettingUtils.h" #import "SCCameraTweaks.h" #import "SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h" #import "SCManagedCaptureDevice.h" #import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h" #import #import #import #import #import #import @import ImageIO; @import QuartzCore; NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500; NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800; NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640; NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800; // After this many frames without a change in exposure time or ISO, we assume that adjusting exposure has ended. static NSInteger const kExposureUnchangedHighWatermark = 5; // If the deadline is reached and we still haven't hit the high watermark, we consult the low watermark and at least give the system a chance to take not-so-great pictures. static NSInteger const kExposureUnchangedLowWatermark = 1; static NSTimeInterval const kExposureUnchangedDeadline = 0.2; // It seems that between ISO 500 and 640, the brightness value is always somewhere around -0.4 to -0.5. // Therefore, this threshold probably will work fine. static float const kBrightnessValueThreshold = -2.25; // Give some margin between being recognized as bright enough and as not having enough light. // If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval, we count the frame as a low light frame. Only if the brightness is higher than kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval do we consider there to be enough light and reset the low light frame count to 0. 0.5 is chosen because in a dark environment the brightness value changes +-0.3 with minor orientation changes.
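// Worked example with the constants above: with a threshold of -2.25 and a
// confidence interval of 0.5, a frame only counts toward the low light streak
// when its brightness falls below -2.75, and the streak is only reset once
// brightness climbs back to -1.75 or above. The resulting ~1.0-wide dead band
// keeps the +-0.3 orientation jitter from flapping the low light state.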
static float const kBrightnessValueThresholdConfidenceInterval = 0.5; // If we stay in a good light condition for this many consecutive frames, we are ready to change back static NSInteger const kLowLightBoostUnchangedLowWatermark = 7; // Requires that we stay in a low light condition for 25 consecutive frames (roughly a second at 20~30fps) static NSInteger const kLowLightBoostUnchangedHighWatermark = 25; static NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second static float const kSCLightingConditionNormalThreshold = 0; static float const kSCLightingConditionDarkThreshold = -3; @implementation SCManagedDeviceCapacityAnalyzer { float _lastExposureTime; int _lastISOSpeedRating; NSTimeInterval _lastAdjustingExposureStartTime; NSInteger _lowLightBoostLowLightCount; NSInteger _lowLightBoostEnoughLightCount; NSInteger _exposureUnchangedCount; NSInteger _maxISOPresetHigh; NSInteger _normalLightingConditionCount; NSInteger _darkLightingConditionCount; NSInteger _extremeDarkLightingConditionCount; SCCapturerLightingConditionType _lightingCondition; BOOL _lowLightCondition; BOOL _adjustingExposure; SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer; FBKVOController *_observeController; id _performer; float _lastBrightnessToLog; // Remember the last logged brightness; only log again if it changes by more than a threshold } - (instancetype)initWithPerformer:(id)performer { SCTraceStart(); self = [super init]; if (self) { _performer = performer; _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI; if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) { _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8; } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) { _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7; } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) { // iPhone 6S supports a higher ISO rate for video recording, accommodating that.
_maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S; } _announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init]; _observeController = [[FBKVOController alloc] initWithObserver:self]; } return self; } - (void)addListener:(id)listener { SCTraceStart(); [_announcer addListener:listener]; } - (void)removeListener:(id)listener { SCTraceStart(); [_announcer removeListener:listener]; } - (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled { SCTraceStart(); if (_lowLightConditionEnabled != lowLightConditionEnabled) { _lowLightConditionEnabled = lowLightConditionEnabled; if (!lowLightConditionEnabled) { _lowLightBoostLowLightCount = 0; _lowLightBoostEnoughLightCount = 0; _lowLightCondition = NO; [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; } } } - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceStart(); SampleBufferMetadata metadata = { .isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime, }; retrieveSampleBufferMetadata(sampleBuffer, &metadata); if ((SCIsDebugBuild() || SCIsMasterBuild()) // Enable this on internal builds only (excluding alpha) && fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) { // Log only when brightness change is greater than 0.5 _lastBrightnessToLog = metadata.brightness; SCLogCoreCameraInfo(@"ExposureTime: %f, ISO: %ld, Brightness: %f", metadata.exposureTime, (long)metadata.isoSpeedRating, metadata.brightness); } [self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating]; _lastExposureTime = metadata.exposureTime; _lastISOSpeedRating = metadata.isoSpeedRating; if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh && _lowLightConditionEnabled) { // Only detect low light when exposure is not adjusting and the current ISO is within the preset maximum [self _automaticallyDetectLowLightCondition:metadata.brightness]; } [self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness]; [_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness]; } - (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice { SCTraceStart(); [_observeController observe:captureDevice.device keyPath:@keypath(captureDevice.device, adjustingFocus) options:NSKeyValueObservingOptionNew action:@selector(_adjustingFocusingChanged:)]; } - (void)removeFocusListener { SCTraceStart(); [_observeController unobserveAll]; } #pragma mark - Private methods - (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating { SCTraceStart(); if (currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) { _exposureUnchangedCount = 0; } else { ++_exposureUnchangedCount; } NSTimeInterval currentTime = CACurrentMediaTime(); if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark || (currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline && _exposureUnchangedCount >= kExposureUnchangedLowWatermark)) { // The exposure values haven't changed for kExposureUnchangedHighWatermark frames, so we consider the adjustment done. Otherwise, if we waited long enough and the unchanged count at least reached the low watermark, we call it done and give it a shot.
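// Worked example with the constants above: at ~30fps the high watermark of 5
// unchanged frames is reached in roughly 0.17 seconds. If exposure keeps
// fluctuating, the 0.2 second deadline combined with the low watermark of 1
// unchanged frame caps how long we wait before declaring the adjustment done.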
if (_adjustingExposure) { _adjustingExposure = NO; SCLogGeneralInfo(@"Adjusting exposure is done, unchanged count: %zd", _exposureUnchangedCount); [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; } } else { // Otherwise signal that we have adjustments on exposure if (!_adjustingExposure) { _adjustingExposure = YES; _lastAdjustingExposureStartTime = currentTime; [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; } } } - (void)_automaticallyDetectLowLightCondition:(float)brightness { SCTraceStart(); if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) { // If we are at the stage that we need to use higher ISO (because current ISO is maxed out) // and the brightness is lower than the threshold if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) { // Either count how many frames like this continuously we encountered // Or if reached the watermark, change the low light boost mode if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) { _lowLightCondition = YES; [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; } else { ++_lowLightBoostLowLightCount; } } else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { // If the brightness is consistently better, reset the low light boost unchanged count to 0 _lowLightBoostLowLightCount = 0; } } else if (_lowLightCondition) { // Check the current ISO to see if we can disable low light boost if (_lastISOSpeedRating <= _maxISOPresetHigh && brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) { _lowLightCondition = NO; [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; _lowLightBoostEnoughLightCount = 0; } else { ++_lowLightBoostEnoughLightCount; } } } } - (void)_adjustingFocusingChanged:(NSDictionary *)change { SCTraceStart(); BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue]; [_performer perform:^{ [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus]; }]; } - (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness { if (brightness >= kSCLightingConditionNormalThreshold) { if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) { if (_lightingCondition != SCCapturerLightingConditionTypeNormal) { _lightingCondition = SCCapturerLightingConditionTypeNormal; [_announcer managedDeviceCapacityAnalyzer:self didChangeLightingCondition:SCCapturerLightingConditionTypeNormal]; } } else { _normalLightingConditionCount++; } _darkLightingConditionCount = 0; _extremeDarkLightingConditionCount = 0; } else if (brightness >= kSCLightingConditionDarkThreshold) { if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) { if (_lightingCondition != SCCapturerLightingConditionTypeDark) { _lightingCondition = SCCapturerLightingConditionTypeDark; [_announcer managedDeviceCapacityAnalyzer:self didChangeLightingCondition:SCCapturerLightingConditionTypeDark]; } } else { _darkLightingConditionCount++; } _normalLightingConditionCount = 0; _extremeDarkLightingConditionCount = 0; } else { if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) { if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) { _lightingCondition = 
SCCapturerLightingConditionTypeExtremeDark; [_announcer managedDeviceCapacityAnalyzer:self didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark]; } } else { _extremeDarkLightingConditionCount++; } _normalLightingConditionCount = 0; _darkLightingConditionCount = 0; } } @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h ================================================ // // SCManagedDeviceCapacityAnalyzerHandler.h // Snapchat // // Created by Jingtian Yang on 11/12/2017. // #import "SCManagedDeviceCapacityAnalyzerListener.h" #import @class SCCaptureResource; @interface SCManagedDeviceCapacityAnalyzerHandler : NSObject - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m ================================================ // // SCManagedDeviceCapacityAnalyzerHandler.m // Snapchat // // Created by Jingtian Yang on 11/12/2017. // #import "SCManagedDeviceCapacityAnalyzerHandler.h" #import "SCCaptureResource.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerState.h" #import "SCManagedCapturerStateBuilder.h" #import #import #import #import @interface SCManagedDeviceCapacityAnalyzerHandler () { __weak SCCaptureResource *_captureResource; } @end @implementation SCManagedDeviceCapacityAnalyzerHandler - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { self = [super init]; if (self) { SCAssert(captureResource, @""); _captureResource = captureResource; } return self; } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLowLightCondition:(BOOL)lowLightCondition { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Change Low Light Condition %d", lowLightCondition); [_captureResource.queuePerformer perform:^{ _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setLowLightCondition:lowLightCondition] build]; SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeLowLightCondition:state]; }); }]; } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingExposure:(BOOL)adjustingExposure { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Capacity Analyzer Changes adjustExposure %d", adjustingExposure); [_captureResource.queuePerformer perform:^{ _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] setAdjustingExposure:adjustingExposure] build]; SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeAdjustingExposure:state]; }); }]; } @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h ================================================ //#!announcer.rb // SCManagedDeviceCapacityAnalyzerListener.h // Snapchat // // Created by Liu Liu on 
5/4/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCCapturerDefines.h" #import @class SCManagedDeviceCapacityAnalyzer; @protocol SCManagedDeviceCapacityAnalyzerListener @optional // These callbacks happen on an internal queue - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLowLightCondition:(BOOL)lowLightCondition; - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingExposure:(BOOL)adjustingExposure; - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingFocus:(BOOL)adjustingFocus; - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeBrightness:(float)adjustingBrightness; - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition; @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h ================================================ // Generated by the announcer.rb DO NOT EDIT!! #import "SCManagedDeviceCapacityAnalyzerListener.h" #import @interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject - (void)addListener:(id)listener; - (void)removeListener:(id)listener; @end ================================================ FILE: ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm ================================================ // Generated by the announcer.rb DO NOT EDIT!! #import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h" #include using std::lock_guard; using std::mutex; #include using std::find; using std::make_shared; using std::shared_ptr; using std::vector; @implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer { mutex _mutex; shared_ptr>> _listeners; } - (NSString *)description { auto listeners = atomic_load(&self->_listeners); NSMutableString *desc = [NSMutableString string]; [desc appendFormat:@": [", self]; for (int i = 0; i < listeners->size(); ++i) { [desc appendFormat:@"%@", (*listeners)[i]]; if (i != listeners->size() - 1) { [desc appendString:@", "]; } } [desc appendString:@"]"]; return desc; } - (void)addListener:(id)listener { lock_guard lock(_mutex); auto listeners = make_shared>>(); if (_listeners != nil) { // The listener we want to add already exists if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { return; } for (auto &one : *_listeners) { if (one != nil) { listeners->push_back(one); } } listeners->push_back(listener); atomic_store(&self->_listeners, listeners); } else { listeners->push_back(listener); atomic_store(&self->_listeners, listeners); } } - (void)removeListener:(id)listener { lock_guard lock(_mutex); if (_listeners == nil) { return; } // If the only item in the listener list is the one we want to remove, store it back to nil again if (_listeners->size() == 1 && (*_listeners)[0] == listener) { atomic_store(&self->_listeners, shared_ptr>>()); return; } auto listeners = make_shared>>(); for (auto &one : *_listeners) { if (one != nil && one != listener) { listeners->push_back(one); } } atomic_store(&self->_listeners, listeners); } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLowLightCondition:(BOOL)lowLightCondition { auto listeners =
atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) { [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer didChangeLowLightCondition:lowLightCondition]; } } } } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingExposure:(BOOL)adjustingExposure { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) { [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer didChangeAdjustingExposure:adjustingExposure]; } } } } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingFocus:(BOOL)adjustingFocus { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) { [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer didChangeAdjustingFocus:adjustingFocus]; } } } } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeBrightness:(float)adjustingBrightness { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) { [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer didChangeBrightness:adjustingBrightness]; } } } } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition { auto listeners = atomic_load(&self->_listeners); if (listeners) { for (id listener : *listeners) { if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) { [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer didChangeLightingCondition:lightingCondition]; } } } } @end ================================================ FILE: ManagedCapturer/SCManagedDroppedFramesReporter.h ================================================ // // SCManagedDroppedFramesReporter.h // Snapchat // // Created by Michel Loenngren on 3/21/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedCapturerListener.h" #import #import /* Conforms to SCManagedVideoDataSourceListener and records frame rate statistics during recording. */ @interface SCManagedDroppedFramesReporter : NSObject - (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied; - (void)didChangeCaptureDevicePosition; @end ================================================ FILE: ManagedCapturer/SCManagedDroppedFramesReporter.m ================================================ // // SCManagedDroppedFramesReporter.m // Snapchat // // Created by Michel Loenngren on 3/21/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
// #import "SCManagedDroppedFramesReporter.h" #import "SCCameraTweaks.h" #import "SCManagedCapturerState.h" #import #import #import #import #import #import CGFloat const kSCCaptureTargetFramerate = 30; @interface SCManagedDroppedFramesReporter () @property (nonatomic) SCVideoFrameDropCounter *frameDropCounter; @end @implementation SCManagedDroppedFramesReporter { SCVideoFrameDropCounter *_frameDropCounter; NSUInteger _droppedFrames; } - (SCVideoFrameDropCounter *)frameDropCounter { if (_frameDropCounter == nil) { _frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate]; _droppedFrames = 0; } return _frameDropCounter; } - (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied { if (_frameDropCounter == nil) { return; } NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy]; eventDict[@"total_frame_drop_measured"] = @(_droppedFrames); eventDict[@"keep_late_frames"] = @(keepLateFrames); // Even if the user selects none of the lenses when activating the lens scroll view, we still enable keepLateFrames eventDict[@"lenses_applied"] = @(lensesApplied); [[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict]; // Reset _frameDropCounter = nil; _droppedFrames = 0; } - (void)didChangeCaptureDevicePosition { [_frameDropCounter didChangeCaptureDevicePosition]; } #pragma mark - SCManagedVideoDataSourceListener - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { [self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; } - (void)managedVideoDataSource:(id)managedVideoDataSource didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { _droppedFrames += 1; NSDictionary *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport(); SCLogCoreCameraInfo(@"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@", backgroundTaskScreenshot); } @end ================================================ FILE: ManagedCapturer/SCManagedFrameHealthChecker.h ================================================ // // SCManagedFrameHealthChecker.h // Snapchat // // Created by Pinlin Chen on 30/08/2017. // #import #import #import #import @interface SCManagedFrameHealthChecker : NSObject + (SCManagedFrameHealthChecker *)sharedInstance; /*! @abstract Use sharedInstance instead.
*/ SC_INIT_AND_NEW_UNAVAILABLE; /* Utility method */ - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo; - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer photoCapturerEnabled:(BOOL)photoCapturerEnabled lensEnabled:(BOOL)lensesEnabled lensID:(NSString *)lensID; - (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata photoCapturerEnabled:(BOOL)photoCapturerEnabled lensEnabled:(BOOL)lensesEnabled lensID:(NSString *)lensID; - (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset; /* Image snap */ - (void)checkImageHealthForCaptureFrameImage:(UIImage *)image captureSettings:(NSDictionary *)captureSettings captureSessionID:(NSString *)captureSessionID; - (void)checkImageHealthForPreTranscoding:(UIImage *)image metadata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID; - (void)checkImageHealthForPostTranscoding:(NSData *)imageData metadata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID; /* Video snap */ - (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image metedata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID; - (void)checkVideoHealthForOverlayImage:(UIImage *)image metedata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID; - (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image metedata:(NSDictionary *)metadata properties:(NSDictionary *)properties captureSessionID:(NSString *)captureSessionID; - (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID; @end ================================================ FILE: ManagedCapturer/SCManagedFrameHealthChecker.m ================================================ // // SCManagedFrameHealthChecker.m // Snapchat // // Created by Pinlin Chen on 30/08/2017. 
// #import "SCManagedFrameHealthChecker.h" #import "SCCameraSettingUtils.h" #import "SCCameraTweaks.h" #import #import #import #import #import #import #import #import #import #import @import Accelerate; static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker"; static const int kSCManagedFrameHealthCheckerMaxSamples = 2304; static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0; static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0; static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0; // Assume we process at most two 2304*4096 RGBA images at a time static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0; typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) { SCManagedFrameHealthCheck_ImageCapture = 0, SCManagedFrameHealthCheck_ImagePreTranscoding, SCManagedFrameHealthCheck_ImagePostTranscoding, SCManagedFrameHealthCheck_VideoCapture, SCManagedFrameHealthCheck_VideoOverlayImage, SCManagedFrameHealthCheck_VideoPostTranscoding, }; typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) { SCManagedFrameHealthCheckError_None = 0, SCManagedFrameHealthCheckError_Invalid_Bitmap, SCManagedFrameHealthCheckError_Frame_Possibly_Black, SCManagedFrameHealthCheckError_Frame_Totally_Black, SCManagedFrameHealthCheckError_Execution_Error, }; typedef struct { float R; float G; float B; float A; } FloatRGBA; @class SCManagedFrameHealthCheckerTask; typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task); float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength) { float sum = 0; float colorArray[bufferLength]; // Convert bytes to float so the vDSP routines can operate on them vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength); // Calculate the sum of the color elements vDSP_sve(colorArray, 1, &sum, bufferLength); return sum; } @interface SCManagedFrameHealthCheckerTask : NSObject @property (nonatomic, assign) SCManagedFrameHealthCheckType type; @property (nonatomic, strong) id targetObject; @property (nonatomic, assign) CGSize sourceImageSize; @property (nonatomic, strong) UIImage *unifiedImage; @property (nonatomic, strong) NSDictionary *metadata; @property (nonatomic, strong) NSDictionary *videoProperties; @property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType; + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type targetObject:(id)targetObject metadata:(NSDictionary *)metadata videoProperties:(NSDictionary *)videoProperties; + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type targetObject:(id)targetObject metadata:(NSDictionary *)metadata; @end @implementation SCManagedFrameHealthCheckerTask + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type targetObject:(id)targetObject metadata:(NSDictionary *)metadata { return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil]; } + (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type targetObject:(id)targetObject metadata:(NSDictionary *)metadata videoProperties:(NSDictionary *)videoProperties { SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init]; task.type = type; task.targetObject = targetObject; task.metadata = metadata; task.videoProperties = videoProperties; return task; } - (NSString *)textForSnapType { switch (self.type) { case
SCManagedFrameHealthCheck_ImageCapture: case SCManagedFrameHealthCheck_ImagePreTranscoding: case SCManagedFrameHealthCheck_ImagePostTranscoding: return @"IMAGE"; case SCManagedFrameHealthCheck_VideoCapture: case SCManagedFrameHealthCheck_VideoOverlayImage: case SCManagedFrameHealthCheck_VideoPostTranscoding: return @"VIDEO"; } } - (NSString *)textForSource { switch (self.type) { case SCManagedFrameHealthCheck_ImageCapture: return @"CAPTURE"; case SCManagedFrameHealthCheck_ImagePreTranscoding: return @"PRE_TRANSCODING"; case SCManagedFrameHealthCheck_ImagePostTranscoding: return @"POST_TRANSCODING"; case SCManagedFrameHealthCheck_VideoCapture: return @"CAPTURE"; case SCManagedFrameHealthCheck_VideoOverlayImage: return @"OVERLAY_IMAGE"; case SCManagedFrameHealthCheck_VideoPostTranscoding: return @"POST_TRANSCODING"; } } - (NSString *)textForErrorType { switch (self.errorType) { case SCManagedFrameHealthCheckError_None: return nil; case SCManagedFrameHealthCheckError_Invalid_Bitmap: return @"Invalid_Bitmap"; case SCManagedFrameHealthCheckError_Frame_Possibly_Black: return @"Frame_Possibly_Black"; case SCManagedFrameHealthCheckError_Frame_Totally_Black: return @"Frame_Totally_Black"; case SCManagedFrameHealthCheckError_Execution_Error: return @"Execution_Error"; } } @end @interface SCManagedFrameHealthChecker () { id _performer; // Dictionary structure // Key - NSString, captureSessionID // Value - NSMutableArray NSMutableDictionary *_frameCheckTasks; } @end @implementation SCManagedFrameHealthChecker + (SCManagedFrameHealthChecker *)sharedInstance { SCTraceODPCompatibleStart(2); static SCManagedFrameHealthChecker *checker; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ checker = [[SCManagedFrameHealthChecker alloc] _init]; }); return checker; } - (instancetype)_init { SCTraceODPCompatibleStart(2); if (self = [super init]) { // Use the lowest QoS level _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel qualityOfService:QOS_CLASS_UTILITY queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _frameCheckTasks = [NSMutableDictionary dictionary]; } return self; } - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCTraceODPCompatibleStart(2); // add exposure, ISO, brightness NSMutableDictionary *metadata = [NSMutableDictionary dictionary]; if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) { return metadata; } CFDictionaryRef exifAttachments = (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL); NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); if (exposureTimeNum) { metadata[@"exposure"] = exposureTimeNum; } NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); if (isoSpeedRatingNum) { metadata[@"iso"] = isoSpeedRatingNum; } NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); if (brightnessNum) { float brightness = [brightnessNum floatValue]; metadata[@"brightness"] = isfinite(brightness) ? 
@(brightness) : @(0); } return metadata; } - (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata { SCTraceODPCompatibleStart(2); // add exposure, ISO, brightness NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary]; CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata; NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); if (exposureTimeNum) { newMetadata[@"exposure"] = exposureTimeNum; } NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); if (isoSpeedRatingNum) { newMetadata[@"iso"] = isoSpeedRatingNum; } NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); if (brightnessNum) { float brightness = [brightnessNum floatValue]; newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0); } return newMetadata; } - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo { SCTraceODPCompatibleStart(2); NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; [metadata addEntriesFromDictionary:extraInfo]; return metadata; } - (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer photoCapturerEnabled:(BOOL)photoCapturerEnabled lensEnabled:(BOOL)lensesEnabled lensID:(NSString *)lensID { SCTraceODPCompatibleStart(2); NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); metadata[@"lens_enabled"] = @(lensesEnabled); if (lensesEnabled) { metadata[@"lens_id"] = lensID ?: @""; } return metadata; } - (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata photoCapturerEnabled:(BOOL)photoCapturerEnabled lensEnabled:(BOOL)lensesEnabled lensID:(NSString *)lensID { SCTraceODPCompatibleStart(2); NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata]; newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); newMetadata[@"lens_enabled"] = @(lensesEnabled); if (lensesEnabled) { newMetadata[@"lens_id"] = lensID ?: @""; } return newMetadata; } - (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset { SCTraceODPCompatibleStart(2); SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil); NSMutableDictionary *properties = [NSMutableDictionary dictionary]; // file size properties[@"file_size"] = @([asset fileSize]); // duration properties[@"duration"] = @(CMTimeGetSeconds(asset.duration)); // video track count NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; properties[@"video_track_count"] = @(videoTracks.count); if (videoTracks.count > 0) { // video bitrate properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]); // frame rate properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]); } // audio track count NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; properties[@"audio_track_count"] = @(audioTracks.count); if (audioTracks.count > 0) { // audio bitrate properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]); } // playable properties[@"playable"] = @(asset.isPlayable); return properties; } #pragma mark - Image snap - (void)checkImageHealthForCaptureFrameImage:(UIImage *)image captureSettings:(NSDictionary *)captureSettings captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be 
empty"); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture targetObject:image metadata:captureSettings]; [self _addTask:task withCaptureSessionID:captureSessionID]; } - (void)checkImageHealthForPreTranscoding:(UIImage *)image metadata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty"); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding targetObject:image metadata:metadata]; [self _addTask:task withCaptureSessionID:captureSessionID]; } - (void)checkImageHealthForPostTranscoding:(NSData *)imageData metadata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty"); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding targetObject:imageData metadata:metadata]; [self _addTask:task withCaptureSessionID:captureSessionID]; } #pragma mark - Video snap - (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image metedata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty"); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture targetObject:image metadata:metadata]; [self _addTask:task withCaptureSessionID:captureSessionID]; } - (void)checkVideoHealthForOverlayImage:(UIImage *)image metedata:(NSDictionary *)metadata captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty"); return; } // Overlay image could be nil if (!image) { SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil."); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage targetObject:image metadata:metadata]; [self _addTask:task withCaptureSessionID:captureSessionID]; } - (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image metedata:(NSDictionary *)metadata properties:(NSDictionary *)properties captureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (captureSessionID.length == 0) { SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty"); return; } SCManagedFrameHealthCheckerTask *task = [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding targetObject:image metadata:metadata videoProperties:properties]; [self _addTask:task withCaptureSessionID:captureSessionID]; } #pragma mark - Task management - (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID { SCTraceODPCompatibleStart(2); if (!captureSessionID) { SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil"); return; } [self 
#pragma mark - Private functions

/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength.
- (UIImage *)_unifyImage:(UIImage *)sourceImage
{
    CGFloat sourceWidth = sourceImage.size.width;
    CGFloat sourceHeight = sourceImage.size.height;
    if (sourceWidth == 0.0 || sourceHeight == 0.0) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size");
        return sourceImage;
    }
    CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength;
    CGFloat widthScalingFactor = maxEdgeLength / sourceWidth;
    CGFloat heightScalingFactor = maxEdgeLength / sourceHeight;
    CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor);
    if (scalingFactor >= 1) {
        SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image.");
        return sourceImage;
    }
    CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor);
    SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size),
                        NSStringFromCGSize(targetSize));
    return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale];
}

- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    if (captureSessionID.length == 0) {
        return;
    }
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        CFTimeInterval beforeScaling = CACurrentMediaTime();
        if (newTask.targetObject) {
            if ([newTask.targetObject isKindOfClass:[UIImage class]]) {
                UIImage *sourceImage = (UIImage *)newTask.targetObject;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else if ([newTask.targetObject isKindOfClass:[NSData class]]) {
                UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject];
                CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime();
                SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f",
                                    betweenDecodingAndScaling - beforeScaling);
                beforeScaling = betweenDecodingAndScaling;
                newTask.unifiedImage = [self _unifyImage:sourceImage];
                newTask.sourceImageSize = sourceImage.size;
            } else {
                SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@",
                                     NSStringFromClass([newTask.targetObject class]));
            }
            newTask.targetObject = nil;
        }
        SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling);
        NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID];
        if (!taskQueue) {
            taskQueue = [NSMutableArray array];
            _frameCheckTasks[captureSessionID] = taskQueue;
        }
        // Remove any earlier task of the same type so repeated user actions
        // (e.g. tapping "Send" and then "Back" over and over) don't pile up
        // redundant PRE_TRANSCODING and POST_TRANSCODING tasks. The loop breaks
        // immediately after the removal, so fast enumeration never advances past
        // the mutation.
        for (SCManagedFrameHealthCheckerTask *task in taskQueue) {
            if (task.type == newTask.type) {
                [taskQueue removeObject:task];
                break;
            }
        }
        [taskQueue addObject:newTask];
    }];
}

- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
        if (!tasksQueue) {
            return;
        }
        // Check the free memory; if it is too low, drop these tasks
        double memFree = [SCLogger memoryFreeMB];
        if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
            SCLogCoreCameraWarning(
                @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for
captureSessionID:%@", memFree, captureSessionID); [_frameCheckTasks removeObjectForKey:captureSessionID]; return; } __block NSMutableArray *frameHealthInfoArray = [NSMutableArray array]; // Execute all tasks and wait for complete [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj; NSMutableDictionary *frameHealthInfo; UIImage *image = task.unifiedImage; if (image) { // Get frame health info frameHealthInfo = [self _getFrameHealthInfoForImage:image source:[task textForSource] snapType:[task textForSnapType] metadata:task.metadata sourceImageSize:task.sourceImageSize captureSessionID:captureSessionID]; NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"]; NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"]; NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"]; if ([isTotallyBlackNum boolValue]) { task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black; } else if ([isPossibleBlackNum boolValue]) { task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black; } else if ([hasExecutionError boolValue]) { task.errorType = SCManagedFrameHealthCheckError_Execution_Error; } } else { frameHealthInfo = [NSMutableDictionary dictionary]; task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap; } if (frameHealthInfo) { frameHealthInfo[@"frame_source"] = [task textForSource]; frameHealthInfo[@"snap_type"] = [task textForSnapType]; frameHealthInfo[@"error_type"] = [task textForErrorType]; frameHealthInfo[@"capture_session_id"] = captureSessionID; frameHealthInfo[@"metadata"] = task.metadata; if (task.videoProperties.count > 0) { [frameHealthInfo addEntriesFromDictionary:task.videoProperties]; } [frameHealthInfoArray addObject:frameHealthInfo]; } // Release the image as soon as possible to mitigate the memory pressure task.unifiedImage = nil; }]; for (NSDictionary *frameHealthInfo in frameHealthInfoArray) { if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) { // // TODO: Zi Kai Chen - add this back. Normally we use id for // this but as this is a shared instance we cannot easily inject it. The work would // involve making this not a shared instance. 
                //     SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
                //                         JSONStringSerializeObjectForLogging(frameHealthInfo));
            }
            [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
                                                           parameters:frameHealthInfo
                                                     secretParameters:nil
                                                              metrics:nil];
        }
        [_frameCheckTasks removeObjectForKey:captureSessionID];
    }];
}

- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
                                              source:(NSString *)source
                                            snapType:(NSString *)snapType
                                            metadata:(NSDictionary *)metadata
                                     sourceImageSize:(CGSize)sourceImageSize
                                    captureSessionID:(NSString *)captureSessionID
{
    SCTraceODPCompatibleStart(2);
    NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
    size_t samplesCount = 0;
    CFTimeInterval start = CACurrentMediaTime();
    CGImageRef imageRef = image.CGImage;
    size_t imageWidth = CGImageGetWidth(imageRef);
    size_t imageHeight = CGImageGetHeight(imageRef);
    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
    CFTimeInterval getImageDataTime = CACurrentMediaTime();
    if (pixelData) {
        const Byte *imageData = CFDataGetBytePtr(pixelData);
        NSInteger stripLength = 0;
        NSInteger bufferLength = 0;
        NSInteger imagePixels = imageWidth * imageHeight;
        // Limit the max sampled frames
        if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
            stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
            bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
        } else {
            stripLength = 4;
            bufferLength = imagePixels;
        }
        samplesCount = bufferLength;
        // Avoid dividing by zero
        if (samplesCount != 0) {
            FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
                                              stripLength:stripLength
                                             bufferLength:bufferLength
                                               bitmapInfo:CGImageGetBitmapInfo(imageRef)];
            float averageR = sumRGBA.R / samplesCount;
            float averageG = sumRGBA.G / samplesCount;
            float averageB = sumRGBA.B / samplesCount;
            float averageA = sumRGBA.A / samplesCount;
            parameters[@"average_sampled_rgba_r"] = @(averageR);
            parameters[@"average_sampled_rgba_g"] = @(averageG);
            parameters[@"average_sampled_rgba_b"] = @(averageB);
            parameters[@"average_sampled_rgba_a"] = @(averageA);
            parameters[@"origin_frame_width"] = @(sourceImageSize.width);
            parameters[@"origin_frame_height"] = @(sourceImageSize.height);
            // Also report "possible black" to identify an intentional black snap taken by covering the camera.
            // Normally averageA is very near 255, but for the video overlay image it is very small,
            // so we require averageA > 250 to avoid counting the video overlay image as possibly black.
            if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
                averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
                parameters[@"is_possible_black"] = @(YES);
                // Use this parameter for BigQuery conditions in Grafana
                if (averageR == 0 && averageG == 0 && averageB == 0) {
                    parameters[@"is_total_black"] = @(YES);
                }
            }
        } else {
            SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
                                 source, captureSessionID);
            parameters[@"execution_error"] = @(YES);
        }
        CFRelease(pixelData);
    } else {
        SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
                             captureSessionID);
        parameters[@"execution_error"] = @(YES);
    }
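    // Worked example of the sampling math above (added commentary, values follow
    // directly from the constants in this file): a scaled 300x300 image has
    // 90,000 pixels, which exceeds kSCManagedFrameHealthCheckerMaxSamples (2304), so
    //   stripLength  = 90000 / 2304 * 4 = 39 * 4 = 156 bytes between samples
    //   bufferLength = 2304 sampled pixels
    // vDspColorElementSum() then walks the RGBA buffer with that 156-byte stride,
    // once per channel offset (+0, +1, +2, +3), so every channel averages the same
    // 2304 pixels. A frame is "possibly black" when all RGB averages fall below
    // 20.0 with alpha near 255, and "totally black" when they are exactly 0.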
captureSessionID:%@", snapType, source, captureSessionID); parameters[@"execution_error"] = @(YES); } parameters[@"sample_size"] = @(samplesCount); CFTimeInterval end = CACurrentMediaTime(); SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f", snapType, source, getImageDataTime - start, end - getImageDataTime, end - start); return parameters; } - (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData stripLength:(NSInteger)stripLength bufferLength:(NSInteger)bufferLength bitmapInfo:(CGBitmapInfo)bitmapInfo { SCTraceODPCompatibleStart(2); FloatRGBA sumRGBA; if ((bitmapInfo & kCGImageAlphaPremultipliedFirst) && (bitmapInfo & kCGImageByteOrder32Little)) { // BGRA sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength); sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength); sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength); sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength); } else { // TODO. support other types beside RGBA sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength); sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength); sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength); sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength); } return sumRGBA; } @end ================================================ FILE: ManagedCapturer/SCManagedFrontFlashController.h ================================================ // // SCManagedFrontFlashController.h // Snapchat // // Created by Liu Liu on 5/4/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import // This object is only access on SCManagedCapturer thread @interface SCManagedFrontFlashController : NSObject @property (nonatomic, assign) BOOL flashActive; @property (nonatomic, assign) BOOL torchActive; @end ================================================ FILE: ManagedCapturer/SCManagedFrontFlashController.m ================================================ // // SCManagedFrontFlashController.m // Snapchat // // Created by Liu Liu on 5/4/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCManagedFrontFlashController.h" #import #import #import #import @import UIKit; @implementation SCManagedFrontFlashController { BOOL _active; UIView *_brightView; CGFloat _brightnessWhenFlashAndTorchOff; } - (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive { SCTraceStart(); SCAssertMainThread(); BOOL wasActive = _active; _active = flashActive || torchActive; if (!wasActive && _active) { [self _activateFlash:flashActive]; } else if (wasActive && !_active) { [self _deactivateFlash]; } } - (void)_activateFlash:(BOOL)flashActive { UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow]; if (!_brightView) { CGRect frame = [mainWindow bounds]; CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame)); frame.size = CGSizeMake(maxLength, maxLength); // Using the max length on either side to be compatible with different orientations _brightView = [[UIView alloc] initWithFrame:frame]; _brightView.userInteractionEnabled = NO; _brightView.backgroundColor = [UIColor whiteColor]; } _brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness; SCLogGeneralInfo(@"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0", _brightnessWhenFlashAndTorchOff); [self _brightenLoop]; _brightView.alpha = flashActive ? 
1.0 : 0.75; [mainWindow addSubview:_brightView]; } - (void)_deactivateFlash { SCLogGeneralInfo(@"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f", [UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff); [UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff; if (_brightView) { [_brightView removeFromSuperview]; } } - (void)_brightenLoop { if (_active) { SCLogGeneralInfo(@"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0", [UIScreen mainScreen].brightness); [UIScreen mainScreen].brightness = 1.0; dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) { [self _brightenLoop]; }); } else { SCLogGeneralInfo(@"[SCManagedFrontFlashController] Recording is done, brighten loop ends"); } } - (void)setFlashActive:(BOOL)flashActive { SCTraceStart(); if (_flashActive != flashActive) { _flashActive = flashActive; BOOL torchActive = _torchActive; runOnMainThreadAsynchronously(^{ [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; }); } } - (void)setTorchActive:(BOOL)torchActive { SCTraceStart(); if (_torchActive != torchActive) { _torchActive = torchActive; BOOL flashActive = _flashActive; runOnMainThreadAsynchronously(^{ [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; }); } } @end ================================================ FILE: ManagedCapturer/SCManagedLegacyStillImageCapturer.h ================================================ // // SCManagedLegacyStillImageCapturer.h // Snapchat // // Created by Chao Pang on 10/4/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCManagedStillImageCapturer.h" @interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer @end ================================================ FILE: ManagedCapturer/SCManagedLegacyStillImageCapturer.m ================================================ // // SCManagedLegacyStillImageCapturer.m // Snapchat // // Created by Chao Pang on 10/4/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. 
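//
// NOTE: this capturer wraps the AVCaptureStillImageOutput API (deprecated since
// iOS 10, hence the -Wdeprecated-declarations pragmas below) and, per its own
// comments, remains in use only on devices running versions under 10.2 that
// cannot take the AVCapturePhotoOutput path implemented by SCManagedPhotoCapturer.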
// #import "SCManagedLegacyStillImageCapturer.h" #import "AVCaptureConnection+InputDevice.h" #import "SCCameraTweaks.h" #import "SCLogger+Camera.h" #import "SCManagedCapturer.h" #import "SCManagedStillImageCapturer_Protected.h" #import "SCStillImageCaptureVideoInputMethod.h" #import #import #import #import #import #import #import #import #import @import ImageIO; static NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain = @"kSCLegacyStillImageCaptureDefaultMethodErrorDomain"; static NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain = @"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain"; static NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000; static NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001; @implementation SCManagedLegacyStillImageCapturer { #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" AVCaptureStillImageOutput *_stillImageOutput; #pragma clang diagnostic pop BOOL _shouldCapture; NSUInteger _retries; SCStillImageCaptureVideoInputMethod *_videoFileMethod; } - (instancetype)initWithSession:(AVCaptureSession *)session performer:(id)performer lensProcessingCore:(id)lensProcessingCore delegate:(id)delegate { SCTraceStart(); self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate]; if (self) { [self setupWithSession:session]; } return self; } - (void)setupWithSession:(AVCaptureSession *)session { SCTraceStart(); #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" _stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; #pragma clang diagnostic pop _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; [self setAsOutput:session]; } - (void)setAsOutput:(AVCaptureSession *)session { SCTraceStart(); if ([session canAddOutput:_stillImageOutput]) { [session addOutput:_stillImageOutput]; } } - (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled { SCTraceStart(); if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) { _stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled; } } - (void)setPortraitModeCaptureEnabled:(BOOL)enabled { // Legacy capturer only used on devices running versions under 10.2, which don't support depth data // so this function is never called and does not need to be implemented } - (void)enableStillImageStabilization { SCTraceStart(); #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) { _stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES; } #pragma clang diagnostic pop } - (void)removeAsOutput:(AVCaptureSession *)session { SCTraceStart(); [session removeOutput:_stillImageOutput]; } - (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio atZoomFactor:(float)zoomFactor fieldOfView:(float)fieldOfView state:(SCManagedCapturerState *)state captureSessionID:(NSString *)captureSessionID shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo completionHandler: (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler { SCTraceStart(); SCAssert(completionHandler, @"completionHandler shouldn't be nil"); _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 
seconds _aspectRatio = aspectRatio; _zoomFactor = zoomFactor; _fieldOfView = fieldOfView; _state = state; _captureSessionID = captureSessionID; _shouldCaptureFromVideo = shouldCaptureFromVideo; SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying " @"current completion handler."); _completionHandler = [completionHandler copy]; [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart]; if (!_adjustingExposureManualDetect) { SCLogCoreCameraInfo(@"Capturing still image now"); [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; _shouldCapture = NO; } else { SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image"); _shouldCapture = YES; [self _deadlineCaptureStillImage]; } } #pragma mark - SCManagedDeviceCapacityAnalyzerListener - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingExposure:(BOOL)adjustingExposure { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. @strongify(self); SC_GUARD_ELSE_RETURN(self); self->_adjustingExposureManualDetect = adjustingExposure; [self _didChangeAdjustingExposure:adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect]; }]; } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); self->_lightingConditionType = lightingCondition; }]; } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO]; }]; } #pragma mark - Private methods - (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy { if (!adjustingExposure && self->_shouldCapture) { SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy); [self _captureStillImageWithExposureAdjustmentStrategy:strategy]; self->_shouldCapture = NO; } } - (void)_deadlineCaptureStillImage { SCTraceStart(); // Use the SCManagedCapturer's private queue. 
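    // The capture is raced against a deadline: either the exposure-settled
    // callback (_didChangeAdjustingExposure:withStrategy:) fires first, or the
    // timed block below fires at SCCameraTweaksExposureDeadline() (the ~0.4s
    // noted above). Whichever runs first on this serial performer clears
    // _shouldCapture, making the other path a no-op.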
    [_performer perform:^{
        if (_shouldCapture) {
            [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
            _shouldCapture = NO;
        }
    }
                  after:SCCameraTweaksExposureDeadline()];
}

- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy
{
    SCTraceStart();
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
    if (_shouldCaptureFromVideo) {
        [self captureStillImageFromVideoBuffer];
        return;
    }
    SCAssert(_stillImageOutput, @"stillImageOutput shouldn't be nil");
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput;
#pragma clang diagnostic pop
    AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput];
    SCManagedCapturerState *state = [_state copy];
    dispatch_block_t legacyStillImageCaptureBlock = ^{
        SCCAssertMainThread();
        // If the application is not in background, and we have a still image connection, do the capture.
        // Otherwise fail.
        if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
            [_performer performImmediatelyIfCurrentPerformer:^{
                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
                    _completionHandler;
                _completionHandler = nil;
                completionHandler(nil, nil,
                                  [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                      code:kSCManagedStillImageCapturerApplicationStateBackground
                                                  userInfo:nil]);
            }];
            return;
        }
#if !TARGET_IPHONE_SIMULATOR
        if (!captureConnection) {
            [_performer performImmediatelyIfCurrentPerformer:^{
                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
                    _completionHandler;
                _completionHandler = nil;
                completionHandler(nil, nil,
                                  [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
                                                      code:kSCManagedStillImageCapturerNoStillImageConnection
                                                  userInfo:nil]);
            }];
            return;
        }
#endif
        // Select appropriate image capture method
        if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
            if (!_videoFileMethod) {
                _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
            }
            [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCapture"];
            [[SCCoreCameraLogger sharedInstance]
                logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCapture"];
            [_videoFileMethod captureStillImageWithCapturerState:state
                successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
                    [self _legacyStillImageCaptureDidSucceedWithImageData:imageData
                                                             sampleBuffer:nil
                                                               cameraInfo:cameraInfo
                                                                    error:error];
                }
                failureBlock:^(NSError *error) {
                    [self _legacyStillImageCaptureDidFailWithError:error];
                }];
        } else {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) {
                [self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput
                                                     captureConnection:captureConnection
                                                         capturerState:state];
            } else {
                [self _captureStillImageWithStillImageOutput:stillImageOutput
                                           captureConnection:captureConnection
                                               capturerState:state];
            }
#pragma clang diagnostic pop
        }
    };
    // We need to call this on main thread and blocking.
[[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock]; } #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" - (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput captureConnection:(AVCaptureConnection *)captureConnection capturerState:(SCManagedCapturerState *)state { [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; @try { [stillImageOutput captureStillImageAsynchronouslyFromConnection:captureConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { if (imageDataSampleBuffer) { NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; [self _legacyStillImageCaptureDidSucceedWithImageData:imageData sampleBuffer: imageDataSampleBuffer cameraInfo: cameraInfoForBuffer( imageDataSampleBuffer) error:error]; } else { if (error.domain == AVFoundationErrorDomain && error.code == -11800) { // iOS 7 "unknown error"; works if we retry [self _legacyStillImageCaptureWillRetryWithError:error]; } else { [self _legacyStillImageCaptureDidFailWithError:error]; } } }]; } @catch (NSException *e) { [SCCrashLogger logHandledException:e]; [self _legacyStillImageCaptureDidFailWithError: [NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException userInfo:@{ @"exception" : e }]]; } } - (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput captureConnection:(AVCaptureConnection *)captureConnection capturerState:(SCManagedCapturerState *)state { [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; NSArray *bracketArray = [self _bracketSettingsArray:captureConnection]; @try { [stillImageOutput captureStillImageBracketAsynchronouslyFromConnection:captureConnection withSettingsArray:bracketArray completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, AVCaptureBracketedStillImageSettings *settings, NSError *err) { if (!imageDataSampleBuffer) { [self _legacyStillImageCaptureDidFailWithError:err]; return; } NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; [self _legacyStillImageCaptureDidSucceedWithImageData:jpegData sampleBuffer: imageDataSampleBuffer cameraInfo: cameraInfoForBuffer( imageDataSampleBuffer) error:nil]; }]; } @catch (NSException *e) { [SCCrashLogger logHandledException:e]; [self _legacyStillImageCaptureDidFailWithError: [NSError errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException userInfo:@{ @"exception" : e }]]; } } #pragma clang diagnostic pop - (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection { NSInteger const stillCount = 1; NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount]; AVCaptureDevice *device = [stillImageConnection inputDevice]; AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings 
manualExposureSettingsWithExposureDuration:device.exposureDuration ISO:AVCaptureISOCurrent]; for (NSInteger i = 0; i < stillCount; i++) { [bracketSettingsArray addObject:settings]; } return [bracketSettingsArray copy]; } - (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData sampleBuffer:(CMSampleBufferRef)sampleBuffer cameraInfo:(NSDictionary *)cameraInfo error:(NSError *)error { [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; if (sampleBuffer) { CFRetain(sampleBuffer); } [_performer performImmediatelyIfCurrentPerformer:^{ UIImage *fullScreenImage = [self imageFromData:imageData currentZoomFactor:_zoomFactor targetAspectRatio:_aspectRatio fieldOfView:_fieldOfView state:_state sampleBuffer:sampleBuffer]; sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; _completionHandler = nil; completionHandler(fullScreenImage, cameraInfo, error); if (sampleBuffer) { CFRelease(sampleBuffer); } }]; } - (void)_legacyStillImageCaptureDidFailWithError:(NSError *)error { [_performer performImmediatelyIfCurrentPerformer:^{ sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; _completionHandler = nil; completionHandler(nil, nil, error); }]; } - (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error { if (_retries-- > 0) { [_performer perform:^{ [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; } after:kSCCameraRetryInterval]; } else { [self _legacyStillImageCaptureDidFailWithError:error]; } } #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" - (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput #pragma clang diagnostic pop { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); NSArray *connections = [stillImageOutput.connections copy]; for (AVCaptureConnection *connection in connections) { for (AVCaptureInputPort *port in [connection inputPorts]) { if ([[port mediaType] isEqual:AVMediaTypeVideo]) { return connection; } } } return nil; } @end ================================================ FILE: ManagedCapturer/SCManagedPhotoCapturer.h ================================================ // // SCManagedPhotoCapturer.h // Snapchat // // Created by Chao Pang on 10/5/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCManagedStillImageCapturer.h" @interface SCManagedPhotoCapturer : SCManagedStillImageCapturer @end ================================================ FILE: ManagedCapturer/SCManagedPhotoCapturer.m ================================================ // // SCManagedPhotoCapturer.m // Snapchat // // Created by Chao Pang on 10/5/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. 
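//
// NOTE: this is the AVCapturePhotoOutput-based counterpart to
// SCManagedLegacyStillImageCapturer; it adds HRSI handling, bracketed capture
// for night mode and device motion, and (on iOS 11+) portrait-mode depth
// capture via SCStillImageDepthBlurFilter.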
// #import "SCManagedPhotoCapturer.h" #import "AVCaptureConnection+InputDevice.h" #import "SCCameraTweaks.h" #import "SCLogger+Camera.h" #import "SCManagedCapturer.h" #import "SCManagedFrameHealthChecker.h" #import "SCManagedStillImageCapturer_Protected.h" #import "SCStillImageCaptureVideoInputMethod.h" #import "SCStillImageDepthBlurFilter.h" #import #import #import #import #import #import #import #import #import #import @import ImageIO; static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain"; static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000; static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001; typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) { SCManagedPhotoCapturerStatusPrepareToCapture, SCManagedPhotoCapturerStatusWillCapture, SCManagedPhotoCapturerStatusDidFinishProcess, }; @interface SCManagedPhotoCapturer () @end @implementation SCManagedPhotoCapturer { AVCapturePhotoOutput *_photoOutput; BOOL _shouldCapture; BOOL _shouldEnableHRSI; BOOL _portraitModeCaptureEnabled; NSUInteger _retries; CGPoint _portraitModePointOfInterest; SCStillImageDepthBlurFilter *_depthBlurFilter; sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock; SCStillImageCaptureVideoInputMethod *_videoFileMethod; SCManagedPhotoCapturerStatus _status; } - (instancetype)initWithSession:(AVCaptureSession *)session performer:(id)performer lensProcessingCore:(id)lensProcessingCore delegate:(id)delegate { SCTraceStart(); self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate]; if (self) { [self setupWithSession:session]; _portraitModePointOfInterest = CGPointMake(0.5, 0.5); } return self; } - (void)setupWithSession:(AVCaptureSession *)session { SCTraceStart(); _photoOutput = [[AVCapturePhotoOutput alloc] init]; _photoOutput.highResolutionCaptureEnabled = YES; [self setAsOutput:session]; } - (void)setAsOutput:(AVCaptureSession *)session { SCTraceStart(); if ([session canAddOutput:_photoOutput]) { [session addOutput:_photoOutput]; } } - (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); // Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause // black frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings. 
    // https://ph.sc-corp.net/T96228
    _shouldEnableHRSI = highResolutionStillImageOutputEnabled;
}

- (void)enableStillImageStabilization
{
    // The lens stabilization is enabled when configuring AVCapturePhotoSettings
    // instead of AVCapturePhotoOutput
    SCTraceStart();
}

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    _portraitModeCaptureEnabled = enabled;
    if (@available(ios 11.0, *)) {
        _photoOutput.depthDataDeliveryEnabled = enabled;
    }
    if (enabled && _depthBlurFilter == nil) {
        _depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];
    }
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    _portraitModePointOfInterest = pointOfInterest;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    [session removeOutput:_photoOutput];
}

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    SCTraceStart();
    SCAssert(completionHandler, @"completionHandler shouldn't be nil");
    SCAssert([_performer isCurrentPerformer], @"");
    _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
    _aspectRatio = aspectRatio;
    _zoomFactor = zoomFactor;
    _fieldOfView = fieldOfView;
    _state = state;
    _captureSessionID = captureSessionID;
    _shouldCaptureFromVideo = shouldCaptureFromVideo;
    SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying "
                                  @"current completion handler.");
    // The purpose of these lines is to attach a strong reference to self to the completion handler.
    // This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a
    // completion handler.
    // If self is deallocated during the call to -[_photoOutput capturePhotoWithSettings:delegate:], which may happen
    // if any AVFoundationError occurs, then its callback method, captureOutput:didFinish..., will not be called, and
    // the completion handler would be forgotten.
    // This comes with a risk of a memory leak: if for whatever reason the completion handler field is never used and
    // then unset, we have a permanent retain cycle.
    _callbackBlock = completionHandler;
    __typeof(self) strongSelf = self;
    _completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
        strongSelf->_callbackBlock(fullScreenImage, metadata, error);
        strongSelf->_callbackBlock = nil;
    };
    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];
    if (!_adjustingExposureManualDetect) {
        SCLogCoreCameraInfo(@"Capturing still image now");
        [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
        _shouldCapture = NO;
    } else {
        SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image");
        _shouldCapture = YES;
        [self _deadlineCapturePhoto];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        // Since this is handled on a different thread, dispatch back to the queue we operated on.
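        // The surrounding @weakify/@strongify pair is this file's standard way of
        // hopping back onto _performer without retaining self across the async
        // boundary; SC_GUARD_ELSE_RETURN bails out if self has been deallocated
        // before the block runs, so the state writes below stay safe.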
self->_adjustingExposureManualDetect = adjustingExposure; [self _didChangeAdjustingExposure:adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect]; }]; } - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); self->_lightingConditionType = lightingCondition; }]; } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO]; }]; } #pragma mark - AVCapturePhotoCaptureDelegate - (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error { SCTraceStart(); if (photoSampleBuffer) { CFRetain(photoSampleBuffer); } @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ SCTraceStart(); @strongify(self); SC_GUARD_ELSE_RETURN(self); SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]); if (self->_status == SCManagedPhotoCapturerStatusWillCapture) { NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:nil]; [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" splitPoint:@"DID_FINISH_PROCESSING"]; [self _capturePhotoFinishedWithImageData:imageData sampleBuffer:photoSampleBuffer cameraInfo:cameraInfoForBuffer(photoSampleBuffer) error:error]; } else { SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@", [self _photoCapturerStatusToString:self->_status]); [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain code:kSCManagedPhotoCapturerInconsistentStatus userInfo:nil]]; } CFRelease(photoSampleBuffer); }]; } - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0) { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ SCTraceStart(); @strongify(self); SC_GUARD_ELSE_RETURN(self); NSData *imageData = [photo fileDataRepresentation]; SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]); if (self->_status == SCManagedPhotoCapturerStatusWillCapture) { if (@available(ios 11.0, *)) { if (_portraitModeCaptureEnabled) { RenderData renderData = { .depthDataMap = photo.depthData.depthDataMap, .depthBlurPointOfInterest = &_portraitModePointOfInterest, }; imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData]; } } [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" splitPoint:@"DID_FINISH_PROCESSING"]; [self _capturePhotoFinishedWithImageData:imageData 
metadata:photo.metadata error:error]; } else { SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@", [self _photoCapturerStatusToString:self->_status]); [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain code:kSCManagedPhotoCapturerInconsistentStatus userInfo:nil]]; } }]; } - (void)captureOutput:(AVCapturePhotoOutput *)captureOutput willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ SCTraceStart(); @strongify(self); SC_GUARD_ELSE_RETURN(self); if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) { if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) { self->_status = SCManagedPhotoCapturerStatusWillCapture; [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" splitPoint:@"WILL_BEGIN_CAPTURE"]; [self->_delegate managedStillImageCapturerWillCapturePhoto:self]; } else { SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@", [self _photoCapturerStatusToString:self->_status]); } } }]; } - (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings { SCTraceStart(); @weakify(self); [_performer performImmediatelyIfCurrentPerformer:^{ SCTraceStart(); @strongify(self); SC_GUARD_ELSE_RETURN(self); if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) { if (self->_status == SCManagedPhotoCapturerStatusWillCapture || self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) { [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay uniqueId:@"IMAGE" splitPoint:@"DID_CAPTURE_PHOTO"]; [self->_delegate managedStillImageCapturerDidCapturePhoto:self]; } else { SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@", [self _photoCapturerStatusToString:self->_status]); } } }]; } #pragma mark - Private methods - (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy { if (!adjustingExposure && self->_shouldCapture) { SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy); [self _capturePhotoWithExposureAdjustmentStrategy:strategy]; self->_shouldCapture = NO; } } - (void)_capturePhotoFinishedWithImageData:(NSData *)imageData sampleBuffer:(CMSampleBufferRef)sampleBuffer cameraInfo:(NSDictionary *)cameraInfo error:(NSError *)error { [self _photoCaptureDidSucceedWithImageData:imageData sampleBuffer:sampleBuffer cameraInfo:cameraInfoForBuffer(sampleBuffer) error:error]; self->_status = SCManagedPhotoCapturerStatusDidFinishProcess; } - (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error { [self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error]; self->_status = SCManagedPhotoCapturerStatusDidFinishProcess; } - (void)_deadlineCapturePhoto { SCTraceStart(); // Use the SCManagedCapturer's private queue. 
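    // Unlike the legacy capturer's deadline block, this one goes through
    // @weakify/@strongify, so a pending deadline does not keep the capturer
    // alive; if self is gone when the timer fires, SC_GUARD_ELSE_RETURN exits.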
@weakify(self); [_performer perform:^{ @strongify(self); SC_GUARD_ELSE_RETURN(self); if (self->_shouldCapture) { [self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline]; self->_shouldCapture = NO; } } after:SCCameraTweaksExposureDeadline()]; } - (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy { SCTraceStart(); [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy]; if (_shouldCaptureFromVideo) { [self captureStillImageFromVideoBuffer]; return; } SCAssert([_performer isCurrentPerformer], @""); SCAssert(_photoOutput, @"_photoOutput shouldn't be nil"); _status = SCManagedPhotoCapturerStatusPrepareToCapture; AVCapturePhotoOutput *photoOutput = _photoOutput; AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput]; SCManagedCapturerState *state = [_state copy]; #if !TARGET_IPHONE_SIMULATOR if (!captureConnection) { sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; _completionHandler = nil; completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain code:kSCManagedStillImageCapturerNoStillImageConnection userInfo:nil]); } #endif AVCapturePhotoSettings *photoSettings = [self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state]; // Select appropriate image capture method if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) { if (!_videoFileMethod) { _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init]; } [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"]; [_delegate managedStillImageCapturerWillCapturePhoto:self]; [_videoFileMethod captureStillImageWithCapturerState:state successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) { [_performer performImmediatelyIfCurrentPerformer:^{ [self _photoCaptureDidSucceedWithImageData:imageData sampleBuffer:nil cameraInfo:cameraInfo error:error]; }]; } failureBlock:^(NSError *error) { [_performer performImmediatelyIfCurrentPerformer:^{ [self _photoCaptureDidFailWithError:error]; }]; }]; } else { [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"]; @try { [photoOutput capturePhotoWithSettings:photoSettings delegate:self]; } @catch (NSException *e) { [SCCrashLogger logHandledException:e]; [self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain code:kSCManagedPhotoCapturerErrorEncounteredException userInfo:@{ @"exception" : e }]]; } } } - (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData sampleBuffer:(CMSampleBufferRef)sampleBuffer cameraInfo:(NSDictionary *)cameraInfo error:(NSError *)error { SCTraceStart(); SCAssert([_performer isCurrentPerformer], @""); [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; UIImage *fullScreenImage = [self imageFromData:imageData currentZoomFactor:_zoomFactor targetAspectRatio:_aspectRatio fieldOfView:_fieldOfView state:_state sampleBuffer:sampleBuffer]; [[SCLogger sharedInstance] 
        updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                   uniqueId:@"IMAGE"
                 splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, cameraInfo, error);
    }
}

- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
                                    metadata:(NSDictionary *)metadata
                                       error:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    [[SCCoreCameraLogger sharedInstance]
        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
    UIImage *fullScreenImage = [self imageFromData:imageData
                                 currentZoomFactor:_zoomFactor
                                 targetAspectRatio:_aspectRatio
                                       fieldOfView:_fieldOfView
                                             state:_state
                                          metadata:metadata];
    [[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
                                          uniqueId:@"IMAGE"
                                        splitPoint:@"WILL_START_COMPLETION_HANDLER"];
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(fullScreenImage, metadata, error);
    }
}

- (void)_photoCaptureDidFailWithError:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(nil, nil, error);
    }
}

- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    NSArray *connections = [photoOutput.connections copy];
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                return connection;
            }
        }
    }
    return nil;
}

- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                        captureConnection:(AVCaptureConnection *)captureConnection
                                             captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
                                        captureConnection:captureConnection
                                             captureState:state];
    } else {
        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
    }
}

- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
{
    // According to Apple documentation, AVCapturePhotoBracketSettings does not support flashMode,
    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
    // Besides, we only use AVCapturePhotoBracketSettings if capture settings need to be set manually.
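    // Concretely: bracketed settings are used only when flash is off and portrait
    // mode is off, and additionally either (a) enhanced night mode is supported
    // and active, or (b) the delegate reports the device is under motion. For
    // example, a night-mode snap without flash takes the bracketed path, while
    // any flash-active snap falls back to the default AVCapturePhotoSettings.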
return !state.flashActive && !_portraitModeCaptureEnabled && (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) || [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]); } - (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput captureState:(SCManagedCapturerState *)state { SCTraceStart(); // Specify the output file format AVCapturePhotoSettings *photoSettings = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}]; // Enable HRSI if necessary if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) { photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI; } // Turn on flash if active and supported by device if (state.flashActive && state.flashSupported) { photoSettings.flashMode = AVCaptureFlashModeOn; } // Turn on stabilization if available // Seems that setting autoStillImageStabilizationEnabled doesn't work during video capture session, // but we set enable it anyway as it is harmless. if (photoSettings.isAutoStillImageStabilizationEnabled) { photoSettings.autoStillImageStabilizationEnabled = YES; } if (_portraitModeCaptureEnabled) { if (@available(ios 11.0, *)) { photoSettings.depthDataDeliveryEnabled = YES; } } return photoSettings; } - (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput captureConnection:(AVCaptureConnection *)captureConnection captureState:(SCManagedCapturerState *)state { SCTraceStart(); OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue]; NSArray *bracketedSettings = [self _bracketSettingsArray:captureConnection withCaptureState:state]; SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount, @"Bracket photo count cannot exceed maximum count"); // Specify the output file format and raw pixel format AVCapturePhotoBracketSettings *photoSettings = [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType processedFormat:@{ AVVideoCodecKey : AVVideoCodecJPEG } bracketedSettings:bracketedSettings]; // Enable HRSI if necessary if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) { photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI; } // If lens stabilization is supportd, enable the stabilization when device is moving if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled && [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) { photoSettings.lensStabilizationEnabled = YES; } return photoSettings; } - (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection withCaptureState:(SCManagedCapturerState *)state { NSInteger const stillCount = 1; NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount]; AVCaptureDevice *device = [stillImageConnection inputDevice]; CMTime exposureDuration = device.exposureDuration; if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) { exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration]; } AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings manualExposureSettingsWithExposureDuration:exposureDuration ISO:AVCaptureISOCurrent]; for (NSInteger i = 0; i < stillCount; i++) { [bracketSettingsArray addObject:settings]; } return [bracketSettingsArray copy]; } - (NSString 
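// --- Editorial sketch (added in review; not part of the leaked source) ---
// How a single-frame manual-exposure bracket like the one built by
// -_bracketSettingsArray:withCaptureState: travels through the iOS 10-era
// AVCapturePhotoCaptureDelegate callback. The delegate class and the fixed
// 1/30 s exposure are illustrative assumptions, not Snap code.

#import <AVFoundation/AVFoundation.h>

@interface ExampleBracketDelegate : NSObject <AVCapturePhotoCaptureDelegate>
@end

@implementation ExampleBracketDelegate
// Called once per bracketed frame; a one-element bracket fires exactly once.
- (void)captureOutput:(AVCapturePhotoOutput *)output
    didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
                previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
                        resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
                         bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
                                   error:(NSError *)error
{
    if (photoSampleBuffer) {
        NSData *jpegData =
            [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
                                                        previewPhotoSampleBuffer:previewPhotoSampleBuffer];
        (void)jpegData; // hand off to the processing pipeline here
    }
}
@end

static void ExampleCaptureOneManualBracket(AVCapturePhotoOutput *photoOutput, ExampleBracketDelegate *delegate)
{
    AVCaptureManualExposureBracketedStillImageSettings *frame =
        [AVCaptureManualExposureBracketedStillImageSettings
            manualExposureSettingsWithExposureDuration:CMTimeMake(1, 30)
                                                   ISO:AVCaptureISOCurrent];
    // Pass 0 as the RAW pixel format to request processed (JPEG) output only.
    AVCapturePhotoBracketSettings *settings =
        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:0
                                                                  processedFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}
                                                                bracketedSettings:@[ frame ]];
    [photoOutput capturePhotoWithSettings:settings delegate:delegate];
}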
- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status
{
    switch (status) {
        case SCManagedPhotoCapturerStatusPrepareToCapture:
            return @"PhotoCapturerStatusPrepareToCapture";
        case SCManagedPhotoCapturerStatusWillCapture:
            return @"PhotoCapturerStatusWillCapture";
        case SCManagedPhotoCapturerStatusDidFinishProcess:
            return @"PhotoCapturerStatusDidFinishProcess";
    }
}

@end

================================================
FILE: ManagedCapturer/SCManagedRecordedVideo.h
================================================
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import
#import
#import

@protocol SCManagedRecordedVideo

@property (nonatomic, copy, readonly) NSURL *videoURL;
@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;
@property (nonatomic, copy, readonly) UIImage *placeholderImage;
@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

@end

@interface SCManagedRecordedVideo : NSObject

@property (nonatomic, copy, readonly) NSURL *videoURL;
@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;
@property (nonatomic, copy, readonly) UIImage *placeholderImage;
@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera;

@end

================================================
FILE: ManagedCapturer/SCManagedRecordedVideo.m
================================================
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedRecordedVideo.h"

#import

@implementation SCManagedRecordedVideo

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera
{
    self = [super init];
    if (self) {
        _videoURL = [(NSObject *)videoURL copy];
        _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy];
        _placeholderImage = [(NSObject *)placeholderImage copy];
        _isFrontFacingCamera = isFrontFacingCamera;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _videoURL = [aDecoder decodeObjectForKey:@"videoURL"];
        _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@"rawVideoDataFileURL"];
        _placeholderImage = [aDecoder decodeObjectForKey:@"placeholderImage"];
        _isFrontFacingCamera = [aDecoder decodeBoolForKey:@"isFrontFacingCamera"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeObject:_videoURL forKey:@"videoURL"];
    [aCoder encodeObject:_rawVideoDataFileURL forKey:@"rawVideoDataFileURL"];
    [aCoder encodeObject:_placeholderImage forKey:@"placeholderImage"];
    [aCoder encodeBool:_isFrontFacingCamera forKey:@"isFrontFacingCamera"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id)fasterCoder
{
    [fasterCoder encodeBool:_isFrontFacingCamera];
    [fasterCoder encodeObject:_placeholderImage];
    [fasterCoder encodeObject:_rawVideoDataFileURL];
    [fasterCoder encodeObject:_videoURL];
}

- (void)decodeWithFasterDecoder:(id)fasterDecoder
{
    _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool];
    _placeholderImage = (UIImage *)[fasterDecoder decodeObject];
    _rawVideoDataFileURL = (NSURL *)[fasterDecoder decodeObject];
    _videoURL = (NSURL *)[fasterDecoder decodeObject];
}
- (void)setObject:(id)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 50783861721184594ULL:
            _placeholderImage = (UIImage *)val;
            break;
        case 13152167848358790ULL:
            _rawVideoDataFileURL = (NSURL *)val;
            break;
        case 48945309622713334ULL:
            _videoURL = (NSURL *)val;
            break;
    }
}

- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 11924284868025312ULL:
            _isFrontFacingCamera = (BOOL)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 17435789727352013688ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        4 /* Total */,
        FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (self == object) {
        return YES;
    }
    if (![object isMemberOfClass:[self class]]) {
        return NO;
    }
    SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object;
    if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) {
        return NO;
    }
    if (other.rawVideoDataFileURL != _rawVideoDataFileURL &&
        ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) {
        return NO;
    }
    if (other.placeholderImage != _placeholderImage &&
        ![(NSObject *)other.placeholderImage isEqual:_placeholderImage]) {
        return NO;
    }
    if (other.isFrontFacingCamera != _isFrontFacingCamera) {
        return NO;
    }
    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash],
                              (NSUInteger)_isFrontFacingCamera};
    NSUInteger result = subhashes[0];
    for (int i = 1; i < 4; i++) {
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\tvideoURL:%@\n", [_videoURL description]];
    [desc appendFormat:@"\trawVideoDataFileURL:%@\n", [_rawVideoDataFileURL description]];
    [desc appendFormat:@"\tplaceholderImage:%@\n", [_placeholderImage description]];
    [desc appendFormat:@"\tisFrontFacingCamera:%@\n", [@(_isFrontFacingCamera) description]];
    [desc appendString:@"}\n"];
    return [desc copy];
}

@end

================================================
FILE: ManagedCapturer/SCManagedRecordedVideo.value
================================================
interface SCManagedRecordedVideo
    NSURL *videoURL;
    NSURL *rawVideoDataFileURL;
    UIImage *placeholderImage;
    BOOL isFrontFacingCamera;
end
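// --- Editorial sketch (added in review; not part of the leaked source) ---
// The .value spec above is the input that value-object.rb expands into the
// boilerplate seen in SCManagedRecordedVideo.m. A quick demonstration of the
// generated contract (field-wise equality and hashing, copy-as-self); the
// file paths are illustrative.

#import <Foundation/Foundation.h>

static void DemoRecordedVideoValueSemantics(void)
{
    NSURL *videoURL = [NSURL fileURLWithPath:@"/tmp/snap.mp4"];       // illustrative path
    NSURL *rawDataURL = [NSURL fileURLWithPath:@"/tmp/snap.mp4.dat"]; // illustrative path
    SCManagedRecordedVideo *a = [[SCManagedRecordedVideo alloc] initWithVideoURL:videoURL
                                                             rawVideoDataFileURL:rawDataURL
                                                                placeholderImage:nil
                                                             isFrontFacingCamera:YES];
    SCManagedRecordedVideo *b = [[SCManagedRecordedVideo alloc] initWithVideoURL:videoURL
                                                             rawVideoDataFileURL:rawDataURL
                                                                placeholderImage:nil
                                                             isFrontFacingCamera:YES];
    NSCAssert([a isEqual:b] && [a hash] == [b hash], @"field-wise equality implies equal hashes");
    NSCAssert([a copy] == a, @"immutable value objects return self from -copy");
}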
================================================
FILE: ManagedCapturer/SCManagedStillImageCapturer.h
================================================
//
//  SCManagedStillImageCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCoreCameraLogger.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import
#import
#import
#import

SC_EXTERN_C_BEGIN
extern BOOL SCPhotoCapturerIsEnabled(void);
SC_EXTERN_C_END

@protocol SCPerforming;
@protocol SCManagedStillImageCapturerDelegate;
@class SCCaptureResource;

typedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(
    UIImage *fullScreenImage, NSDictionary *metadata, NSError *error);

@interface SCManagedStillImageCapturer : NSObject {
    SCManagedCapturerState *_state;
    BOOL _shouldCaptureFromVideo;
    BOOL _captureImageFromVideoImmediately;
    CGFloat _aspectRatio;
    float _zoomFactor;
    float _fieldOfView;
    BOOL _adjustingExposureManualDetect;
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler;
}

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource;

SC_INIT_AND_NEW_UNAVAILABLE;

@property (nonatomic, weak) id<SCManagedStillImageCapturerDelegate> delegate;

- (void)setupWithSession:(AVCaptureSession *)session;
- (void)setAsOutput:(AVCaptureSession *)session;
- (void)removeAsOutput:(AVCaptureSession *)session;
- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled;
- (void)setPortraitModeCaptureEnabled:(BOOL)enabled;
- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest;
- (void)enableStillImageStabilization;

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler;

- (void)captureStillImageFromVideoBuffer;

@end

@protocol SCManagedStillImageCapturerDelegate

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer;
- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@optional
- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;
- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@end
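// --- Editorial sketch (added in review; not part of the leaked source) ---
// A caller-side view of the still-capture entry point declared above. The
// capturer and state values are assumed to come from an existing capture
// stack; the aspect ratio, field of view and error handling are illustrative.

static void ExampleCaptureOneStill(SCManagedStillImageCapturer *capturer, SCManagedCapturerState *state)
{
    [capturer captureStillImageWithAspectRatio:9.0 / 16.0
                                  atZoomFactor:1
                                   fieldOfView:65.0f
                                         state:state
                              captureSessionID:[NSUUID UUID].UUIDString
                        shouldCaptureFromVideo:NO
                             completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
                                 if (error) {
                                     // Both the no-connection and in-flight-exception paths land here.
                                     NSLog(@"still capture failed: %@", error);
                                     return;
                                 }
                                 // fullScreenImage arrives lens-processed, resized and cropped.
                                 NSLog(@"captured image %@ (metadata keys: %@)", fullScreenImage, metadata.allKeys);
                             }];
}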
================================================
FILE: ManagedCapturer/SCManagedStillImageCapturer.mm
================================================
//
//  SCManagedStillImageCapturer.m
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCLogger+Camera.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedLegacyStillImageCapturer.h"
#import "SCManagedPhotoCapturer.h"
#import "SCManagedStillImageCapturerHandler.h"
#import "SCManagedStillImageCapturer_Protected.h"

#import
#import
#import
#import
#import
#import
#import
#import
#import
#import

NSString *const kSCManagedStillImageCapturerErrorDomain = @"kSCManagedStillImageCapturerErrorDomain";

NSInteger const kSCCameraShutterSoundID = 1108;

#if !TARGET_IPHONE_SIMULATOR
NSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101;
#endif
NSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102;

// We will do the image capture regardless of whether there is still a camera adjustment in progress after 0.4 seconds.
NSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4;
NSTimeInterval const kSCCameraRetryInterval = 0.1;

BOOL SCPhotoCapturerIsEnabled(void)
{
    // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2
    return SC_AT_LEAST_IOS_10_2;
}

NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer)
{
    CFDictionaryRef exifAttachments =
        (CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL);
    float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue];
    NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue];
    return @{
        (__bridge NSString *)kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating),
        (__bridge NSString *)kCGImagePropertyExifBrightnessValue : @(brightness)
    };
}

@implementation SCManagedStillImageCapturer

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCPhotoCapturerIsEnabled()) {
        return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                     performer:captureResource.queuePerformer
                                            lensProcessingCore:captureResource.lensProcessingCore
                                                      delegate:captureResource.stillImageCapturerHandler];
    } else {
        return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                                performer:captureResource.queuePerformer
                                                       lensProcessingCore:captureResource.lensProcessingCore
                                                                 delegate:captureResource.stillImageCapturerHandler];
    }
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensAPI
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
    self = [super init];
    if (self) {
        _session = session;
        _performer = performer;
        _lensAPI = lensAPI;
        _delegate = delegate;
    }
    return self;
}

- (void)setupWithSession:(AVCaptureSession *)session { UNIMPLEMENTED_METHOD; }

- (void)setAsOutput:(AVCaptureSession *)session { UNIMPLEMENTED_METHOD; }

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled { UNIMPLEMENTED_METHOD; }

- (void)enableStillImageStabilization { UNIMPLEMENTED_METHOD; }

- (void)removeAsOutput:(AVCaptureSession *)session { UNIMPLEMENTED_METHOD; }

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled { UNIMPLEMENTED_METHOD; }

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest { UNIMPLEMENTED_METHOD; }
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    UNIMPLEMENTED_METHOD;
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    UNIMPLEMENTED_METHOD;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *metadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer
                                                         photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                                  lensEnabled:state.lensesActive
                                                                       lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:metadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *newMetadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata
                                                     photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                              lensEnabled:state.lensesActive
                                                                   lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:newMetadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state
{
    UIImage *fullScreenImage = image;
    if (state.lensesActive && _lensAPI.isLensApplied) {
        fullScreenImage = [_lensAPI processImage:fullScreenImage
                                    maxPixelSize:[_lensAPI maxPixelSize]
                                  devicePosition:state.devicePosition
                                     fieldOfView:fieldOfView];
    }
    // Resize and crop
    return [self resizeImage:fullScreenImage currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio];
}
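// --- Editorial sketch (added in review; not part of the leaked source) ---
// The -resizeImageUsingCG:... method below scales the long edge down to
// maxPixelSize using round-to-nearest integer math (the "+ imageWidth / 2"
// term). A worked example, assuming an illustrative 4032x3024 capture and a
// maxPixelSize of 1920:
//   targetWidth  = 1920
//   targetHeight = (1920 * 3024 + 4032 / 2) / 4032
//                = (5806080 + 2016) / 4032 = 1440
//   zoomFactor  *= 1920.0f / 4032 (the draw rect shrinks by the same ratio)

#include <stdio.h>

static void DemoResizeMath(void)
{
    size_t imageWidth = 4032, imageHeight = 3024, maxPixelSize = 1920;
    size_t targetWidth = maxPixelSize;
    size_t targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth;
    printf("%zux%zu -> %zux%zu\n", imageWidth, imageHeight, targetWidth, targetHeight); // 4032x3024 -> 1920x1440
}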
- (UIImage *)resizeImage:(UIImage *)image
       currentZoomFactor:(float)currentZoomFactor
       targetAspectRatio:(CGFloat)targetAspectRatio
{
    SCTraceStart();
    if (currentZoomFactor == 1) {
        return SCCropImageToTargetAspectRatio(image, targetAspectRatio);
    } else {
        @autoreleasepool {
            return [self resizeImageUsingCG:image
                          currentZoomFactor:currentZoomFactor
                          targetAspectRatio:targetAspectRatio
                               maxPixelSize:[_lensAPI maxPixelSize]];
        }
    }
}

- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage
              currentZoomFactor:(float)currentZoomFactor
              targetAspectRatio:(CGFloat)targetAspectRatio
                   maxPixelSize:(CGFloat)maxPixelSize
{
    size_t imageWidth = CGImageGetWidth(inputImage.CGImage);
    size_t imageHeight = CGImageGetHeight(inputImage.CGImage);
    SCLogGeneralInfo(@"Captured still image at %dx%d", (int)imageWidth, (int)imageHeight);
    size_t targetWidth, targetHeight;
    float zoomFactor = currentZoomFactor;
    if (imageWidth > imageHeight) {
        targetWidth = maxPixelSize;
        targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth;
        // Update zoom factor here
        zoomFactor *= (float)maxPixelSize / imageWidth;
    } else {
        targetHeight = maxPixelSize;
        targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight;
        zoomFactor *= (float)maxPixelSize / imageHeight;
    }
    if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) {
        SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio,
                                     &targetWidth, &targetHeight);
    }
    CGContextRef context =
        CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage),
                              CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8,
                              CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage));
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGContextDrawImage(context,
                       CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor,
                                  targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor, imageWidth * zoomFactor,
                                  imageHeight * zoomFactor),
                       inputImage.CGImage);
    CGImageRef thumbnail = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    UIImage *image =
        [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation];
    CGImageRelease(thumbnail);
    return image;
}

- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration
{
    CMTime adjustedExposureDuration = exposureDuration;
    if (_lightingConditionType == SCCapturerLightingConditionTypeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5);
    } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5);
    }
    return adjustedExposureDuration;
}
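// --- Editorial sketch (added in review; not part of the leaked source) ---
// What the night-mode multipliers in
// -adjustedExposureDurationForNightModeWithCurrentExposureDuration: do to a
// typical 1/60 s exposure; the base duration is an illustrative assumption.

#import <CoreMedia/CoreMedia.h>
#include <stdio.h>

static void DemoNightModeExposureScaling(void)
{
    CMTime base = CMTimeMake(1, 60);                         // 1/60 s ~= 0.0167 s
    CMTime dark = CMTimeMultiplyByFloat64(base, 1.5);        // ~1/40 s = 0.025 s
    CMTime extremeDark = CMTimeMultiplyByFloat64(base, 2.5); // ~1/24 s ~= 0.0417 s
    printf("dark: %.4f s, extreme dark: %.4f s\n", CMTimeGetSeconds(dark), CMTimeGetSeconds(extremeDark));
}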
#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately);
    _captureImageFromVideoImmediately = NO;
    @weakify(self);
    CFRetain(sampleBuffer);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [self _didCapturePhotoFromVideoBuffer];
        UIImageOrientation orientation = devicePosition == SCManagedCaptureDevicePositionBack
                                             ? UIImageOrientationRight
                                             : UIImageOrientationLeftMirrored;
        UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer)
                                                   backingType:UIImageBackingTypeCGImage
                                                   orientation:orientation
                                                       context:[CIContext contextWithOptions:nil]];
        UIImage *fullScreenImage = [self imageFromImage:videoImage
                                      currentZoomFactor:_zoomFactor
                                      targetAspectRatio:_aspectRatio
                                            fieldOfView:_fieldOfView
                                                  state:_state];
        NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy];
        cameraInfo[@"capture_image_from_video_buffer"] = @"enabled";
        [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo];
        CFRelease(sampleBuffer);
    }];
}

- (void)_willBeginCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
        }
    }];
}

- (void)_didCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
        }
    }];
}

- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        [[SCCoreCameraLogger sharedInstance]
            logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
            _completionHandler;
        _completionHandler = nil;
        if (completionHandler) {
            completionHandler(image, cameraInfo, nil);
        }
    }];
}

- (void)captureStillImageFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil);
        [self _willBeginCapturePhotoFromVideoBuffer];
        self->_captureImageFromVideoImmediately = YES;
    }];
}

@end
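// --- Editorial sketch (added in review; not part of the leaked source) ---
// The ownership pattern used by -managedVideoDataSource:didOutputSampleBuffer:...
// above: a CMSampleBufferRef handed to a callback is only guaranteed to stay
// alive for the duration of the call, so it is CFRetain-ed before the async
// hop and CFRelease-d exactly once after consumption. The plain dispatch queue
// below is an illustrative stand-in for the SCQueuePerformer the real code uses.

#import <CoreMedia/CoreMedia.h>

static void ExampleProcessSampleBufferLater(CMSampleBufferRef sampleBuffer, dispatch_queue_t queue)
{
    CFRetain(sampleBuffer); // keep the buffer alive across the async hop
    dispatch_async(queue, ^{
        CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        (void)pixelBuffer;       // ... consume the pixel buffer here ...
        CFRelease(sampleBuffer); // balance the retain exactly once
    });
}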
================================================
FILE: ManagedCapturer/SCManagedStillImageCapturerHandler.h
================================================
//
//  SCManagedStillImageCapturerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturer.h"

#import

@class SCCaptureResource;

@protocol SCDeviceMotionProvider, SCFileInputDecider;

@interface SCManagedStillImageCapturerHandler : NSObject <SCManagedStillImageCapturerDelegate>

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end

================================================
FILE: ManagedCapturer/SCManagedStillImageCapturerHandler.m
================================================
//
//  SCManagedStillImageCapturerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"

#import
#import
#import
#import

@interface SCManagedStillImageCapturerHandler () {
    __weak SCCaptureResource *_captureResource;
}
@end

@implementation SCManagedStillImageCapturerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Will capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc]
                initWithPresentationTimestamp:kCMTimeZero
                                  fieldOfView:_captureResource.device.fieldOfView];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           willCapturePhoto:state
                                             sampleMetadata:sampleMetadata];
            });
        }
    }];
}

- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                            didCapturePhoto:state];
            });
        }
    }];
}

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.deviceMotionProvider.isUnderDeviceMotion;
}

- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.fileInputDecider.shouldProcessFileInput;
}

@end

================================================
FILE: ManagedCapturer/SCManagedStillImageCapturer_Protected.h
================================================
//
//  SCManagedStillImageCapturer_Protected.h
//  Snapchat
//
//  Created by Chao Pang on 10/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

SC_EXTERN_C_BEGIN
extern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer);
SC_EXTERN_C_END

extern NSString *const kSCManagedStillImageCapturerErrorDomain;

#if !TARGET_IPHONE_SIMULATOR
extern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection;
#endif
extern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground;

// We will do the image capture regardless of whether there is still a camera adjustment in progress after 0.4 seconds.
extern NSTimeInterval const kSCManagedStillImageCapturerDeadline;
extern NSTimeInterval const kSCCameraRetryInterval;

@protocol SCManagedCapturerLensAPI;

@interface SCManagedStillImageCapturer () {
  @protected
    id<SCManagedCapturerLensAPI> _lensAPI;
    id<SCPerforming> _performer;
    AVCaptureSession *_session;
    id<SCManagedStillImageCapturerDelegate> __weak _delegate;
    NSString *_captureSessionID;
    SCCapturerLightingConditionType _lightingConditionType;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata;

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state;

- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration;

@end

================================================
FILE: ManagedCapturer/SCManagedVideoARDataSource.h
================================================
//
//  SCManagedVideoARDataSource.h
//  Snapchat
//
//  Created by Eyal Segal on 20/10/2017.
//

#import "SCCapturerDefines.h"

#import
#import

@protocol SCManagedVideoARDataSource

@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0);

#ifdef SC_USE_ARKIT_FACE
@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0);
#endif

@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0);

@end

================================================
FILE: ManagedCapturer/SCManagedVideoCapturer.h
================================================
//
//  SCManagedVideoCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedRecordedVideo.h"
#import "SCManagedVideoCapturerOutputSettings.h"
#import "SCVideoCaptureSessionInfo.h"

#import
#import
#import
#import
#import

typedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error);

@class SCManagedVideoCapturer, SCTimedTask;

@protocol SCManagedVideoCapturerDelegate

// All these callbacks are invoked on a private queue for video recording channels

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)videoProviderFuture
                          videoSize:(CGSize)videoSize
                   placeholderImage:(UIImage *)placeholderImage
                            session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
                        session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
            didFailWithError:(NSError *)error
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
                 didGetError:(NSError *)error
                     forType:(SCManagedVideoCapturerInfoType)type
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
         presentationTimestamp:(CMTime)presentationTimestamp;

@end

/**
 * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer
 * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output
 * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings.
 */
@interface SCManagedVideoCapturer : NSObject

/**
 * Returns the output URL that was passed into the beginRecordingToURL method
 */
@property (nonatomic, copy, readonly) NSURL *outputURL;

@property (nonatomic, weak) id<SCManagedVideoCapturerDelegate> delegate;

@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession;

@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay;

@property (nonatomic, assign, readonly) BOOL audioQueueStarted;

- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer;

- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration;

- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                maxDuration:(NSTimeInterval)maxDuration
                                                                      toURL:(NSURL *)URL
                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                orientation:(AVCaptureVideoOrientation)videoOrientation
                                                           captureSessionID:(NSString *)captureSessionID;

- (void)stopRecordingAsynchronously;

- (void)cancelRecordingAsynchronously;

// Schedule a task to run, it is thread safe.
- (void)addTimedTask:(SCTimedTask *)task;
// Clear all tasks, it is thread safe.
- (void)clearTimedTasks;

@end

================================================
FILE: ManagedCapturer/SCManagedVideoCapturer.m
================================================
//
//  SCManagedVideoCapturer.m
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedVideoCapturer.h"

#import "NSURL+Asset.h"
#import "SCAudioCaptureSession.h"
#import "SCCameraTweaks.h"
#import "SCCapturerBufferedVideoWriter.h"
#import "SCCoreCameraLogger.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturer.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedVideoCapturerLogger.h"
#import "SCManagedVideoCapturerTimeObserver.h"

#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import
#import

@import CoreMedia;
@import ImageIO;

static NSString *const kSCAudioCaptureAudioSessionLabel = @"CAMERA";

// wild card audio queue error code
static NSInteger const kSCAudioQueueErrorWildCard = -50;
// kAudioHardwareIllegalOperationError, it means hardware failure
static NSInteger const kSCAudioQueueErrorHardware = 1852797029;

typedef NS_ENUM(NSUInteger, SCManagedVideoCapturerStatus) {
    SCManagedVideoCapturerStatusUnknown,
    SCManagedVideoCapturerStatusIdle,
    SCManagedVideoCapturerStatusPrepareToRecord,
    SCManagedVideoCapturerStatusReadyForRecording,
    SCManagedVideoCapturerStatusRecording,
    SCManagedVideoCapturerStatusError,
};

#define SCLogVideoCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)
#define SCLogVideoCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)
#define SCLogVideoCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoCapturer] " fmt, ##__VA_ARGS__)

@interface SCManagedVideoCapturer ()

// This value has to be atomic because it is read on a different thread (write
// on output queue, as always)
@property (atomic, assign, readwrite) SCManagedVideoCapturerStatus status;

@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;

@end

static char *const kSCManagedVideoCapturerQueueLabel = "com.snapchat.managed-video-capturer-queue";
static char *const kSCManagedVideoCapturerPromiseQueueLabel = "com.snapchat.video-capture-promise";

static NSString *const kSCManagedVideoCapturerErrorDomain = @"kSCManagedVideoCapturerErrorDomain";

static NSInteger const kSCManagedVideoCapturerCannotAddAudioVideoInput = 1001;
static NSInteger const kSCManagedVideoCapturerEmptyFrame = 1002;
static NSInteger const kSCManagedVideoCapturerStopBeforeStart = 1003;
static NSInteger const kSCManagedVideoCapturerStopWithoutStart = 1004;
static NSInteger const kSCManagedVideoCapturerZeroVideoSize = -111;

static NSUInteger const kSCVideoContentComplexitySamplingRate = 90;
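// --- Editorial sketch (added in review; not part of the leaked source) ---
// A minimal no-op observer conforming to the SCManagedVideoCapturerDelegate
// protocol declared in the header above; useful for tracing the callback
// order during a recording session. All bodies are illustrative.

@interface ExampleVideoCapturerObserver : NSObject <SCManagedVideoCapturerDelegate>
@end

@implementation ExampleVideoCapturerObserver
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didBeginVideoRecording:(SCVideoCaptureSessionInfo)s { NSLog(@"video began"); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didBeginAudioRecording:(SCVideoCaptureSessionInfo)s { NSLog(@"audio began"); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)f videoSize:(CGSize)sz placeholderImage:(UIImage *)img session:(SCVideoCaptureSessionInfo)s { NSLog(@"will stop"); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)v session:(SCVideoCaptureSessionInfo)s { NSLog(@"succeeded: %@", v.videoURL); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didFailWithError:(NSError *)e session:(SCVideoCaptureSessionInfo)s { NSLog(@"failed: %@", e); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didCancelVideoRecording:(SCVideoCaptureSessionInfo)s { NSLog(@"cancelled"); }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didGetError:(NSError *)e forType:(SCManagedVideoCapturerInfoType)t session:(SCVideoCaptureSessionInfo)s { NSLog(@"info error: %@", e); }
- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)c { return @{}; }
- (void)managedVideoCapturer:(SCManagedVideoCapturer *)c didAppendVideoSampleBuffer:(CMSampleBufferRef)sb presentationTimestamp:(CMTime)pts { }
@end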
// This is the maximum time we will wait for the Recording Capturer pipeline to drain.
// When video stabilization is turned on the extra frame delay is around 20 frames;
// @30 fps this is 0.66 seconds.
static NSTimeInterval const kSCManagedVideoCapturerStopRecordingDeadline = 1.0;

static const char *SCPlaceholderImageGenerationQueueLabel = "com.snapchat.video-capturer-placeholder-queue";
static const char *SCVideoRecordingPreparationQueueLabel = "com.snapchat.video-recording-preparation-queue";

static dispatch_queue_t SCPlaceholderImageGenerationQueue(void)
{
    static dispatch_queue_t queue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        queue = dispatch_queue_create(SCPlaceholderImageGenerationQueueLabel, DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}

@interface SCManagedVideoCapturer ()
@end

@implementation SCManagedVideoCapturer {
    NSTimeInterval _maxDuration;
    NSTimeInterval _recordStartTime;
    SCCapturerBufferedVideoWriter *_videoWriter;
    BOOL _hasWritten;
    SCQueuePerformer *_performer;
    SCQueuePerformer *_videoPreparationPerformer;
    SCAudioCaptureSession *_audioCaptureSession;
    NSError *_lastError;
    UIImage *_placeholderImage;
    // For logging purposes
    BOOL _isVideoSnap;
    NSDictionary *_videoOutputSettings;
    // The following value is used to control the encoder shutdown following a stop recording message.
    // When a shutdown is requested this value will be the timestamp of the last captured frame.
    CFTimeInterval _stopTime;
    NSInteger _stopSession;
    SCAudioConfigurationToken *_preparedAudioConfiguration;
    SCAudioConfigurationToken *_audioConfiguration;
    dispatch_semaphore_t _startRecordingSemaphore;
    // For storing the raw frame data
    NSInteger _rawDataFrameNum;
    NSURL *_rawDataURL;
    SCVideoFrameRawDataCollector *_videoFrameRawDataCollector;
    CMTime _startSessionTime;
    // Indicates the actual processing time of the first frame. Also used for camera timer animation start offset.
    NSTimeInterval _startSessionRealTime;
    CMTime _endSessionTime;
    sc_managed_capturer_recording_session_t _sessionId;
    SCManagedVideoCapturerTimeObserver *_timeObserver;
    SCManagedVideoCapturerLogger *_capturerLogger;
    CGSize _outputSize;
    BOOL _isFrontFacingCamera;
    SCPromise<id<SCManagedRecordedVideo>> *_recordedVideoPromise;
    SCManagedAudioDataSourceListenerAnnouncer *_announcer;
    NSString *_captureSessionID;
    CIContext *_ciContext;
}

@synthesize performer = _performer;

- (instancetype)init
{
    SCTraceStart();
    return [self initWithQueuePerformer:[[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerQueueLabel
                                                               qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                                      queueType:DISPATCH_QUEUE_SERIAL
                                                                        context:SCQueuePerformerContextCamera]];
}

- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _performer = queuePerformer;
        _audioCaptureSession = [[SCAudioCaptureSession alloc] init];
        _audioCaptureSession.delegate = self;
        _announcer = [SCManagedAudioDataSourceListenerAnnouncer new];
        self.status = SCManagedVideoCapturerStatusIdle;
        _capturerLogger = [[SCManagedVideoCapturerLogger alloc] init];
        _startRecordingSemaphore = dispatch_semaphore_create(0);
    }
    return self;
}

- (void)dealloc
{
    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before dealloc: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
}

- (SCVideoCaptureSessionInfo)activeSession
{
    return SCVideoCaptureSessionInfoMake(_startSessionTime, _endSessionTime, _sessionId);
}
- (CGSize)defaultSizeForDeviceFormat:(AVCaptureDeviceFormat *)format
{
    SCTraceStart();
    // if there is no device, and no format
    if (format == nil) {
        // hard code 720p
        return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth,
                          kSCManagedCapturerDefaultVideoActiveFormatHeight);
    }
    CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    CGSize size = CGSizeMake(videoDimensions.width, videoDimensions.height);
    if (videoDimensions.width > kSCManagedCapturerDefaultVideoActiveFormatWidth &&
        videoDimensions.height > kSCManagedCapturerDefaultVideoActiveFormatHeight) {
        CGFloat scaleFactor = MAX((kSCManagedCapturerDefaultVideoActiveFormatWidth / videoDimensions.width),
                                  (kSCManagedCapturerDefaultVideoActiveFormatHeight / videoDimensions.height));
        size = SCSizeMakeAlignTo(SCSizeApplyScale(size, scaleFactor), 2);
    }
    if ([SCDeviceName isIphoneX]) {
        size = SCSizeApplyScale(size, kSCIPhoneXCapturedImageVideoCropRatio);
    }
    return size;
}

- (CGSize)cropSize:(CGSize)size toAspectRatio:(CGFloat)aspectRatio
{
    if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) {
        return size;
    }
    // video input is always in landscape mode
    aspectRatio = 1.0 / aspectRatio;
    if (size.width > size.height * aspectRatio) {
        size.width = size.height * aspectRatio;
    } else {
        size.height = size.width / aspectRatio;
    }
    return CGSizeMake(roundf(size.width / 2) * 2, roundf(size.height / 2) * 2);
}

- (SCManagedVideoCapturerOutputSettings *)defaultRecordingOutputSettingsWithDeviceFormat:
    (AVCaptureDeviceFormat *)deviceFormat
{
    SCTraceStart();
    CGFloat aspectRatio = SCManagedCapturedImageAndVideoAspectRatio();
    CGSize outputSize = [self defaultSizeForDeviceFormat:deviceFormat];
    outputSize = [self cropSize:outputSize toAspectRatio:aspectRatio];
    // [TODO](Chao): remove the dependency of SCManagedVideoCapturer on SnapVideoMetaData
    NSInteger videoBitRate = [SnapVideoMetadata averageTranscodingBitRate:outputSize
                                                              isRecording:YES
                                                              highQuality:YES
                                                                 duration:0
                                                               iFrameOnly:NO
                                                     originalVideoBitRate:0
                                                 overlayImageFileSizeBits:0
                                                        videoPlaybackRate:1
                                                            isLagunaVideo:NO
                                                        hasOverlayToBlend:NO
                                                               sourceType:SCSnapVideoFilterSourceTypeUndefined];
    SCTraceSignal(@"Setup transcoding video bitrate");
    [_capturerLogger logStartingStep:kSCCapturerStartingStepTranscodeingVideoBitrate];
    SCManagedVideoCapturerOutputSettings *outputSettings =
        [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:outputSize.width
                                                             height:outputSize.height
                                                       videoBitRate:videoBitRate
                                                       audioBitRate:64000.0
                                                   keyFrameInterval:15
                                                         outputType:SCManagedVideoCapturerOutputTypeVideoSnap];
    return outputSettings;
}

- (SCQueuePerformer *)_getVideoPreparationPerformer
{
    SCAssert([_performer isCurrentPerformer], @"must run on _performer");
    if (!_videoPreparationPerformer) {
        _videoPreparationPerformer = [[SCQueuePerformer alloc] initWithLabel:SCVideoRecordingPreparationQueueLabel
                                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                                     context:SCQueuePerformerContextCamera];
    }
    return _videoPreparationPerformer;
}
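// --- Editorial sketch (added in review; not part of the leaked source) ---
// The gating pattern used by -prepareForRecordingWithAudioConfiguration: below:
// the recording queue parks on a semaphore until the asynchronous audio-session
// configuration calls back. All names here are illustrative.

#import <Foundation/Foundation.h>

static NSError *ExampleWaitForAsyncConfiguration(void (^configure)(void (^completion)(NSError *)))
{
    dispatch_semaphore_t gate = dispatch_semaphore_create(0);
    __block NSError *configurationError = nil;
    configure(^(NSError *error) {
        configurationError = error;      // runs on the configuration queue
        dispatch_semaphore_signal(gate); // release the waiting queue
    });
    // The caller parks here, exactly like _startRecordingSemaphore below.
    dispatch_semaphore_wait(gate, DISPATCH_TIME_FOREVER);
    return configurationError;
}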
- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration
{
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        self.status = SCManagedVideoCapturerStatusPrepareToRecord;
        if (_audioConfiguration) {
            [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
        }
        __block NSError *audioSessionError = nil;
        _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
            configureWith:configuration
                performer:[self _getVideoPreparationPerformer]
               completion:^(NSError *error) {
                   audioSessionError = error;
                   if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
                       dispatch_semaphore_signal(_startRecordingSemaphore);
                   }
               }];
        // Wait until preparation for recording is done
        dispatch_semaphore_wait(_startRecordingSemaphore, DISPATCH_TIME_FOREVER);
        [_delegate managedVideoCapturer:self
                            didGetError:audioSessionError
                                forType:SCManagedVideoCapturerInfoAudioSessionError
                                session:self.activeSession];
    }];
}

- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                         audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                maxDuration:(NSTimeInterval)maxDuration
                                                                      toURL:(NSURL *)URL
                                                               deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                orientation:(AVCaptureVideoOrientation)videoOrientation
                                                           captureSessionID:(NSString *)captureSessionID
{
    SCTraceStart();
    _captureSessionID = [captureSessionID copy];
    [_capturerLogger prepareForStartingLog];
    [[SCLogger sharedInstance] logTimedEventStart:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                    isUniqueEvent:NO];
    NSTimeInterval startTime = CACurrentMediaTime();
    [[SCLogger sharedInstance] logPreCaptureOperationRequestedAt:startTime];
    [[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointPreCaptureOperationRequested];
    _sessionId = arc4random();
    // Set an invalid time so that we don't process videos when no frames are available
    _startSessionTime = kCMTimeInvalid;
    _endSessionTime = kCMTimeInvalid;
    _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    _audioQueueStarted = NO;
    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo at start of recording: %@",
                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    [_performer performImmediatelyIfCurrentPerformer:^{
        _maxDuration = maxDuration;
        dispatch_block_t startRecordingBlock = ^{
            _rawDataFrameNum = 0;
            // Begin audio recording asynchronously, first, need to have correct audio session.
            SCTraceStart();
            SCLogVideoCapturerInfo(@"Dequeue begin recording with audio session change delay: %lf seconds",
                                   CACurrentMediaTime() - startTime);
            if (self.status != SCManagedVideoCapturerStatusReadyForRecording) {
                SCLogVideoCapturerInfo(@"SCManagedVideoCapturer status: %lu", (unsigned long)self.status);
                // We may already released, but this should be OK.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }
            if (_preparedAudioConfiguration != _audioConfiguration) {
                SCLogVideoCapturerInfo(
                    @"SCManagedVideoCapturer has mismatched audio session token, prepared: %@, have: %@",
                    _preparedAudioConfiguration.token, _audioConfiguration.token);
                // We are on a different audio session token already.
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_preparedAudioConfiguration
                                                               performer:nil
                                                              completion:nil];
                return;
            }
            // Divide start recording workflow into different steps to log delay time.
            // And checkpoint is the end of a step
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession];
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_session_start_end"];
            SCLogVideoCapturerInfo(@"Prepare to begin recording");
            _lastError = nil;
            // Initialize stopTime to a number much larger than CACurrentMediaTime(), which is measured
            // from Jan 1, 2001
            _stopTime = kCFAbsoluteTimeIntervalSince1970;
            // Restart everything
            _hasWritten = NO;
            SCManagedVideoCapturerOutputSettings *finalOutputSettings =
                outputSettings ? outputSettings : [self defaultRecordingOutputSettingsWithDeviceFormat:deviceFormat];
            _isVideoSnap = finalOutputSettings.outputType == SCManagedVideoCapturerOutputTypeVideoSnap;
            _outputSize = CGSizeMake(finalOutputSettings.height, finalOutputSettings.width);
            [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoRecordingStart
                                     parameters:@{
                                         @"video_width" : @(finalOutputSettings.width),
                                         @"video_height" : @(finalOutputSettings.height),
                                         @"bit_rate" : @(finalOutputSettings.videoBitRate),
                                         @"is_video_snap" : @(_isVideoSnap),
                                     }];
            _outputURL = [URL copy];
            _rawDataURL = [_outputURL URLByAppendingPathExtension:@"dat"];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings];
            // Make sure the raw frame data file is gone
            SCTraceSignal(@"Setup video frame raw data");
            [[NSFileManager defaultManager] removeItemAtURL:_rawDataURL error:NULL];
            if ([SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding]) {
                if (!_videoFrameRawDataCollector) {
                    _videoFrameRawDataCollector = [[SCVideoFrameRawDataCollector alloc] initWithPerformer:_performer];
                }
                [_videoFrameRawDataCollector prepareForCollectingVideoFrameRawDataWithRawDataURL:_rawDataURL];
            }
            [_capturerLogger logStartingStep:kSCCapturerStartingStepVideoFrameRawData];
            SCLogVideoCapturerInfo(@"Prepare to begin audio recording");
            [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                             uniqueId:_captureSessionID
                                             stepName:@"audio_queue_start_begin"];
            [self _beginAudioQueueRecordingWithCompleteHandler:^(NSError *error) {
                [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                                 uniqueId:_captureSessionID
                                                 stepName:@"audio_queue_start_end"];
                if (error) {
                    [_delegate managedVideoCapturer:self
                                        didGetError:error
                                            forType:SCManagedVideoCapturerInfoAudioQueueError
                                            session:sessionInfo];
                } else {
                    _audioQueueStarted = YES;
                }
                if (self.status == SCManagedVideoCapturerStatusRecording) {
                    [_delegate managedVideoCapturer:self didBeginAudioRecording:sessionInfo];
                }
            }];
            // Call this delegate first so that we have proper state transition from begin recording to finish / error
            [_delegate managedVideoCapturer:self didBeginVideoRecording:sessionInfo];
            // We need to start with a fresh recording file, make sure it's gone
            [[NSFileManager defaultManager] removeItemAtURL:_outputURL error:NULL];
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAudioRecording];
            SCTraceSignal(@"Setup asset writer");
            NSError *error = nil;
            _videoWriter = [[SCCapturerBufferedVideoWriter alloc] initWithPerformer:_performer
                                                                          outputURL:self.outputURL
                                                                           delegate:self
                                                                              error:&error];
            if (error) {
                self.status = SCManagedVideoCapturerStatusError;
                _lastError = error;
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self
                                    didGetError:error
                                        forType:SCManagedVideoCapturerInfoAssetWriterError
                                        session:sessionInfo];
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }
            [_capturerLogger logStartingStep:kSCCapturerStartingStepAssetWriterConfiguration];
            if (![_videoWriter prepareWritingWithOutputSettings:finalOutputSettings]) {
                _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                 code:kSCManagedVideoCapturerCannotAddAudioVideoInput
                                             userInfo:nil];
                _placeholderImage = nil;
                [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
                return;
            }
            SCTraceSignal(@"Observe asset writer status change");
            SCCAssert(_placeholderImage == nil, @"placeholderImage should be nil");
            self.status = SCManagedVideoCapturerStatusRecording;
            // Only log the recording delay event from camera view (excluding video note recording)
            if (_isVideoSnap) {
                [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsRecordingDelay
                                                   uniqueId:@"VIDEO"
                                                 parameters:@{ @"type" : @"video" }];
            }
            _recordStartTime = CACurrentMediaTime();
        };
        [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay
                                         uniqueId:_captureSessionID
                                         stepName:@"audio_session_start_begin"];
        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord) {
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            startRecordingBlock();
        } else {
            self.status = SCManagedVideoCapturerStatusReadyForRecording;
            if (_audioConfiguration) {
                [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration
                                                               performer:nil
                                                              completion:nil];
            }
            _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
                configureWith:audioConfiguration
                    performer:_performer
                   completion:^(NSError *error) {
                       if (error) {
                           [_delegate managedVideoCapturer:self
                                               didGetError:error
                                                   forType:SCManagedVideoCapturerInfoAudioSessionError
                                                   session:sessionInfo];
                       }
                       startRecordingBlock();
                   }];
        }
    }];
    return sessionInfo;
}

- (NSError *)_handleRetryBeginAudioRecordingErrorCode:(NSInteger)errorCode
                                                error:(NSError *)error
                                            micResult:(NSDictionary *)resultInfo
{
    SCTraceStart();
    NSString *resultStr = SC_CAST_TO_CLASS_OR_NIL(resultInfo[SCAudioSessionRetryDataSourceInfoKey], NSString);
    BOOL changeMicSuccess = [resultInfo[SCAudioSessionRetryDataSourceResultKey] boolValue];
    if (!error) {
        SCManagedVideoCapturerInfoType type = SCManagedVideoCapturerInfoAudioQueueRetrySuccess;
        if (changeMicSuccess) {
            if (errorCode == kSCAudioQueueErrorWildCard) {
                type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue;
            } else if (errorCode == kSCAudioQueueErrorHardware) {
                type = SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware;
            }
        }
        [_delegate managedVideoCapturer:self didGetError:nil forType:type session:self.activeSession];
    } else {
        error = [self _appendInfo:resultStr forInfoKey:@"retry_datasource_result" toError:error];
        SCLogVideoCapturerError(@"Retry setting audio session failed with error:%@", error);
    }
    return error;
}

- (BOOL)_isBottomMicBrokenCode:(NSInteger)errorCode
{
    // we consider both -50 and 1852797029 as a broken microphone case
    return (errorCode == kSCAudioQueueErrorWildCard || errorCode == kSCAudioQueueErrorHardware);
}
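// --- Editorial sketch (added in review; not part of the leaked source) ---
// Why 1852797029 reads as a hardware failure above: Core Audio OSStatus codes
// are often four-character codes, and 1852797029 == 0x6E6F7065 == 'nope'
// (kAudioHardwareIllegalOperationError). -50 is the generic paramErr and has
// no printable four-character form. A small decoder for log spelunking:

static NSString *ExampleFourCCFromOSStatus(int32_t status)
{
    char chars[5] = {(char)(status >> 24), (char)(status >> 16), (char)(status >> 8), (char)status, 0};
    return [NSString stringWithFormat:@"%s", chars];
}
// ExampleFourCCFromOSStatus(1852797029) returns @"nope".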
- (void)_beginAudioQueueRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    SCAssert(block, @"block can not be nil");
    @weakify(self);
    void (^beginAudioBlock)(NSError *error) = ^(NSError *error) {
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [_performer performImmediatelyIfCurrentPerformer:^{
            SCTraceStart();
            NSInteger errorCode = error.code;
            if ([self _isBottomMicBrokenCode:errorCode] &&
                (self.status == SCManagedVideoCapturerStatusReadyForRecording ||
                 self.status == SCManagedVideoCapturerStatusRecording)) {
                SCLogVideoCapturerError(@"Start to retry begin audio queue (error code: %@)", @(errorCode));
                // use front microphone to retry
                NSDictionary *resultInfo = [[SCAudioSession sharedInstance] tryUseFrontMicWithErrorCode:errorCode];
                [self _retryRequestRecordingWithCompleteHandler:^(NSError *error) {
                    // then retry audio queue again
                    [_audioCaptureSession
                        beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                      completionHandler:^(NSError *innerError) {
                                                          NSError *modifyError = [self
                                                              _handleRetryBeginAudioRecordingErrorCode:errorCode
                                                                                                 error:innerError
                                                                                             micResult:resultInfo];
                                                          block(modifyError);
                                                      }];
                }];
            } else {
                block(error);
            }
        }];
    };
    [_audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                                        completionHandler:^(NSError *error) {
                                                            beginAudioBlock(error);
                                                        }];
}

// This method must not change nullability of error, it should only either append info into userInfo,
// or return the NSError as it is.
- (NSError *)_appendInfo:(NSString *)infoStr forInfoKey:(NSString *)infoKey toError:(NSError *)error
{
    if (!error || infoStr.length == 0 || infoKey.length == 0 || error.domain.length == 0) {
        return error;
    }
    NSMutableDictionary *errorInfo = [[error userInfo] mutableCopy];
    errorInfo[infoKey] = infoStr.length > 0 ? infoStr : @"(null)";
    return [NSError errorWithDomain:error.domain code:error.code userInfo:errorInfo];
}

- (void)_retryRequestRecordingWithCompleteHandler:(audio_capture_session_block)block
{
    SCTraceStart();
    if (_audioConfiguration) {
        [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil];
    }
    SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
    _preparedAudioConfiguration = _audioConfiguration = [SCAudioSessionExperimentAdapter
        configureWith:_audioConfiguration.configuration
            performer:_performer
           completion:^(NSError *error) {
               if (error) {
                   [_delegate managedVideoCapturer:self
                                       didGetError:error
                                           forType:SCManagedVideoCapturerInfoAudioSessionError
                                           session:sessionInfo];
               }
               if (block) {
                   block(error);
               }
           }];
}

#pragma mark - SCCapturerBufferedVideoWriterDelegate

- (void)videoWriterDidFailWritingWithError:(NSError *)error
{
    // If it failed, we call the delegate method, release everything else we
    // have, well, on the output queue obviously
    SCTraceStart();
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
        [_outputURL reloadAssetKeys];
        [self _cleanup];
        [self _disposeAudioRecording];
        self.status = SCManagedVideoCapturerStatusError;
        _lastError = error;
        _placeholderImage = nil;
        [_delegate managedVideoCapturer:self
                            didGetError:error
                                forType:SCManagedVideoCapturerInfoAssetWriterError
                                session:sessionInfo];
        [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
    }];
}

- (void)_willStopRecording
{
    if (self.status == SCManagedVideoCapturerStatusRecording) {
        // Notify the UI to continue the preview processing
        SCQueuePerformer *promisePerformer =
            [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoCapturerPromiseQueueLabel
                                   qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                          queueType:DISPATCH_QUEUE_SERIAL
                                            context:SCQueuePerformerContextCamera];
        _recordedVideoPromise = [[SCPromise alloc] initWithPerformer:promisePerformer];
        [_delegate managedVideoCapturer:self
            willStopWithRecordedVideoFuture:_recordedVideoPromise.future
                                  videoSize:_outputSize
                           placeholderImage:_placeholderImage
                                    session:self.activeSession];
    }
}
    ++_stopSession;
    _stopTime = kCFAbsoluteTimeIntervalSince1970;
    SCPromise<id<SCManagedRecordedVideo>> *recordedVideoPromise = _recordedVideoPromise;
    _recordedVideoPromise = nil;
    sc_managed_capturer_recording_session_t sessionId = _sessionId;
    if (self.status == SCManagedVideoCapturerStatusRecording) {
        self.status = SCManagedVideoCapturerStatusIdle;
        if (CMTIME_IS_VALID(_endSessionTime)) {
            [_videoWriter finishWritingAtSourceTime:_endSessionTime
                              withCompletionHanlder:^{
                                  // Actually, make sure everything happens on the output queue
                                  [_performer performImmediatelyIfCurrentPerformer:^{
                                      if (sessionId != _sessionId) {
                                          SCLogVideoCapturerError(@"SessionId mismatch: before: %@, after: %@",
                                                                  @(sessionId), @(_sessionId));
                                          return;
                                      }
                                      [self _disposeAudioRecording];
                                      // Log the video snap recording success event w/ parameters,
                                      // not including video note
                                      if (_isVideoSnap) {
                                          [SnapVideoMetadata logVideoEvent:kSCCameraMetricsVideoRecordingSuccess
                                                             videoSettings:_videoOutputSettings
                                                                    isSave:NO];
                                      }
                                      void (^stopRecordingCompletionBlock)(NSURL *) = ^(NSURL *rawDataURL) {
                                          SCAssert([_performer isCurrentPerformer],
                                                   @"Needs to be on the performing queue");
                                          SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
                                          [self _cleanup];
                                          [[SCLogger sharedInstance] logTimedEventStart:@"SNAP_VIDEO_SIZE_LOADING"
                                                                               uniqueId:@""
                                                                          isUniqueEvent:NO];
                                          CGSize videoSize =
                                              [SnapVideoMetadata videoSizeForURL:_outputURL
                                                  waitWhileLoadingTracksIfNeeded:YES];
                                          [[SCLogger sharedInstance] logTimedEventEnd:@"SNAP_VIDEO_SIZE_LOADING"
                                                                             uniqueId:@""
                                                                           parameters:nil];
                                          // Log an error if the video file is not really ready
                                          if (videoSize.width == 0.0 || videoSize.height == 0.0) {
                                              _lastError =
                                                  [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                                                      code:kSCManagedVideoCapturerZeroVideoSize
                                                                  userInfo:nil];
                                              [recordedVideoPromise completeWithError:_lastError];
                                              [_delegate managedVideoCapturer:self
                                                             didFailWithError:_lastError
                                                                      session:sessionInfo];
                                              _placeholderImage = nil;
                                              return;
                                          }
                                          // If the video duration is too short, the future object will complete
                                          // with an error as well
                                          SCManagedRecordedVideo *recordedVideo =
                                              [[SCManagedRecordedVideo alloc] initWithVideoURL:_outputURL
                                                                           rawVideoDataFileURL:_rawDataURL
                                                                              placeholderImage:_placeholderImage
                                                                           isFrontFacingCamera:_isFrontFacingCamera];
                                          [recordedVideoPromise completeWithValue:recordedVideo];
                                          [_delegate managedVideoCapturer:self
                                              didSucceedWithRecordedVideo:recordedVideo
                                                                  session:sessionInfo];
                                          _placeholderImage = nil;
                                      };
                                      if (_videoFrameRawDataCollector) {
                                          [_videoFrameRawDataCollector
                                              drainFrameDataCollectionWithCompletionHandler:^(NSURL *rawDataURL) {
                                                  stopRecordingCompletionBlock(rawDataURL);
                                              }];
                                      } else {
                                          stopRecordingCompletionBlock(nil);
                                      }
                                  }];
                              }];
        } else {
            [self _disposeAudioRecording];
            SCVideoCaptureSessionInfo sessionInfo = self.activeSession;
            [self _cleanup];
            self.status = SCManagedVideoCapturerStatusError;
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerEmptyFrame
                                         userInfo:nil];
            _placeholderImage = nil;
            [recordedVideoPromise completeWithError:_lastError];
            [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo];
        }
    } else {
        if (self.status == SCManagedVideoCapturerStatusPrepareToRecord ||
            self.status == SCManagedVideoCapturerStatusReadyForRecording) {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopBeforeStart
                                         userInfo:nil];
        } else {
            _lastError = [NSError errorWithDomain:kSCManagedVideoCapturerErrorDomain
                                             code:kSCManagedVideoCapturerStopWithoutStart
                                         userInfo:nil];
        }
        SCVideoCaptureSessionInfo sessionInfo =
self.activeSession; [self _cleanup]; _placeholderImage = nil; if (_audioConfiguration) { [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; _audioConfiguration = nil; } [recordedVideoPromise completeWithError:_lastError]; [_delegate managedVideoCapturer:self didFailWithError:_lastError session:sessionInfo]; self.status = SCManagedVideoCapturerStatusIdle; [_capturerLogger logEventIfStartingTooSlow]; } } - (void)stopRecordingAsynchronously { SCTraceStart(); NSTimeInterval stopTime = CACurrentMediaTime(); [_performer performImmediatelyIfCurrentPerformer:^{ _stopTime = stopTime; NSInteger stopSession = _stopSession; [self _willStopRecording]; [_performer perform:^{ // If we haven't stopped yet, call the stop now nevertheless. if (stopSession == _stopSession) { [self _stopRecording]; } } after:kSCManagedVideoCapturerStopRecordingDeadline]; }]; } - (void)cancelRecordingAsynchronously { SCTraceStart(); [_performer performImmediatelyIfCurrentPerformer:^{ SCTraceStart(); SCLogVideoCapturerInfo(@"Cancel recording. status: %lu", (unsigned long)self.status); if (self.status == SCManagedVideoCapturerStatusRecording) { self.status = SCManagedVideoCapturerStatusIdle; [self _disposeAudioRecording]; [_videoWriter cancelWriting]; SCVideoCaptureSessionInfo sessionInfo = self.activeSession; [self _cleanup]; _placeholderImage = nil; [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; } else if ((self.status == SCManagedVideoCapturerStatusPrepareToRecord) || (self.status == SCManagedVideoCapturerStatusReadyForRecording)) { SCVideoCaptureSessionInfo sessionInfo = self.activeSession; [self _cleanup]; self.status = SCManagedVideoCapturerStatusIdle; _placeholderImage = nil; if (_audioConfiguration) { [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; _audioConfiguration = nil; } [_delegate managedVideoCapturer:self didCancelVideoRecording:sessionInfo]; } [_capturerLogger logEventIfStartingTooSlow]; }]; } - (void)addTimedTask:(SCTimedTask *)task { [_performer performImmediatelyIfCurrentPerformer:^{ // Only allow to add observers when we are not recording. 
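        // A usage sketch for timed tasks; the SCTimedTask initializer shown here is assumed
        // (only targetTime and the task block are visible in this file set). The block fires on
        // the main thread once the recording's relative presentation time passes targetTime
        // (see SCManagedVideoCapturerTimeObserver):
        //
        //   SCTimedTask *task =
        //       [[SCTimedTask alloc] initWithTargetTime:CMTimeMake(3, 1) // hypothetical initializer
        //                                          task:^(CMTime time, CGFloat sessionStartTimeDelay) {
        //                                              // e.g. update recording UI at the 3-second mark
        //                                          }];
        //   [videoCapturer addTimedTask:task];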
if (!self->_timeObserver) { self->_timeObserver = [SCManagedVideoCapturerTimeObserver new]; } [self->_timeObserver addTimedTask:task]; SCLogVideoCapturerInfo(@"Added timetask: %@", task); }]; } - (void)clearTimedTasks { // _timeObserver will be initialized lazily when adding timed tasks SCLogVideoCapturerInfo(@"Clearing time observer"); [_performer performImmediatelyIfCurrentPerformer:^{ if (self->_timeObserver) { self->_timeObserver = nil; } }]; } - (void)_cleanup { [_videoWriter cleanUp]; _timeObserver = nil; SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo before cleanup: %@", SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession)); _startSessionTime = kCMTimeInvalid; _endSessionTime = kCMTimeInvalid; _firstWrittenAudioBufferDelay = kCMTimeInvalid; _sessionId = 0; _captureSessionID = nil; _audioQueueStarted = NO; } - (void)_disposeAudioRecording { SCLogVideoCapturerInfo(@"Disposing audio recording"); SCAssert([_performer isCurrentPerformer], @""); // Setup the audio session token correctly SCAudioConfigurationToken *audioConfiguration = _audioConfiguration; [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay uniqueId:_captureSessionID stepName:@"audio_queue_stop_begin"]; NSString *captureSessionID = _captureSessionID; [_audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:^{ [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay uniqueId:captureSessionID stepName:@"audio_queue_stop_end"]; SCLogVideoCapturerInfo(@"Did dispose audio recording"); if (audioConfiguration) { [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay uniqueId:captureSessionID stepName:@"audio_session_stop_begin"]; [SCAudioSessionExperimentAdapter relinquishConfiguration:audioConfiguration performer:_performer completion:^(NSError *_Nullable error) { [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsAudioDelay uniqueId:captureSessionID stepName:@"audio_session_stop_end"]; [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsAudioDelay uniqueId:captureSessionID parameters:nil]; }]; } }]; _audioConfiguration = nil; } - (CIContext *)ciContext { if (!_ciContext) { _ciContext = [CIContext contextWithOptions:nil]; } return _ciContext; } #pragma mark - SCAudioCaptureSessionDelegate - (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer { SCTraceStart(); if (self.status != SCManagedVideoCapturerStatusRecording) { return; } CFRetain(sampleBuffer); [_performer performImmediatelyIfCurrentPerformer:^{ if (self.status == SCManagedVideoCapturerStatusRecording) { // Audio always follows video, there is no other way around this :) if (_hasWritten && CACurrentMediaTime() - _recordStartTime <= _maxDuration) { [self _processAudioSampleBuffer:sampleBuffer]; [_videoWriter appendAudioSampleBuffer:sampleBuffer]; } } CFRelease(sampleBuffer); }]; } #pragma mark - SCManagedVideoDataSourceListener - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceStart(); if (self.status != SCManagedVideoCapturerStatusRecording) { return; } CFRetain(sampleBuffer); [_performer performImmediatelyIfCurrentPerformer:^{ // the following check will allow the capture pipeline to drain if (CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) > _stopTime) { [self _stopRecording]; } else { if (self.status == SCManagedVideoCapturerStatusRecording) { 
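                // Note: the first video buffer handled below is what starts the asset writer
                // session (_hasWritten flips to YES there), and -audioCaptureSession:didOutputSampleBuffer:
                // above drops audio buffers until that happens, so the session timeline is always
                // anchored to the first video frame.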
                _isFrontFacingCamera = (devicePosition == SCManagedCaptureDevicePositionFront);
                CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                if (CMTIME_IS_VALID(presentationTime)) {
                    SCLogVideoCapturerInfo(@"Obtained video data source at time %lld", presentationTime.value);
                } else {
                    SCLogVideoCapturerInfo(@"Obtained video data source with an invalid time");
                }
                if (!_hasWritten) {
                    // Start writing!
                    [_videoWriter startWritingAtSourceTime:presentationTime];
                    [_capturerLogger endLoggingForStarting];
                    _startSessionTime = presentationTime;
                    _startSessionRealTime = CACurrentMediaTime();
                    SCLogVideoCapturerInfo(@"First frame processed %f seconds after presentation time",
                                           _startSessionRealTime - CMTimeGetSeconds(presentationTime));
                    _hasWritten = YES;
                    [[SCLogger sharedInstance]
                        logPreCaptureOperationFinishedAt:CMTimeGetSeconds(presentationTime)];
                    [[SCCoreCameraLogger sharedInstance]
                        logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CMTimeGetSeconds(
                                                                                          presentationTime)];
                    SCLogVideoCapturerInfo(@"SCVideoCaptureSessionInfo after first frame: %@",
                                           SCVideoCaptureSessionInfoGetDebugDescription(self.activeSession));
                }
                // Only respect the video end session time; audio can be cut off, but not video.
                if (CMTIME_IS_INVALID(_endSessionTime)) {
                    _endSessionTime = presentationTime;
                } else {
                    _endSessionTime = CMTimeMaximum(_endSessionTime, presentationTime);
                }
                if (CACurrentMediaTime() - _recordStartTime <= _maxDuration) {
                    [_videoWriter appendVideoSampleBuffer:sampleBuffer];
                    [self _processVideoSampleBuffer:sampleBuffer];
                }
                if (_timeObserver) {
                    [_timeObserver processTime:CMTimeSubtract(presentationTime, _startSessionTime)
                        sessionStartTimeDelayInSecond:_startSessionRealTime - CMTimeGetSeconds(_startSessionTime)];
                }
            }
        }
        CFRelease(sampleBuffer);
    }];
}

- (void)_generatePlaceholderImageWithPixelBuffer:(CVImageBufferRef)pixelBuffer metaData:(NSDictionary *)metadata
{
    SCTraceStart();
    CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer);
    if (imageBuffer) {
        dispatch_async(SCPlaceholderImageGenerationQueue(), ^{
            UIImage *placeholderImage = [UIImage imageWithPixelBufferRef:imageBuffer
                                                             backingType:UIImageBackingTypeCGImage
                                                             orientation:UIImageOrientationRight
                                                                 context:[self ciContext]];
            placeholderImage =
                SCCropImageToTargetAspectRatio(placeholderImage, SCManagedCapturedImageAndVideoAspectRatio());
            [_performer performImmediatelyIfCurrentPerformer:^{
                // After processing, assign it back.
if (self.status == SCManagedVideoCapturerStatusRecording) { _placeholderImage = placeholderImage; // Check video frame health by placeholder image [[SCManagedFrameHealthChecker sharedInstance] checkVideoHealthForCaptureFrameImage:placeholderImage metedata:metadata captureSessionID:_captureSessionID]; } CVPixelBufferRelease(imageBuffer); }]; }); } } #pragma mark - Pixel Buffer methods - (void)_processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer { SC_GUARD_ELSE_RETURN(sampleBuffer); CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); BOOL shouldGeneratePlaceholderImage = CMTimeCompare(presentationTime, _startSessionTime) == 0; CVImageBufferRef outputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); if (outputPixelBuffer) { [self _addVideoRawDataWithPixelBuffer:outputPixelBuffer]; if (shouldGeneratePlaceholderImage) { NSDictionary *extraInfo = [_delegate managedVideoCapturerGetExtraFrameHealthInfo:self]; NSDictionary *metadata = [[[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer extraInfo:extraInfo] copy]; [self _generatePlaceholderImageWithPixelBuffer:outputPixelBuffer metaData:metadata]; } } [_delegate managedVideoCapturer:self didAppendVideoSampleBuffer:sampleBuffer presentationTimestamp:CMTimeSubtract(presentationTime, _startSessionTime)]; } - (void)_processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer { [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer]; if (!CMTIME_IS_VALID(self.firstWrittenAudioBufferDelay)) { self.firstWrittenAudioBufferDelay = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), _startSessionTime); } } - (void)_addVideoRawDataWithPixelBuffer:(CVImageBufferRef)pixelBuffer { if (_videoFrameRawDataCollector && [SnapVideoMetadata deviceMeetsRequirementsForContentAdaptiveVideoEncoding] && ((_rawDataFrameNum % kSCVideoContentComplexitySamplingRate) == 0) && (_rawDataFrameNum > 0)) { if (_videoFrameRawDataCollector) { CVImageBufferRef imageBuffer = CVPixelBufferRetain(pixelBuffer); [_videoFrameRawDataCollector collectVideoFrameRawDataWithImageBuffer:imageBuffer frameNum:_rawDataFrameNum completion:^{ CVPixelBufferRelease(imageBuffer); }]; } } _rawDataFrameNum++; } #pragma mark - SCManagedAudioDataSource - (void)addListener:(id)listener { [_announcer addListener:listener]; } - (void)removeListener:(id)listener { [_announcer removeListener:listener]; } - (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration { SCAssertFail(@"Controlled by recorder"); } - (void)stopStreaming { SCAssertFail(@"Controlled by recorder"); } - (BOOL)isStreaming { return self.status == SCManagedVideoCapturerStatusRecording; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerHandler.h ================================================ // // SCManagedVideoCapturerHandler.h // Snapchat // // Created by Jingtian Yang on 11/12/2017. // #import "SCManagedVideoCapturer.h" #import @class SCCaptureResource; @interface SCManagedVideoCapturerHandler : NSObject - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerHandler.m ================================================ // // SCManagedVideoCapturerHandler.m // Snapchat // // Created by Jingtian Yang on 11/12/2017. 
// #import "SCManagedVideoCapturerHandler.h" #import "SCCaptureResource.h" #import "SCManagedCaptureDevice+SCManagedCapturer.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerLensAPI.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerSampleMetadata.h" #import "SCManagedCapturerState.h" #import "SCManagedDeviceCapacityAnalyzer.h" #import "SCManagedFrontFlashController.h" #import "SCManagedVideoFileStreamer.h" #import "SCManagedVideoFrameSampler.h" #import "SCManagedVideoStreamer.h" #import #import #import #import #import @interface SCManagedVideoCapturerHandler () { __weak SCCaptureResource *_captureResource; } @end @implementation SCManagedVideoCapturerHandler - (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource { self = [super init]; if (self) { SCAssert(captureResource, @""); _captureResource = captureResource; } return self; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did begin video recording. sessionId:%u", sessionInfo.sessionId); [_captureResource.queuePerformer perform:^{ SCTraceStart(); SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didBeginVideoRecording:state session:sessionInfo]; }); }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId); [_captureResource.queuePerformer perform:^{ if ([_captureResource.fileInputDecider shouldProcessFileInput]) { [_captureResource.videoDataSource startStreaming]; } SCTraceStart(); SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didBeginAudioRecording:state session:sessionInfo]; }); }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer willStopWithRecordedVideoFuture:(SCFuture> *)recordedVideoFuture videoSize:(CGSize)videoSize placeholderImage:(UIImage *)placeholderImage session:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Will stop recording. sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)", sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.videoRecording) { SCManagedCapturerState *state = [_captureResource.state copy]; // Then, sync back to main thread to notify will finish recording runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] willFinishRecording:state session:sessionInfo recordedVideoFuture:recordedVideoFuture videoSize:videoSize placeholderImage:placeholderImage]; }); } }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo session:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did succeed recording. 
sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.videoRecording) { [self _videoRecordingCleanup]; SCManagedCapturerState *state = [_captureResource.state copy]; // Then, sync back to main thread to notify the finish recording runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didFinishRecording:state session:sessionInfo recordedVideo:recordedVideo]; }); } }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didFailWithError:(NSError *)error session:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did fail recording. sessionId:%u", sessionInfo.sessionId); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.videoRecording) { [self _videoRecordingCleanup]; SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didFailRecording:state session:sessionInfo error:error]; }); } }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId); [_captureResource.queuePerformer perform:^{ SCTraceStart(); if (_captureResource.videoRecording) { [self _videoRecordingCleanup]; SCManagedCapturerState *state = [_captureResource.state copy]; runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCancelRecording:state session:sessionInfo]; }); } }]; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didGetError:(NSError *)error forType:(SCManagedVideoCapturerInfoType)type session:(SCVideoCaptureSessionInfo)sessionInfo { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Did get error. 
sessionId:%u errorType:%lu, error:%@", sessionInfo.sessionId, (long)type, error); [_captureResource.queuePerformer perform:^{ runOnMainThreadAsynchronously(^{ [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didGetError:error forType:type session:sessionInfo]; }); }]; } - (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer { SCTraceODPCompatibleStart(2); if (_captureResource.state.lensesActive) { return @{ @"lens_active" : @(YES), @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null]) }; } return nil; } - (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer presentationTimestamp:(CMTime)presentationTimestamp { CFRetain(sampleBuffer); [_captureResource.queuePerformer perform:^{ SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp fieldOfView:_captureResource.device.fieldOfView]; [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didAppendVideoSampleBuffer:sampleBuffer sampleMetadata:sampleMetadata]; CFRelease(sampleBuffer); }]; } - (void)_videoRecordingCleanup { SCTraceODPCompatibleStart(2); SCAssert(_captureResource.videoRecording, @"clean up function only can be called if the " @"video recording is still in progress."); SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); SCLogCapturerInfo(@"Video recording cleanup. previous state:%@", _captureResource.state); [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; if (_captureResource.videoFrameSampler) { SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler; _captureResource.videoFrameSampler = nil; [_captureResource.announcer removeListener:sampler]; } // Add back other listeners to video streamer [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; if (!_captureResource.state.torchActive) { // We should turn off torch for the device that we specifically turned on // for recording [_captureResource.device setTorchActive:NO]; if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { _captureResource.frontFlashController.torchActive = NO; } } // Unlock focus on both front and back camera if they were locked. // Even if ARKit was being used during recording, it'll be shut down by the time we get here // So DON'T match the ARKit check we use around [_ setRecording:YES] SCManagedCaptureDevice *front = [SCManagedCaptureDevice front]; SCManagedCaptureDevice *back = [SCManagedCaptureDevice back]; [front setRecording:NO]; [back setRecording:NO]; _captureResource.videoRecording = NO; if (_captureResource.state.lensesActive) { BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming; [_captureResource.lensProcessingCore setModifySource:modifySource]; } } @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerLogger.h ================================================ // // SCCaptureLogger.h // Snapchat // // Created by Pinlin on 12/04/2017. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
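// A minimal usage sequence for this logger, assuming one capturer-owned instance per
// recording start; each step is timed relative to the previous checkpoint and reported
// as a per-step delay in the kSCCameraMetricsVideoCapturerStartDelay event:
//
//   [capturerLogger prepareForStartingLog];                                 // reset checkpoints
//   [capturerLogger logStartingStep:kSCCapturerStartingStepAudioSession];   // after audio session setup
//   [capturerLogger logStartingStep:kSCCapturerStartingStepOutputSettings]; // after output settings
//   [capturerLogger endLoggingForStarting];                                 // records start_writing + total_delay
//   [capturerLogger logEventIfStartingTooSlow];                             // emits the metric once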
// #import static NSString *const kSCCapturerStartingStepAudioSession = @"audio_session"; static NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @"transcoding_video_bitrate"; static NSString *const kSCCapturerStartingStepOutputSettings = @"output_settings"; static NSString *const kSCCapturerStartingStepVideoFrameRawData = @"video_frame_raw_data"; static NSString *const kSCCapturerStartingStepAudioRecording = @"audio_recording"; static NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @"asset_writer_config"; static NSString *const kSCCapturerStartingStepStartingWriting = @"start_writing"; static NSString *const kCapturerStartingTotalDelay = @"total_delay"; @interface SCManagedVideoCapturerLogger : NSObject - (void)prepareForStartingLog; - (void)logStartingStep:(NSString *)stepName; - (void)endLoggingForStarting; - (void)logEventIfStartingTooSlow; @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerLogger.m ================================================ // // SCManagedVideoCapturerLogger.m // Snapchat // // Created by Pinlin on 12/04/2017. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedVideoCapturerLogger.h" #import #import #import #import @import QuartzCore; @interface SCManagedVideoCapturerLogger () { // For time profiles metric during start recording NSMutableDictionary *_startingStepsDelayTime; NSTimeInterval _beginStartTime; NSTimeInterval _lastCheckpointTime; NSTimeInterval _startedTime; } @end @implementation SCManagedVideoCapturerLogger - (instancetype)init { self = [super init]; if (self) { _startingStepsDelayTime = [NSMutableDictionary dictionary]; } return self; } - (void)prepareForStartingLog { _beginStartTime = CACurrentMediaTime(); _lastCheckpointTime = _beginStartTime; [_startingStepsDelayTime removeAllObjects]; } - (void)logStartingStep:(NSString *)stepname { SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); NSTimeInterval currentCheckpointTime = CACurrentMediaTime(); _startingStepsDelayTime[stepname] = @(currentCheckpointTime - _lastCheckpointTime); _lastCheckpointTime = currentCheckpointTime; } - (void)endLoggingForStarting { SCAssert(_beginStartTime > 0, @"logger is not ready yet, please call prepareForStartingLog at first"); _startedTime = CACurrentMediaTime(); [self logStartingStep:kSCCapturerStartingStepStartingWriting]; _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime); } - (void)logEventIfStartingTooSlow { if (_beginStartTime > 0) { if (_startingStepsDelayTime.count == 0) { // It should not be here. We only need to log once. return; } SCLogGeneralWarning(@"Capturer starting delay(in second):%f", _startedTime - _beginStartTime); [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime]; // Clean all delay times after logging [_startingStepsDelayTime removeAllObjects]; _beginStartTime = 0; } } @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerOutputSettings.h ================================================ // 42f6113daff3eebf06d809a073c99651867c42ea // Generated by the value-object.rb DO NOT EDIT!! 
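// A construction sketch for this generated value object; the dimensions and bitrates are
// illustrative placeholders, not values taken from this code:
//
//   SCManagedVideoCapturerOutputSettings *settings =
//       [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:720
//                                                            height:1280
//                                                      videoBitRate:5000000
//                                                      audioBitRate:128000
//                                                  keyFrameInterval:30
//                                                        outputType:SCManagedVideoCapturerOutputTypeVideoSnap];
//
// The object is immutable, which is why -copyWithZone: below simply returns self.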
#import "SCManagedVideoCapturerOutputType.h" #import #import #import @protocol SCManagedVideoCapturerOutputSettings @property (nonatomic, assign, readonly) CGFloat width; @property (nonatomic, assign, readonly) CGFloat height; @property (nonatomic, assign, readonly) CGFloat videoBitRate; @property (nonatomic, assign, readonly) CGFloat audioBitRate; @property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; @property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; @end @interface SCManagedVideoCapturerOutputSettings : NSObject @property (nonatomic, assign, readonly) CGFloat width; @property (nonatomic, assign, readonly) CGFloat height; @property (nonatomic, assign, readonly) CGFloat videoBitRate; @property (nonatomic, assign, readonly) CGFloat audioBitRate; @property (nonatomic, assign, readonly) NSUInteger keyFrameInterval; @property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType; - (instancetype)initWithWidth:(CGFloat)width height:(CGFloat)height videoBitRate:(CGFloat)videoBitRate audioBitRate:(CGFloat)audioBitRate keyFrameInterval:(NSUInteger)keyFrameInterval outputType:(SCManagedVideoCapturerOutputType)outputType; @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerOutputSettings.m ================================================ // 42f6113daff3eebf06d809a073c99651867c42ea // Generated by the value-object.rb DO NOT EDIT!! #import "SCManagedVideoCapturerOutputSettings.h" #import #import @implementation SCManagedVideoCapturerOutputSettings static ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0]; static BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets; - (instancetype)initWithWidth:(CGFloat)width height:(CGFloat)height videoBitRate:(CGFloat)videoBitRate audioBitRate:(CGFloat)audioBitRate keyFrameInterval:(NSUInteger)keyFrameInterval outputType:(SCManagedVideoCapturerOutputType)outputType { self = [super init]; if (self) { _width = width; _height = height; _videoBitRate = videoBitRate; _audioBitRate = audioBitRate; _keyFrameInterval = keyFrameInterval; _outputType = outputType; } return self; } #pragma mark - NSCopying - (instancetype)copyWithZone:(NSZone *)zone { // Immutable object, bypass copy return self; } #pragma mark - NSCoding - (instancetype)initWithCoder:(NSCoder *)aDecoder { self = [super init]; if (self) { _width = [aDecoder decodeFloatForKey:@"width"]; _height = [aDecoder decodeFloatForKey:@"height"]; _videoBitRate = [aDecoder decodeFloatForKey:@"videoBitRate"]; _audioBitRate = [aDecoder decodeFloatForKey:@"audioBitRate"]; _keyFrameInterval = [[aDecoder decodeObjectForKey:@"keyFrameInterval"] unsignedIntegerValue]; _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@"outputType"]; } return self; } - (void)encodeWithCoder:(NSCoder *)aCoder { [aCoder encodeFloat:_width forKey:@"width"]; [aCoder encodeFloat:_height forKey:@"height"]; [aCoder encodeFloat:_videoBitRate forKey:@"videoBitRate"]; [aCoder encodeFloat:_audioBitRate forKey:@"audioBitRate"]; [aCoder encodeObject:@(_keyFrameInterval) forKey:@"keyFrameInterval"]; [aCoder encodeInteger:(NSInteger)_outputType forKey:@"outputType"]; } #pragma mark - FasterCoding - (BOOL)preferFasterCoding { return YES; } - (void)encodeWithFasterCoder:(id)fasterCoder { [fasterCoder encodeFloat64:_audioBitRate]; [fasterCoder encodeFloat64:_height]; [fasterCoder encodeUInt64:_keyFrameInterval]; [fasterCoder encodeSInt32:_outputType]; [fasterCoder encodeFloat64:_videoBitRate]; [fasterCoder 
encodeFloat64:_width]; } - (void)decodeWithFasterDecoder:(id)fasterDecoder { _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64]; _height = (CGFloat)[fasterDecoder decodeFloat64]; _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64]; _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32]; _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64]; _width = (CGFloat)[fasterDecoder decodeFloat64]; } - (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key { switch (key) { case 54425104364133881ULL: _outputType = (SCManagedVideoCapturerOutputType)val; break; } } - (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key { switch (key) { case 47327990652274883ULL: _keyFrameInterval = (NSUInteger)val; break; } } - (void)setFloat64:(double)val forUInt64Key:(uint64_t)key { switch (key) { case 50995534680662654ULL: _audioBitRate = (CGFloat)val; break; case 11656660716170763ULL: _height = (CGFloat)val; break; case 29034524155663716ULL: _videoBitRate = (CGFloat)val; break; case 30689178641753681ULL: _width = (CGFloat)val; break; } } + (uint64_t)fasterCodingVersion { return 14709152111692666517ULL; } + (uint64_t *)fasterCodingKeys { static uint64_t keys[] = { 6 /* Total */, FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64), FC_ENCODE_KEY_TYPE(11656660716170763, FCEncodeTypeFloat64), FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64), FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32), FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64), FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64), }; return keys; } #pragma mark - isEqual - (BOOL)isEqual:(id)object { if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets, sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) { return NO; } SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object; if (other->_width != _width) { return NO; } if (other->_height != _height) { return NO; } if (other->_videoBitRate != _videoBitRate) { return NO; } if (other->_audioBitRate != _audioBitRate) { return NO; } if (other->_keyFrameInterval != _keyFrameInterval) { return NO; } if (other->_outputType != _outputType) { return NO; } return YES; } - (NSUInteger)hash { NSUInteger subhashes[] = {(NSUInteger)_width, (NSUInteger)_height, (NSUInteger)_videoBitRate, (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType}; NSUInteger result = subhashes[0]; for (int i = 1; i < 6; i++) { unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); base = (~base) + (base << 18); base ^= (base >> 31); base *= 21; base ^= (base >> 11); base += (base << 6); base ^= (base >> 22); result = (NSUInteger)base; } return result; } #pragma mark - Print description in console: lldb> po #{variable name} - (NSString *)description { NSMutableString *desc = [NSMutableString string]; [desc appendString:@"{\n"]; [desc appendFormat:@"\twidth:%@\n", [@(_width) description]]; [desc appendFormat:@"\theight:%@\n", [@(_height) description]]; [desc appendFormat:@"\tvideoBitRate:%@\n", [@(_videoBitRate) description]]; [desc appendFormat:@"\taudioBitRate:%@\n", [@(_audioBitRate) description]]; [desc appendFormat:@"\tkeyFrameInterval:%@\n", [@(_keyFrameInterval) description]]; [desc appendFormat:@"\toutputType:%@\n", [@(_outputType) description]]; [desc appendString:@"}\n"]; return [desc copy]; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerOutputSettings.value 
================================================ #import "SCManagedVideoCapturerOutputType.h" interface SCManagedVideoCapturerOutputSettings CGFloat width CGFloat height CGFloat videoBitRate CGFloat audioBitRate NSUInteger keyFrameInterval enum SCManagedVideoCapturerOutputType outputType end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerOutputType.h ================================================ // // SCManagedVideoCapturerOutputType.h // Snapchat // // Created by Chao Pang on 8/8/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import typedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) { SCManagedVideoCapturerOutputTypeVideoSnap = 0, SCManagedVideoCapturerOutputTypeVideoNote, }; ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerTimeObserver.h ================================================ // // SCManagedVideoCapturerTimeObserver.h // Snapchat // // Created by Michel Loenngren on 4/3/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import #import @class SCTimedTask; /* Class keeping track of SCTimedTasks and firing them on the main thread when needed. */ @interface SCManagedVideoCapturerTimeObserver : NSObject - (void)addTimedTask:(SCTimedTask *_Nonnull)task; - (void)processTime:(CMTime)relativePresentationTime sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond; @end ================================================ FILE: ManagedCapturer/SCManagedVideoCapturerTimeObserver.m ================================================ // // SCManagedVideoCapturerTimeObserver.m // Snapchat // // Created by Michel Loenngren on 4/3/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedVideoCapturerTimeObserver.h" #import "SCTimedTask.h" #import #import #import @implementation SCManagedVideoCapturerTimeObserver { NSMutableArray *_tasks; BOOL _isProcessing; } - (instancetype)init { if (self = [super init]) { _tasks = [NSMutableArray new]; _isProcessing = NO; } return self; } - (void)addTimedTask:(SCTimedTask *_Nonnull)task { SCAssert(!_isProcessing, @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started."); SCAssert(CMTIME_IS_VALID(task.targetTime), @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time."); [_tasks addObject:task]; [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) { return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime); }]; SCLogGeneralInfo(@"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu", task, (unsigned long)_tasks.count); } - (void)processTime:(CMTime)relativePresentationTime sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond { _isProcessing = YES; SCTimedTask *last = _tasks.lastObject; while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) { [_tasks removeLastObject]; void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task; last.task = nil; runOnMainThreadAsynchronously(^{ task(relativePresentationTime, sessionStartTimeDelayInSecond); }); last = _tasks.lastObject; } } @end ================================================ FILE: ManagedCapturer/SCManagedVideoFileStreamer.h ================================================ // // SCManagedVideoFileStreamer.h // Snapchat // // Created by Alexander Grytsiuk on 3/4/16. // Copyright © 2016 Snapchat, Inc. 
All rights reserved. // #import #import #import typedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer); /** * SCManagedVideoFileStreamer reads a video file from provided NSURL to create * and publish video output frames. SCManagedVideoFileStreamer also conforms * to SCManagedVideoDataSource allowing chained consumption of video frames. */ @interface SCManagedVideoFileStreamer : NSObject - (instancetype)initWithPlaybackForURL:(NSURL *)URL; - (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion; @end ================================================ FILE: ManagedCapturer/SCManagedVideoFileStreamer.m ================================================ // // SCManagedVideoFileStreamer.m // Snapchat // // Created by Alexander Grytsiuk on 3/4/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCManagedVideoFileStreamer.h" #import "SCManagedCapturePreviewLayerController.h" #import #import #import #import #import @import AVFoundation; @import CoreMedia; static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer"; @interface SCManagedVideoFileStreamer () @end @implementation SCManagedVideoFileStreamer { SCManagedVideoDataSourceListenerAnnouncer *_announcer; SCManagedCaptureDevicePosition _devicePosition; sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler; id _notificationToken; id _performer; dispatch_semaphore_t _semaphore; CADisplayLink *_displayLink; AVPlayerItemVideoOutput *_videoOutput; AVPlayer *_player; BOOL _sampleBufferDisplayEnabled; id _sampleBufferDisplayController; } @synthesize isStreaming = _isStreaming; @synthesize performer = _performer; @synthesize videoOrientation = _videoOrientation; - (instancetype)initWithPlaybackForURL:(NSURL *)URL { SCTraceStart(); self = [super init]; if (self) { _videoOrientation = AVCaptureVideoOrientationLandscapeRight; _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; _semaphore = dispatch_semaphore_create(1); _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel qualityOfService:QOS_CLASS_UNSPECIFIED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextStories]; // Setup CADisplayLink which will callback displayPixelBuffer: at every vsync. 
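        // The display link drives -displayLinkCallback: on the main run loop. Each tick maps
        // the next vsync's host time to player-item time and, when AVPlayerItemVideoOutput has
        // a new pixel buffer for that time, copies it and republishes it to listeners as a
        // CMSampleBuffer. The semaphore created above acts as a try-lock so a busy consumer
        // drops ticks instead of queuing them.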
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)]; [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode]; [_displayLink setPaused:YES]; // Prepare player _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL]; #if TARGET_IPHONE_SIMULATOR _player.volume = 0.0; #endif // Configure output [self configureOutput]; } return self; } - (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController { _sampleBufferDisplayController = sampleBufferDisplayController; } - (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled { _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); } - (void)setKeepLateFrames:(BOOL)keepLateFrames { // Do nothing } - (BOOL)getKeepLateFrames { // return default NO value return NO; } - (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler { SCAssert(queue, @"callback queue must be provided"); SCAssert(completionHandler, @"completion handler must be provided"); dispatch_async(queue, completionHandler); } - (void)startStreaming { SCTraceStart(); if (!_isStreaming) { _isStreaming = YES; [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem]; [_player play]; } } - (void)stopStreaming { SCTraceStart(); if (_isStreaming) { _isStreaming = NO; [_player pause]; [self removePlayerObservers]; } } - (void)pauseStreaming { [self stopStreaming]; } - (void)addListener:(id)listener { SCTraceStart(); [_announcer addListener:listener]; } - (void)removeListener:(id)listener { SCTraceStart(); [_announcer removeListener:listener]; } - (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition { _devicePosition = devicePosition; } - (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition { _devicePosition = devicePosition; } - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { _videoOrientation = videoOrientation; } - (void)removeAsOutput:(AVCaptureSession *)session { // Ignored } - (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported { // Ignored } - (void)beginConfiguration { // Ignored } - (void)commitConfiguration { // Ignored } - (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest { // Ignored } #pragma mark - AVPlayerItemOutputPullDelegate - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender { if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) { [self configureOutput]; } [_displayLink setPaused:NO]; } #pragma mark - Internal - (void)displayLinkCallback:(CADisplayLink *)sender { CFTimeInterval nextVSync = [sender timestamp] + [sender duration]; CMTime time = [_videoOutput itemTimeForHostTime:nextVSync]; if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) { [_performer perform:^{ if ([_videoOutput hasNewPixelBufferForItemTime:time]) { CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL]; if (pixelBuffer != NULL) { if (_nextPixelBufferHandler) { _nextPixelBufferHandler(pixelBuffer); _nextPixelBufferHandler = nil; } else { CMSampleBufferRef sampleBuffer = [self createSampleBufferFromPixelBuffer:pixelBuffer presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)]; if (sampleBuffer) { if (_sampleBufferDisplayEnabled) { [_sampleBufferDisplayController 
enqueueSampleBuffer:sampleBuffer]; } [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition]; CFRelease(sampleBuffer); } } CVBufferRelease(pixelBuffer); } } dispatch_semaphore_signal(_semaphore); }]; } } - (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time { CMSampleBufferRef sampleBuffer = NULL; CMVideoFormatDescriptionRef formatDesc = NULL; OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); if (err != noErr) { return NULL; } CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid}; CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc, &sampleTimingInfo, &sampleBuffer); CFRelease(formatDesc); return sampleBuffer; } - (void)configureOutput { // Remove old output if (_videoOutput) { [[_player currentItem] removeOutput:_videoOutput]; } // Setup AVPlayerItemVideoOutput with the required pixelbuffer attributes. _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{ (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }]; _videoOutput.suppressesPlayerRendering = YES; [_videoOutput setDelegate:self queue:_performer.queue]; // Add new output [[_player currentItem] addOutput:_videoOutput]; [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0]; } - (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion { _nextPixelBufferHandler = completion; } - (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item { if (_notificationToken) { _notificationToken = nil; } _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; _notificationToken = [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification object:item queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification *note) { [[_player currentItem] seekToTime:kCMTimeZero]; }]; } - (void)removePlayerObservers { if (_notificationToken) { [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken name:AVPlayerItemDidPlayToEndTimeNotification object:_player.currentItem]; _notificationToken = nil; } } @end ================================================ FILE: ManagedCapturer/SCManagedVideoFrameSampler.h ================================================ // // SCManagedVideoFrameSampler.h // Snapchat // // Created by Michel Loenngren on 3/10/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedCapturerListener.h" #import /** Allows consumer to register a block to sample the next CMSampleBufferRef and automatically leverages Core image to convert the pixel buffer to a UIImage. Returned image will be a copy. */ @interface SCManagedVideoFrameSampler : NSObject - (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock; @end ================================================ FILE: ManagedCapturer/SCManagedVideoFrameSampler.m ================================================ // // SCManagedVideoFrameSampler.m // Snapchat // // Created by Michel Loenngren on 3/10/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
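// One-shot usage sketch: the sampler is registered as a capturer listener elsewhere (see
// SCCaptureResource.videoFrameSampler in SCManagedVideoCapturerHandler), and the block
// samples only the next appended frame:
//
//   [frameSampler sampleNextFrame:^(UIImage *frame, CMTime presentationTime) {
//       // frame is a UIImage copy of the next video sample buffer, or nil if no
//       // image buffer could be read
//   }];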
// #import "SCManagedVideoFrameSampler.h" #import #import @import CoreImage; @import ImageIO; @interface SCManagedVideoFrameSampler () @property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime); @property (nonatomic, strong) CIContext *ciContext; @end @implementation SCManagedVideoFrameSampler - (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock { _frameSampleBlock = completeBlock; } #pragma mark - SCManagedCapturerListener - (void)managedCapturer:(id)managedCapturer didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata { void (^block)(UIImage *, CMTime) = _frameSampleBlock; _frameSampleBlock = nil; if (!block) { return; } CVImageBufferRef cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); UIImage *image; if (cvImageBuffer) { CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(cvImageBuffer); image = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight]; CGImageRelease(cgImage); } runOnMainThreadAsynchronously(^{ block(image, presentationTime); }); } - (CIContext *)ciContext { if (!_ciContext) { _ciContext = [CIContext context]; } return _ciContext; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoNoSoundLogger.h ================================================ // // SCManagedVideoNoSoundLogger.h // Snapchat // // Created by Pinlin Chen on 15/07/2017. // // #import #import #import @protocol SCManiphestTicketCreator; @interface SCManagedVideoNoSoundLogger : NSObject @property (nonatomic, strong) NSError *audioSessionError; @property (nonatomic, strong) NSError *audioQueueError; @property (nonatomic, strong) NSError *assetWriterError; @property (nonatomic, assign) BOOL retryAudioQueueSuccess; @property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource; @property (nonatomic, strong) NSString *brokenMicCodeType; @property (nonatomic, assign) BOOL lenseActiveWhileRecording; @property (nonatomic, strong) NSString *activeLensId; @property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay; @property (nonatomic, assign) BOOL audioQueueStarted; SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithTicketCreator:(id)ticketCreator; /* Use to counting how many no sound issue we have fixed */ // Call at the place where we have fixed the AVPlayer leak before + (void)startCountingVideoNoSoundHaveBeenFixed; /* Use to report the detail of new no sound issue */ // Reset all the properties of recording error - (void)resetAll; // Log if the audio track is empty - (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL; // called by AVCameraViewController when lense resume audio - (void)managedLensesProcessorDidCallResumeAllSounds; @end ================================================ FILE: ManagedCapturer/SCManagedVideoNoSoundLogger.m ================================================ // // SCManagedVideoNoSoundLogger.m // Snapchat // // Created by Pinlin Chen on 15/07/2017. 
// // #import "SCManagedVideoNoSoundLogger.h" #import "SCManagedCapturer.h" #import "SCManiphestTicketCreator.h" #import #import #import #import #import #import #import #import #import @import AVFoundation; static BOOL s_startCountingVideoNoSoundFixed; // Count the number of no sound errors for an App session static NSUInteger s_noSoundCaseCount = 0; @interface SCManagedVideoNoSoundLogger () { BOOL _isAudioSessionDeactivated; int _lenseResumeCount; } @property (nonatomic) id ticketCreator; @end @implementation SCManagedVideoNoSoundLogger - (instancetype)initWithTicketCreator:(id)ticketCreator { if (self = [super init]) { _ticketCreator = ticketCreator; } return self; } + (NSUInteger)noSoundCount { return s_noSoundCaseCount; } + (void)increaseNoSoundCount { s_noSoundCaseCount += 1; } + (void)startCountingVideoNoSoundHaveBeenFixed { static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ s_startCountingVideoNoSoundFixed = YES; SCLogGeneralInfo(@"start counting video no sound have been fixed"); }); } + (NSString *)appSessionIdForNoSound { static dispatch_once_t onceToken; static NSString *s_AppSessionIdForNoSound = @"SCDefaultSession"; dispatch_once(&onceToken, ^{ s_AppSessionIdForNoSound = SCUUID(); }); return s_AppSessionIdForNoSound; } + (void)logVideoNoSoundHaveBeenFixedIfNeeded { if (s_startCountingVideoNoSoundFixed) { [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:@{ @"have_been_fixed" : @"true", @"fixed_type" : @"player_leak", @"asset_writer_success" : @"true", @"audio_session_success" : @"true", @"audio_queue_success" : @"true", } secretParameters:nil metrics:nil]; } } + (void)logAudioSessionCategoryHaveBeenFixed { [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:@{ @"have_been_fixed" : @"true", @"fixed_type" : @"audio_session_category_mismatch", @"asset_writer_success" : @"true", @"audio_session_success" : @"true", @"audio_queue_success" : @"true", } secretParameters:nil metrics:nil]; } + (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type { [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:@{ @"have_been_fixed" : @"true", @"fixed_type" : @"broken_microphone", @"asset_writer_success" : @"true", @"audio_session_success" : @"true", @"audio_queue_success" : @"true", @"mic_broken_type" : SC_NULL_STRING_IF_NIL(type), @"audio_session_debug_info" : [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", } secretParameters:nil metrics:nil]; } - (instancetype)init { if (self = [super init]) { [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_audioSessionWillDeactivate) name:SCAudioSessionWillDeactivateNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_audioSessionDidActivate) name:SCAudioSessionActivatedNotification object:nil]; _firstWrittenAudioBufferDelay = kCMTimeInvalid; } return self; } - (void)resetAll { _audioQueueError = nil; _audioSessionError = nil; _assetWriterError = nil; _retryAudioQueueSuccess = NO; _retryAudioQueueSuccessSetDataSource = NO; _brokenMicCodeType = nil; _lenseActiveWhileRecording = NO; _lenseResumeCount = 0; _activeLensId = nil; self.firstWrittenAudioBufferDelay = kCMTimeInvalid; } - (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL { AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL]; __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0); dispatch_block_t block = ^{ // Log no audio issues have 
been fixed if (hasAudioTrack) { if (_retryAudioQueueSuccess) { [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed]; } else if (_retryAudioQueueSuccessSetDataSource) { [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType]; } else { [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded]; } } else { // Log no audio issues caused by no permission into "wont_fixed_type", won't show in Grafana BOOL isPermissonGranted = [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted; if (!isPermissonGranted) { [SCManagedVideoNoSoundLogger increaseNoSoundCount]; [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:@{ @"wont_fix_type" : @"no_permission", @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" } secretParameters:nil metrics:nil]; } // Log no audio issues caused by microphone occupied into "wont_fixed_type", for example Phone Call, // It won't show in Grafana // TODO: maybe we should prompt the user of these errors in the future else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority || _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) { NSDictionary *parameters = @{ @"wont_fix_type" : @"microphone_in_use", @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)", @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)", @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)", @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false", @"audio_session_debug_info" : [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" }; [SCManagedVideoNoSoundLogger increaseNoSoundCount]; [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:parameters secretParameters:nil metrics:nil]; [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)]; } else { // Log other new no audio issues, use "have_been_fixed=false" to show in Grafana NSDictionary *parameters = @{ @"have_been_fixed" : @"false", @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)", @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)", @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)", @"asset_writer_success" : [NSString stringWithBool:_assetWriterError == nil], @"audio_session_success" : [NSString stringWithBool:_audioSessionError == nil], @"audio_queue_success" : [NSString stringWithBool:_audioQueueError == nil], @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false", @"video_duration" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(asset.duration)], @"is_audio_session_nil" : [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? 
@"true" : @"false", @"lenses_active" : [NSString stringWithBool:self.lenseActiveWhileRecording], @"active_lense_id" : self.activeLensId ?: @"(null)", @"lense_audio_resume_count" : @(_lenseResumeCount), @"first_audio_buffer_delay" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)], @"audio_session_debug_info" : [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)", @"audio_queue_started" : [NSString stringWithBool:_audioQueueStarted], @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)", @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)" }; [SCManagedVideoNoSoundLogger increaseNoSoundCount]; [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError parameters:parameters secretParameters:nil metrics:nil]; [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)]; } } }; if (hasAudioTrack) { block(); } else { // Wait for all tracks to be loaded, in case of error counting the metric [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:^{ // Return when the tracks couldn't be loaded NSError *error = nil; if ([asset statusOfValueForKey:@"tracks" error:&error] != AVKeyValueStatusLoaded || error != nil) { return; } // check audio track again hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0); runOnMainThreadAsynchronously(block); }]; } } - (void)_audioSessionWillDeactivate { _isAudioSessionDeactivated = YES; } - (void)_audioSessionDidActivate { _isAudioSessionDeactivated = NO; } - (void)managedLensesProcessorDidCallResumeAllSounds { _lenseResumeCount += 1; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoScanner.h ================================================ // // SCManagedVideoScanner.h // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "SCManagedCapturer.h" #import "SCManagedDeviceCapacityAnalyzerListener.h" #import #import @class SCScanConfiguration; @interface SCManagedVideoScanner : NSObject /** * Calling this method to start scan, scan will automatically stop when a snapcode detected */ - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration; /** * Calling this method to stop scan immediately (it is still possible that a successful scan can happen after this is * called) */ - (void)stopScanAsynchronously; - (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration restCycle:(float)restCycle; @end ================================================ FILE: ManagedCapturer/SCManagedVideoScanner.m ================================================ // // SCManagedVideoScanner.m // Snapchat // // Created by Liu Liu on 5/5/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
// #import "SCManagedVideoScanner.h" #import "SCScanConfiguration.h" #import #import #import #import #import #import #import #import #import #import #import #import #import #import // In seconds static NSTimeInterval const kDefaultScanTimeout = 60; static const char *kSCManagedVideoScannerQueueLabel = "com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan"; @interface SCManagedVideoScanner () @end @implementation SCManagedVideoScanner { SCSnapScanner *_snapScanner; dispatch_semaphore_t _activeSemaphore; NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes NSTimeInterval _maxFrameDefaultDuration; NSTimeInterval _maxFramePassiveDuration; float _restCycleOfBusyCycle; NSTimeInterval _scanStartTime; BOOL _active; BOOL _shouldEmitEvent; dispatch_block_t _completionHandler; NSTimeInterval _scanTimeout; SCManagedCaptureDevicePosition _devicePosition; SCQueuePerformer *_performer; BOOL _adjustingFocus; NSArray *_codeTypes; NSArray *_codeTypesOld; sc_managed_capturer_scan_results_handler_t _scanResultsHandler; SCUserSession *_userSession; } - (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration restCycle:(float)restCycle { SCTraceStart(); self = [super init]; if (self) { _snapScanner = [SCSnapScanner sharedInstance]; _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel qualityOfService:QOS_CLASS_UNSPECIFIED queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _activeSemaphore = dispatch_semaphore_create(0); SCAssert(restCycle >= 0 && restCycle < 1, @"rest cycle should be between 0 to 1"); _maxFrameDefaultDuration = maxFrameDefaultDuration; _maxFramePassiveDuration = maxFramePassiveDuration; _restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest } return self; } #pragma mark - Public methods - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration { SCTraceStart(); [_performer perform:^{ _shouldEmitEvent = YES; _completionHandler = nil; _scanResultsHandler = configuration.scanResultsHandler; _userSession = configuration.userSession; _scanTimeout = kDefaultScanTimeout; _maxFrameDuration = _maxFrameDefaultDuration; _codeTypes = [self _scanCodeTypes]; _codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ]; SCTraceStart(); // Set the scan start time properly, if we call startScan multiple times while it is active, // This makes sure we can scan long enough. _scanStartTime = CACurrentMediaTime(); // we are not active, need to send the semaphore to start the scan if (!_active) { _active = YES; // Signal the semaphore that we can start scan! 
dispatch_semaphore_signal(_activeSemaphore); } }]; } - (void)stopScanAsynchronously { SCTraceStart(); [_performer perform:^{ SCTraceStart(); if (_active) { SCLogScanDebug(@"VideoScanner:stopScanAsynchronously turn off from active"); _active = NO; _scanStartTime = 0; _scanResultsHandler = nil; _userSession = nil; } else { SCLogScanDebug(@"VideoScanner:stopScanAsynchronously off already"); } }]; } #pragma mark - Private Methods - (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData { if (scannedData.hasScannedData) { if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji || scannedData.codeType == SCCodeTypeSnapcode18x18Old) { NSString *data = [scannedData.rawData base64EncodedString]; NSString *version = [NSString sc_stringWithFormat:@"%i", scannedData.codeTypeMeta]; [[SCLogger sharedInstance] logEvent:@"SNAPCODE_18x18_SCANNED_FROM_CAMERA" parameters:@{ @"version" : version } secretParameters:@{ @"data" : data }]; if (_completionHandler != nil) { runOnMainThreadAsynchronously(_completionHandler); _completionHandler = nil; } } else if (scannedData.codeType == SCCodeTypeBarcode) { if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) { return; } NSString *data = scannedData.data; NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta]; [[SCLogger sharedInstance] logEvent:@"BARCODE_SCANNED_FROM_CAMERA" parameters:@{ @"type" : type } secretParameters:@{ @"data" : data }]; } else if (scannedData.codeType == SCCodeTypeQRCode) { if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) { return; } NSURL *url = [NSURL URLWithString:scannedData.data]; [[SCLogger sharedInstance] logEvent:@"QR_CODE_SCANNED_FROM_CAMERA" parameters:@{ @"type" : (url) ? @"url" : @"other" } secretParameters:@{}]; } if (_shouldEmitEvent) { sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler; runOnMainThreadAsynchronously(^{ if (scanResultsHandler != nil && scannedData) { SCMachineReadableCodeResult *result = [SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData]; scanResultsHandler(result); } }); } } } - (NSArray *)_scanCodeTypes { // Scan types are defined by codetypes. SnapScan will scan the frame based on codetype. 
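// (Editor's note: per the method body below, the default set is the 18x18 snapcode, QR code, and Bitmoji
// snapcode; SCCodeTypeBarcode is appended only when SCSearchEnableBarcodeProductSearch() is enabled.)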
NSMutableArray *codeTypes = [[NSMutableArray alloc] initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil]; if (SCSearchEnableBarcodeProductSearch()) { [codeTypes addObject:@(SCCodeTypeBarcode)]; } return [codeTypes copy]; } #pragma mark - SCManagedVideoDataSourceListener - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceStart(); _devicePosition = devicePosition; if (!_active) { SCLogScanDebug(@"VideoScanner: Scanner is not active"); return; } SCLogScanDebug(@"VideoScanner: Scanner is active"); // If we can take the semaphore now, enqueue a new buffer; otherwise drop the buffer if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) { CFRetain(sampleBuffer); NSTimeInterval startTime = CACurrentMediaTime(); [_performer perform:^{ SCTraceStart(); CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); SCLogScanInfo(@"VideoScanner: Scanner will scan a frame"); SCSnapScannedData *scannedData; SCLogScanInfo(@"VideoScanner:Use new scanner without false alarm check"); scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes]; if ([UIDevice shouldLogPerfEvents]) { NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000; // Since there are too many unsuccessful scans, we will only log 1/10 of them for now. if (scannedData.hasScannedData || (!scannedData.hasScannedData && arc4random() % 10 == 0)) { [[SCLogger sharedInstance] logEvent:@"SCAN_SINGLE_FRAME" parameters:@{ @"time_span" : @(loadingMs), @"has_scanned_data" : @(scannedData.hasScannedData), }]; } } [self _handleSnapScanResult:scannedData]; // If it is not turned off, we will continue to scan as long as no result is present if (_active) { _active = !scannedData.hasScannedData; } // Clean up if a result was reported for the scan if (!_active) { _scanResultsHandler = nil; _completionHandler = nil; } CFRelease(sampleBuffer); NSTimeInterval currentTime = CACurrentMediaTime(); SCLogScanInfo(@"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f", currentTime - startTime, _maxFrameDuration, _scanTimeout); // Haven't found the scanned data yet, haven't reached the maximum scan timeout yet, haven't turned this off // yet, so get ready for the next frame if (_active && currentTime < _scanStartTime + _scanTimeout) { // We've finished processing the current sample buffer and are ready for the next one, but before that, we need to rest // a bit (if possible) if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) { // If we already reached the deadline (used too much time) and don't want to rest the CPU, give the signal // now to grab the next frame SCLogScanInfo(@"VideoScanner:Signal to get next frame for snapcode scanner"); dispatch_semaphore_signal(_activeSemaphore); } else { NSTimeInterval afterTime = MAX((currentTime - startTime) * _restCycleOfBusyCycle, _maxFrameDuration - (currentTime - startTime)); // If we need to wait more than 0 seconds, then do that, otherwise grab the next frame immediately if (afterTime > 0) { [_performer perform:^{ SCLogScanInfo( @"VideoScanner:Waited and now signaling to get next frame for snapcode scanner"); dispatch_semaphore_signal(_activeSemaphore); } after:afterTime]; } else { SCLogScanInfo(@"VideoScanner:Now signaling to get next frame for snapcode scanner"); dispatch_semaphore_signal(_activeSemaphore); } } } else { // We are not active, and not going to be active any more.
SCLogScanInfo(@"VideoScanner:not active anymore"); _active = NO; _scanResultsHandler = nil; _completionHandler = nil; } }]; } } #pragma mark - SCManagedDeviceCapacityAnalyzerListener - (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer didChangeAdjustingFocus:(BOOL)adjustingFocus { [_performer perform:^{ _adjustingFocus = adjustingFocus; }]; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoStreamReporter.h ================================================ // // SCManagedVideoStreamReporter.h // Snapchat // // Created by Liu Liu on 5/16/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import #import @interface SCManagedVideoStreamReporter : NSObject @end ================================================ FILE: ManagedCapturer/SCManagedVideoStreamReporter.m ================================================ // // SCManagedVideoStreamReporter.m // Snapchat // // Created by Liu Liu on 5/16/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import "SCManagedVideoStreamReporter.h" #import #import static NSTimeInterval const SCManagedVideoStreamReporterInterval = 10; @implementation SCManagedVideoStreamReporter { NSUInteger _droppedSampleBuffers; NSUInteger _outputSampleBuffers; NSTimeInterval _lastReportTime; } - (instancetype)init { self = [super init]; if (self) { _lastReportTime = CACurrentMediaTime(); } return self; } - (void)_reportIfNeeded { NSTimeInterval currentTime = CACurrentMediaTime(); if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) { SCLogGeneralInfo(@"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu", _lastReportTime, currentTime, _droppedSampleBuffers, _outputSampleBuffers); _droppedSampleBuffers = _outputSampleBuffers = 0; _lastReportTime = currentTime; } } - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { ++_outputSampleBuffers; [self _reportIfNeeded]; } - (void)managedVideoDataSource:(id)managedVideoDataSource didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { ++_droppedSampleBuffers; [self _reportIfNeeded]; } @end ================================================ FILE: ManagedCapturer/SCManagedVideoStreamer.h ================================================ // // SCManagedVideoStreamer.h // Snapchat // // Created by Liu Liu on 4/30/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCManagedVideoARDataSource.h" #import #import #import @class ARSession; /** * SCManagedVideoStreamer uses the current AVCaptureSession to create * and publish video output frames. SCManagedVideoStreamer also conforms * to SCManagedVideoDataSource allowing chained consumption of video frames. 
*/ @interface SCManagedVideoStreamer : NSObject - (instancetype)initWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition; - (instancetype)initWithSession:(AVCaptureSession *)session arSession:(ARSession *)arSession devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0); - (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition; - (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0); @end ================================================ FILE: ManagedCapturer/SCManagedVideoStreamer.m ================================================ // // SCManagedVideoStreamer.m // Snapchat // // Created by Liu Liu on 4/30/15. // Copyright (c) 2015 Liu Liu. All rights reserved. // #import "SCManagedVideoStreamer.h" #import "ARConfiguration+SCConfiguration.h" #import "SCCameraTweaks.h" #import "SCCapturerDefines.h" #import "SCLogger+Camera.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCMetalUtils.h" #import "SCProcessingPipeline.h" #import "SCProcessingPipelineBuilder.h" #import #import #import #import #import #import #import #import #import @import ARKit; @import AVFoundation; #define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) #define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) #define SCLogVideoStreamerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__) static NSInteger const kSCCaptureFrameRate = 30; static CGFloat const kSCLogInterval = 3.0; static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer"; static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue"; static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late. static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay = 5; // If the frame is not updated for 5 seconds, it is considered to be stalled. 
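// (Editor's note: with kSCCaptureFrameRate = 30, the cap below evaluates to 1.0 / 31 ≈ 0.032 s, i.e. ARFrames
// arriving less than ~32 ms after the previously delivered frame are skipped in session:didUpdateFrame:.)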
static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap = 1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15; @interface SCManagedVideoStreamer () @property (nonatomic, strong) AVCaptureSession *captureSession; @end @implementation SCManagedVideoStreamer { AVCaptureVideoDataOutput *_videoDataOutput; AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0); AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0); BOOL _performingConfigurations; SCManagedCaptureDevicePosition _devicePosition; BOOL _videoStabilizationEnabledIfSupported; SCManagedVideoDataSourceListenerAnnouncer *_announcer; BOOL _sampleBufferDisplayEnabled; id _sampleBufferDisplayController; dispatch_block_t _flushOutdatedPreviewBlock; NSMutableArray *_waitUntilSampleBufferDisplayedBlocks; SCProcessingPipeline *_processingPipeline; NSTimeInterval _lastDisplayedFrameTimestamp; #ifdef SC_USE_ARKIT_FACE NSTimeInterval _lastDisplayedDepthFrameTimestamp; #endif BOOL _depthCaptureEnabled; CGPoint _portraitModePointOfInterest; // For sticky video tweaks BOOL _keepLateFrames; SCQueuePerformer *_callbackPerformer; atomic_int _processingBuffersCount; } @synthesize isStreaming = _isStreaming; @synthesize performer = _performer; @synthesize currentFrame = _currentFrame; @synthesize fieldOfView = _fieldOfView; #ifdef SC_USE_ARKIT_FACE @synthesize lastDepthData = _lastDepthData; #endif @synthesize videoOrientation = _videoOrientation; - (instancetype)initWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceStart(); self = [super init]; if (self) { _sampleBufferDisplayEnabled = YES; _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init]; // We discard frames to support lenses in real time _keepLateFrames = NO; _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; _videoOrientation = AVCaptureVideoOrientationLandscapeRight; [self setupWithSession:session devicePosition:devicePosition]; SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition); } return self; } - (instancetype)initWithSession:(AVCaptureSession *)session arSession:(ARSession *)arSession devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0) { self = [self initWithSession:session devicePosition:devicePosition]; if (self) { [self setupWithARSession:arSession]; self.currentFrame = nil; #ifdef SC_USE_ARKIT_FACE self.lastDepthData = nil; #endif } return self; } - (AVCaptureVideoDataOutput *)_newVideoDataOutput { AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; // All inbound frames are going to be in the native format of the camera to avoid // any need for transcoding. output.videoSettings = @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; return output; } - (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition { [self stopStreaming]; self.captureSession = session; _devicePosition = devicePosition; _videoDataOutput = [self _newVideoDataOutput]; if (SCDeviceSupportsMetal()) { // We default to start streaming if Metal is supported at startup time. _isStreaming = YES; // Set the sample buffer delegate before starting it.
[_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; } if ([session canAddOutput:_videoDataOutput]) { [session addOutput:_videoDataOutput]; [self _enableVideoMirrorForDevicePosition:devicePosition]; } if (SCCameraTweaksEnablePortraitModeButton()) { if (@available(iOS 11.0, *)) { _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init]; [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; if ([session canAddOutput:_depthDataOutput]) { [session addOutput:_depthDataOutput]; [_depthDataOutput setDelegate:self callbackQueue:_performer.queue]; } _depthCaptureEnabled = NO; } _portraitModePointOfInterest = CGPointMake(0.5, 0.5); } [self setVideoStabilizationEnabledIfSupported:YES]; } - (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0) { arSession.delegateQueue = _performer.queue; arSession.delegate = self; } - (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController { [_performer perform:^{ _sampleBufferDisplayController = sampleBufferDisplayController; SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController); }]; } - (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled { [_performer perform:^{ _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled; SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled); }]; } - (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler { SCAssert(queue, @"callback queue must be provided"); SCAssert(completionHandler, @"completion handler must be provided"); SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue, completionHandler, _isStreaming); if (_isStreaming) { [_performer perform:^{ if (!_waitUntilSampleBufferDisplayedBlocks) { _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array]; } [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]]; SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler); }]; } else { dispatch_async(queue, completionHandler); } } - (void)startStreaming { SCTraceStart(); SCLogVideoStreamerInfo(@"start streaming. 
_isStreaming:%d", _isStreaming); if (!_isStreaming) { _isStreaming = YES; [self _cancelFlushOutdatedPreview]; if (@available(ios 11.0, *)) { if (_depthCaptureEnabled) { [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES]; } } [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; } } - (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceStart(); if ([session canAddOutput:_videoDataOutput]) { SCLogVideoStreamerError(@"add videoDataOutput:%@", _videoDataOutput); [session addOutput:_videoDataOutput]; [self _enableVideoMirrorForDevicePosition:devicePosition]; } else { SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session); } [self _enableVideoStabilizationIfSupported]; } - (void)removeAsOutput:(AVCaptureSession *)session { SCTraceStart(); SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session); [session removeOutput:_videoDataOutput]; } - (void)_cancelFlushOutdatedPreview { SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock); if (_flushOutdatedPreviewBlock) { dispatch_block_cancel(_flushOutdatedPreviewBlock); _flushOutdatedPreviewBlock = nil; } } - (SCQueuePerformer *)callbackPerformer { // If sticky video tweak is on, use a separated performer queue if (_keepLateFrames) { if (!_callbackPerformer) { _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; } return _callbackPerformer; } return _performer; } - (void)pauseStreaming { SCTraceStart(); SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming); if (_isStreaming) { _isStreaming = NO; [_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; if (@available(ios 11.0, *)) { if (_depthCaptureEnabled) { [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; } } @weakify(self); _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{ SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock"); @strongify(self); SC_GUARD_ELSE_RETURN(self); [self->_sampleBufferDisplayController flushOutdatedPreview]; }); [_performer perform:_flushOutdatedPreviewBlock after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? 
kSCManagedVideoStreamerStalledDisplay : 0]; [_performer perform:^{ [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; }]; } } - (void)stopStreaming { SCTraceStart(); SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming); if (_isStreaming) { _isStreaming = NO; [_videoDataOutput setSampleBufferDelegate:nil queue:NULL]; if (@available(iOS 11.0, *)) { if (_depthCaptureEnabled) { [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO]; } } } [self _cancelFlushOutdatedPreview]; [_performer perform:^{ SCLogVideoStreamerInfo(@"stopStreaming in performer queue"); [_sampleBufferDisplayController flushOutdatedPreview]; [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; }]; } - (void)beginConfiguration { SCLogVideoStreamerInfo(@"enter beginConfiguration"); [_performer perform:^{ SCLogVideoStreamerInfo(@"performingConfigurations set to YES"); _performingConfigurations = YES; }]; } - (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition); [self _enableVideoMirrorForDevicePosition:devicePosition]; [self _enableVideoStabilizationIfSupported]; [_performer perform:^{ SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu", (unsigned long)_devicePosition, (unsigned long)devicePosition); if (_devicePosition != devicePosition) { _devicePosition = devicePosition; } }]; } - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { SCTraceStart(); // It is not necessary to make these changes on the private queue, because it is just data output configuration. // It should be called from the managed capturer queue to prevent locking the capture session in two different (private and // managed capturer) queues, which would cause a deadlock. SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu", (unsigned long)_videoOrientation, (unsigned long)videoOrientation); _videoOrientation = videoOrientation; AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; connection.videoOrientation = _videoOrientation; } - (void)setKeepLateFrames:(BOOL)keepLateFrames { SCTraceStart(); [_performer perform:^{ SCTraceStart(); if (keepLateFrames != _keepLateFrames) { _keepLateFrames = keepLateFrames; // Get and set the corresponding queue based on keepLateFrames. // We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideo anymore, because it would potentially // result in a lenses regression, and we can use all 15 sample buffers by adding a separate callback // queue.
[_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue]; SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames); } }]; } - (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0) { _depthCaptureEnabled = enabled; [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled]; if (enabled) { _dataOutputSynchronizer = [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]]; [_dataOutputSynchronizer setDelegate:self queue:_performer.queue]; } else { _dataOutputSynchronizer = nil; } } - (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest { _portraitModePointOfInterest = pointOfInterest; } - (BOOL)getKeepLateFrames { return _keepLateFrames; } - (void)commitConfiguration { SCLogVideoStreamerInfo(@"enter commitConfiguration"); [_performer perform:^{ SCLogVideoStreamerInfo(@"performingConfigurations set to NO"); _performingConfigurations = NO; }]; } - (void)addListener:(id)listener { SCTraceStart(); SCLogVideoStreamerInfo(@"add listener:%@", listener); [_announcer addListener:listener]; } - (void)removeListener:(id)listener { SCTraceStart(); SCLogVideoStreamerInfo(@"remove listener:%@", listener); [_announcer removeListener:listener]; } - (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline { SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline); [_performer perform:^{ SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline); _processingPipeline = processingPipeline; }]; } - (void)removeProcessingPipeline { SCLogVideoStreamerInfo(@"enter removeProcessingPipeline"); [_performer perform:^{ SCLogVideoStreamerInfo(@"processingPipeline set to nil"); _processingPipeline = nil; }]; } - (BOOL)isVideoMirrored { SCTraceStart(); AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; return connection.isVideoMirrored; } #pragma mark - Common Sample Buffer Handling - (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer { return [self didOutputSampleBuffer:sampleBuffer depthData:nil]; } - (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap { // Don't send the sample buffer while we are performing configurations if (_performingConfigurations) { SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES"); return; } SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); // We can't set alwaysDiscardsLateVideoFrames to YES when a lens is activated because it will cause camera freezing. // When alwaysDiscardsLateVideoFrames is set to NO, late frames will not be dropped until they reach 15 frames, // so we should simulate the dropping behaviour as AVFoundation does.
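// (Editor's note: the simulation referenced above lives in captureOutput:didOutputSampleBuffer:fromConnection:
// further down; once kSCManagedVideoStreamerMaxProcessingBuffers - 1 (i.e. 14) buffers are still being processed,
// the incoming frame is routed to didDropSampleBuffer: instead of being enqueued.)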
NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); _lastDisplayedFrameTimestamp = presentationTime; NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime; // Log interval defined by kSCLogInterval, currently 3.0s BOOL shouldLog = (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) % ((long)(kSCCaptureFrameRate * kSCLogInterval)) == 0; if (shouldLog) { SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer); } if (_processingPipeline) { RenderData renderData = { .sampleBuffer = sampleBuffer, .depthDataMap = depthDataMap, .depthBlurPointOfInterest = SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus() ? &_portraitModePointOfInterest : nil, }; // Ensure we are doing all render operations (i.e. accessing textures) on the performer to prevent a race condition SCAssertPerformer(_performer); sampleBuffer = [_processingPipeline render:renderData]; if (shouldLog) { SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer, _processingPipeline); } } if (sampleBuffer && _sampleBufferDisplayEnabled) { // Send the buffer only if it is valid, and set it to be displayed immediately (see the enqueueSampleBuffer method // header; we need to get the attachments array and set the dictionary). CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); if (!attachmentsArray) { SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer"); } else if (CFArrayGetCount(attachmentsArray) > 0) { CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0); CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); } // Warn if the frame that went through is not recent enough. if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) { SCLogVideoStreamerWarning( @"The sample buffer we received is too late, why?
presentationTime:%lf frameLatency:%f", presentationTime, frameLatency); } [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer]; if (shouldLog) { SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer); } [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed]; } if (shouldLog) { SCLogVideoStreamerInfo(@"begin announcing sampleBuffer:%p of devicePosition:%lu", sampleBuffer, (unsigned long)_devicePosition); } [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition]; if (shouldLog) { SCLogVideoStreamerInfo(@"end announcing sampleBuffer:%p", sampleBuffer); } } - (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer { if (_performingConfigurations) { return; } SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]); NSTimeInterval currentProcessingTime = CACurrentMediaTime(); NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); // Only log it when the sticky tweak is on, which means the sticky time is too long and AVFoundation has to drop the // sampleBuffer if (_keepLateFrames) { SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p processingTime:%f sampleTime:%f", sampleBuffer, currentProcessingTime, currentSampleTime); } [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition]; } #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0) { // The sticky video tweak is off, i.e. lenses are on; // we use the same queue for callback and processing, and let AVFoundation decide which frames should be dropped if (!_keepLateFrames) { [self didOutputSampleBuffer:sampleBuffer]; } // The sticky video tweak is on else { if ([_performer isCurrentPerformer]) { // Note: there might be one frame called back on the processing queue when switching the callback queue; // it should be fine. But if the following log appears too often, something is wrong with our design. SCLogVideoStreamerWarning(@"The callback queue should be a separate queue when the sticky tweak is on"); } // TODO: In sticky video v2, we should consider checking free memory if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) { SCLogVideoStreamerWarning(@"processingBuffersCount reached the max.
current count:%d", _processingBuffersCount); [self didDropSampleBuffer:sampleBuffer]; return; } atomic_fetch_add(&_processingBuffersCount, 1); CFRetain(sampleBuffer); // _performer should always be the processing queue [_performer perform:^{ [self didOutputSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); atomic_fetch_sub(&_processingBuffersCount, 1); }]; } } - (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { [self didDropSampleBuffer:sampleBuffer]; } #pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth) - (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection NS_AVAILABLE_IOS(11_0) { AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection synchronizedDataForCaptureOutput:_depthDataOutput]; AVDepthData *depthData = nil; if (syncedDepthData && !syncedDepthData.depthDataWasDropped) { depthData = syncedDepthData.depthData; } AVCaptureSynchronizedSampleBufferData *syncedVideoData = (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection synchronizedDataForCaptureOutput:_videoDataOutput]; if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) { CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer; [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil]; } } #pragma mark - ARSessionDelegate - (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0) { NSString *state = nil; NSString *reason = nil; switch (camera.trackingState) { case ARTrackingStateNormal: state = @"Normal"; break; case ARTrackingStateLimited: state = @"Limited"; break; case ARTrackingStateNotAvailable: state = @"Not Available"; break; } switch (camera.trackingStateReason) { case ARTrackingStateReasonNone: reason = @"None"; break; case ARTrackingStateReasonInitializing: reason = @"Initializing"; break; case ARTrackingStateReasonExcessiveMotion: reason = @"Excessive Motion"; break; case ARTrackingStateReasonInsufficientFeatures: reason = @"Insufficient Features"; break; #if SC_AT_LEAST_SDK_11_3 case ARTrackingStateReasonRelocalizing: reason = @"Relocalizing"; break; #endif } SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason); } - (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) { #ifdef SC_USE_ARKIT_FACE // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific // devices there are ARFrame instances that don't respond to `capturedDepthData`. // (note: this was discovered to be due to some people staying on iOS 11 betas). 
AVDepthData *depth = nil; if ([frame respondsToSelector:@selector(capturedDepthData)]) { depth = frame.capturedDepthData; } #endif CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp; // Don't deliver more than 30 frames per sec BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; #ifdef SC_USE_ARKIT_FACE if (depth) { CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp; framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap; } #endif SC_GUARD_ELSE_RETURN(framerateMinimumElapsed); #ifdef SC_USE_ARKIT_FACE if (depth) { self.lastDepthData = depth; _lastDisplayedDepthFrameTimestamp = frame.timestamp; } #endif // Make sure that current frame is no longer being used, otherwise drop current frame. SC_GUARD_ELSE_RETURN(self.currentFrame == nil); CVPixelBufferRef pixelBuffer = frame.capturedImage; CVPixelBufferLockBaseAddress(pixelBuffer, 0); CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000); CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid}; CMVideoFormatDescriptionRef videoInfo; CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); CMSampleBufferRef buffer; CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer); CFRelease(videoInfo); CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); self.currentFrame = frame; [self didOutputSampleBuffer:buffer]; [self _updateFieldOfViewWithARFrame:frame]; CFRelease(buffer); } - (void)session:(ARSession *)session didAddAnchors:(NSArray *)anchors NS_AVAILABLE_IOS(11_0) { for (ARAnchor *anchor in anchors) { if ([anchor isKindOfClass:[ARPlaneAnchor class]]) { SCLogVideoStreamerInfo(@"ARKit added plane anchor"); return; } } } - (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0) { SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error); [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]]; } - (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0) { SCLogVideoStreamerWarning(@"ARKit session interrupted"); } - (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0) { SCLogVideoStreamerInfo(@"ARKit interruption ended"); } #pragma mark - Private methods - (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed { for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) { // Call the completion handlers. dispatch_async(completion[0], completion[1]); } [_waitUntilSampleBufferDisplayedBlocks removeAllObjects]; } // This is the magic that ensures the VideoDataOutput will have the correct // orientation. 
- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition); AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; connection.videoOrientation = _videoOrientation; if (devicePosition == SCManagedCaptureDevicePositionFront) { connection.videoMirrored = YES; } } - (void)_enableVideoStabilizationIfSupported { SCTraceStart(); if (!SCCameraTweaksEnableVideoStabilization()) { SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization"); return; } AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; if (!videoConnection) { SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", videoConnection); return; } // Set the preferred video stabilization mode (Standard when enabled). Default is off. if ([videoConnection isVideoStabilizationSupported]) { videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported ? AVCaptureVideoStabilizationModeStandard : AVCaptureVideoStabilizationModeOff; NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) }; [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params]; SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@", (long)videoConnection.preferredVideoStabilizationMode, videoConnection); } else { SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection); } } - (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported { SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported); _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported; [self _enableVideoStabilizationIfSupported]; } - (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0) { SC_GUARD_ELSE_RETURN(frame.camera); CGSize imageResolution = frame.camera.imageResolution; matrix_float3x3 intrinsics = frame.camera.intrinsics; float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI; if (_fieldOfView != xFovDegrees) { self.fieldOfView = xFovDegrees; } } - (NSString *)description { return [self debugDescription]; } - (NSString *)debugDescription { NSDictionary *debugDict = @{ @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No", @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No", @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No", @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No" }; return [NSString sc_stringWithFormat:@"%@", debugDict]; } @end ================================================ FILE: ManagedCapturer/SCMetalUtils.h ================================================ // // SCMetalUtils.h // Snapchat // // Created by Michel Loenngren on 7/11/17. // // Utility class for Metal-related helpers. #import #if !TARGET_IPHONE_SIMULATOR #import #endif #import #import SC_EXTERN_C_BEGIN #if !TARGET_IPHONE_SIMULATOR extern id SCGetManagedCaptureMetalDevice(void); #endif static SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void) { #if TARGET_CPU_ARM64 return YES; // All 64-bit systems support Metal.
#else return NO; #endif } #if !TARGET_IPHONE_SIMULATOR static inline id SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex, MTLPixelFormat pixelFormat, CVMetalTextureCacheRef textureCache) { size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); CVMetalTextureRef textureRef; if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil, pixelFormat, width, height, planeIndex, &textureRef)) { return nil; } id texture = CVMetalTextureGetTexture(textureRef); CVBufferRelease(textureRef); return texture; } static inline void SCMetalCopyTexture(id texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex) { CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex); NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex); MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex), CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)); [texture getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0]; CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); } #endif SC_EXTERN_C_END ================================================ FILE: ManagedCapturer/SCMetalUtils.m ================================================ // // SCMetalUtils.m // Snapchat // // Created by Michel Loenngren on 8/16/17. // // #import "SCMetalUtils.h" #import id SCGetManagedCaptureMetalDevice(void) { #if !TARGET_IPHONE_SIMULATOR SCTraceStart(); static dispatch_once_t onceToken; static id device; dispatch_once(&onceToken, ^{ device = MTLCreateSystemDefaultDevice(); }); return device; #endif return nil; } ================================================ FILE: ManagedCapturer/SCScanConfiguration.h ================================================ // // SCScanConfiguration.h // Snapchat // // Created by Yang Dai on 3/7/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCManagedCapturer.h" #import @interface SCScanConfiguration : NSObject @property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler; @property (nonatomic, strong) SCUserSession *userSession; @end ================================================ FILE: ManagedCapturer/SCScanConfiguration.m ================================================ // // SCScanConfiguration.m // Snapchat // // Created by Yang Dai on 3/7/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCScanConfiguration.h" @implementation SCScanConfiguration @end ================================================ FILE: ManagedCapturer/SCSingleFrameStreamCapturer.h ================================================ // // SCSingleFrameStreamCapturer.h // Snapchat // // Created by Benjamin Hollis on 5/3/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCCaptureCommon.h" #import #import @interface SCSingleFrameStreamCapturer : NSObject - (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler; @end ================================================ FILE: ManagedCapturer/SCSingleFrameStreamCapturer.m ================================================ // // SCSingleFrameStreamCapturer.m // Snapchat // // Created by Benjamin Hollis on 5/3/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. 
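// (Editor's note: a minimal usage sketch for SCSingleFrameStreamCapturer. `videoDataSource` is a hypothetical
// managed video data source that supports addListener:, and the completion signature follows the _callback(image)
// call in the implementation below.)
//   SCSingleFrameStreamCapturer *capturer = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^(UIImage *image) {
//       // Called exactly once, with the first streamed frame decoded to a UIImage.
//   }];
//   [videoDataSource addListener:capturer];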
// #import "SCSingleFrameStreamCapturer.h" #import "SCManagedCapturer.h" @implementation SCSingleFrameStreamCapturer { sc_managed_capturer_capture_video_frame_completion_handler_t _callback; } - (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler { self = [super init]; if (self) { _callback = completionHandler; } return self; } #pragma mark - SCManagedVideoDataSourceListener - (void)managedVideoDataSource:(id)managedVideoDataSource didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer devicePosition:(SCManagedCaptureDevicePosition)devicePosition { if (_callback) { UIImage *image = [self imageFromSampleBuffer:sampleBuffer]; _callback(image); } _callback = nil; } /** * Decode a CMSampleBufferRef to our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, * as set in SCManagedVideoStreamer) to a UIImage. * * Code from http://stackoverflow.com/a/31553521/11284 */ #define clamp(a) (a > 255 ? 255 : (a < 0 ? 0 : a)) // TODO: Use the transform code from SCImageProcessIdentityYUVCommand - (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer { CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(imageBuffer, 0); size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0); uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1); size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1); int bytesPerPixel = 4; uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel); for (int y = 0; y < height; y++) { uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel]; uint8_t *yBufferLine = &yBuffer[y * yPitch]; uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch]; for (int x = 0; x < width; x++) { int16_t y = yBufferLine[x]; int16_t cb = cbCrBufferLine[x & ~1] - 128; int16_t cr = cbCrBufferLine[x | 1] - 128; uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel]; int16_t r = (int16_t)roundf(y + cr * 1.4); int16_t g = (int16_t)roundf(y + cb * -0.343 + cr * -0.711); int16_t b = (int16_t)roundf(y + cb * 1.765); rgbOutput[0] = 0xff; rgbOutput[1] = clamp(b); rgbOutput[2] = clamp(g); rgbOutput[3] = clamp(r); } } CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast); CGImageRef quartzImage = CGBitmapContextCreateImage(context); // TODO: Hardcoding UIImageOrientationRight seems cheesy UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight]; CGContextRelease(context); CGColorSpaceRelease(colorSpace); CGImageRelease(quartzImage); free(rgbBuffer); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); return image; } @end ================================================ FILE: ManagedCapturer/SCStillImageCaptureVideoInputMethod.h ================================================ // // SCStillImageCaptureVideoInputMethod.h // Snapchat // // Created by Alexander Grytsiuk on 3/16/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. 
// #import "SCManagedCapturerState.h" #import @interface SCStillImageCaptureVideoInputMethod : NSObject - (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, NSError *error))successBlock failureBlock:(void (^)(NSError *error))failureBlock; @end ================================================ FILE: ManagedCapturer/SCStillImageCaptureVideoInputMethod.m ================================================ // // SCStillImageCaptureVideoInputMethod.m // Snapchat // // Created by Alexander Grytsiuk on 3/16/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCStillImageCaptureVideoInputMethod.h" #import "SCManagedCapturer.h" #import "SCManagedVideoFileStreamer.h" typedef unsigned char uchar_t; int clamp(int val, int low, int high) { if (val < low) val = low; if (val > high) val = high; return val; } void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b) { double red = yValue + (1.370705 * (vValue - 128)); double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128)); double blue = yValue + (1.732446 * (uValue - 128)); *r = clamp(red, 0, 255); *g = clamp(green, 0, 255); *b = clamp(blue, 0, 255); } void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel, int rgbBytesPerRow) { uchar_t *uvData = nv21Data + height * width; for (int h = 0; h < height; h++) { uchar_t *yRowBegin = nv21Data + h * width; uchar_t *uvRowBegin = uvData + h / 2 * width; uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h; for (int w = 0; w < width; w++) { uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w; yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]), &(rgbPixelBegin[1]), &(rgbPixelBegin[2])); } } } @implementation SCStillImageCaptureVideoInputMethod - (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo, NSError *error))successBlock failureBlock:(void (^)(NSError *error))failureBlock { id videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource]; if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) { SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource; [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) { BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront; #if TARGET_IPHONE_SIMULATOR UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer]; CGImageRef videoImage = uiImage.CGImage; UIImage *capturedImage = [UIImage imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage scale:1.0 orientation:UIImageOrientationRight]; #else CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; CIContext *temporaryContext = [CIContext contextWithOptions:nil]; CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)); CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)]; UIImage *capturedImage = [UIImage imageWithCGImage:shouldFlip ? 
[self flipCGImage:videoImage size:size].CGImage : videoImage scale:1.0 orientation:UIImageOrientationRight]; CGImageRelease(videoImage); #endif if (successBlock) { successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil); } }]; } else { if (failureBlock) { failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]); } } } - (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size { UIGraphicsBeginImageContext(size); CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage); UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return image; } - (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer { CVPixelBufferLockBaseAddress(imageBuffer, 0); size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); size_t rgbBytesPerPixel = 4; size_t rgbBytesPerRow = width * rgbBytesPerPixel; uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); uchar_t *rgbData = malloc(rgbBytesPerRow * height); convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef context = CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast); CGImageRef cgImage = CGBitmapContextCreateImage(context); UIImage *result = [UIImage imageWithCGImage:cgImage]; CGImageRelease(cgImage); CGContextRelease(context); CGColorSpaceRelease(colorSpace); free(rgbData); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); return result; } - (NSString *)methodName { return @"VideoInput"; } @end ================================================ FILE: ManagedCapturer/SCTimedTask.h ================================================ // // SCTimedTask.h // Snapchat // // Created by Michel Loenngren on 4/2/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import #import /* Block based timed task */ @interface SCTimedTask : NSObject @property (nonatomic, assign) CMTime targetTime; @property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond); - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithTargetTime:(CMTime)targetTime task:(void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task; - (NSString *)description; @end ================================================ FILE: ManagedCapturer/SCTimedTask.m ================================================ // // SCTimedTask.m // Snapchat // // Created by Michel Loenngren on 4/2/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. // #import "SCTimedTask.h" #import @implementation SCTimedTask - (instancetype)initWithTargetTime:(CMTime)targetTime task: (void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task { if (self = [super init]) { _targetTime = targetTime; _task = task; } return self; } - (NSString *)description { return [NSString sc_stringWithFormat:@"<%@: %p, targetTime: %lld>", NSStringFromClass([self class]), self, _targetTime.value]; } @end ================================================ FILE: ManagedCapturer/SCVideoCaptureSessionInfo.h ================================================ // // SCVideoCaptureSessionInfo.h // Snapchat // // Created by Michel Loenngren on 3/27/17. // Copyright © 2017 Snapchat, Inc. All rights reserved. 
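// (Editor's note: a minimal usage sketch for the SCTimedTask class above; the two-second target time and the 600
// timescale are hypothetical values.)
//   SCTimedTask *task = [[SCTimedTask alloc] initWithTargetTime:CMTimeMakeWithSeconds(2.0, 600)
//       task:^(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond) {
//           // Fires once the recording session reaches the 2 s mark.
//       }];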
// #import #import #import typedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) { SCManagedVideoCapturerInfoAudioQueueError, SCManagedVideoCapturerInfoAssetWriterError, SCManagedVideoCapturerInfoAudioSessionError, SCManagedVideoCapturerInfoAudioQueueRetrySuccess, SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue, SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware }; typedef u_int32_t sc_managed_capturer_recording_session_t; /* Container object holding information about the current recording session. */ typedef struct { CMTime startTime; CMTime endTime; CMTime duration; sc_managed_capturer_recording_session_t sessionId; } SCVideoCaptureSessionInfo; static inline SCVideoCaptureSessionInfo SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime, sc_managed_capturer_recording_session_t sessionId) { SCVideoCaptureSessionInfo session; session.startTime = startTime; session.endTime = endTime; if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) { session.duration = CMTimeSubtract(endTime, startTime); } else { session.duration = kCMTimeInvalid; } session.sessionId = sessionId; return session; } static inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo) { if (CMTIME_IS_VALID(sessionInfo.startTime)) { if (CMTIME_IS_VALID(sessionInfo.endTime)) { return CMTimeGetSeconds(sessionInfo.duration); } return CACurrentMediaTime() - CMTimeGetSeconds(sessionInfo.startTime); } return 0; } static inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label) { if (CMTIME_IS_VALID(time)) { return [NSString sc_stringWithFormat:@"%@: %f", label, CMTimeGetSeconds(time)]; } else { return [NSString sc_stringWithFormat:@"%@: Invalid", label]; } } static inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo) { NSMutableString *description = [NSMutableString new]; [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @"StartTime")]; [description appendString:@", "]; [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @"EndTime")]; [description appendString:@", "]; [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @"Duration")]; [description appendString:@", "]; [description appendString:[NSString sc_stringWithFormat:@"Id: %u", sessionInfo.sessionId]]; return [description copy]; } ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureBaseState.h ================================================ // // SCCaptureBaseState.h // Snapchat // // Created by Lin Jia on 10/19/17. // // #import "SCCaptureCommon.h" #import "SCCaptureStateDelegate.h" #import "SCCaptureStateMachineBookKeeper.h" #import "SCCaptureStateUtil.h" #import "SCCaptureWorker.h" #import "SCManagedCaptureDevice.h" #import "SCManagedCapturerState.h" #import "SCStateTransitionPayload.h" #import @class SCCaptureResource; @class SCCapturerToken; @class SCAudioConfiguration; @class SCQueuePerformer; /* Every state machine state needs to inherit from SCCaptureBaseState to have the APIs. A state machine state in general will only implement the APIs which are legal for itself. If illegal APIs are invoked, SCCaptureBaseState will handle them. The intended behavior: 1) crash using SCAssert in Debug builds, 2) ignore the API call, and log the call, in alpha/master/production.
3) in the future, we will introduce a dangerous-API-call concept, and restart the camera in such cases, to avoid bad state. Every state machine state is going to be built to follow functional programming as much as possible. The shared resources between states will be passed into each API via SCCaptureResource. */ @interface SCCaptureBaseState : NSObject - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; /* The following API will be invoked at the moment the state context promotes the state to be the current state. The state uses this * chance to do something, such as start recording for the recording state. */ - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context; - (SCCaptureStateMachineStateId)stateId; - (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)startRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context; - (void)prepareForRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration context:(NSString *)context; - (void)startRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context; - (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; - (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context; - (void)captureStillImageWithResource:(SCCaptureResource *)resource aspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context; - (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource context:(NSString *)context; - (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource context:(NSString *)context; @property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper; @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureBaseState.m ================================================ // // SCCaptureBaseState.m // Snapchat // // Created by Lin Jia on 10/19/17.
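/* [Editor's note] A sketch of the subclassing pattern the header above describes; the
   class name is hypothetical. A concrete state overrides only its legal APIs plus
   stateId, and inherits the illegal-call trap for everything else:

   @interface SCCaptureExampleState : SCCaptureBaseState
   @end

   @implementation SCCaptureExampleState
   - (SCCaptureStateMachineStateId)stateId {
       return SCCaptureRunningStateId; // a real state returns its own enum value
   }
   - (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context {
       // legal for this state: do real work here; any API *not* overridden falls
       // through to SCCaptureBaseState, which books it as illegal and asserts in debug.
   }
   @end
*/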
// // #import "SCCaptureBaseState.h" #import "SCCaptureStateMachineBookKeeper.h" #import "SCCapturerToken.h" #import "SCManagedCapturerV1_Private.h" #import #import #import @implementation SCCaptureBaseState { SCCaptureStateMachineBookKeeper *_bookKeeper; SCQueuePerformer *_performer; __weak id _delegate; } - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super init]; if (self) { SCAssert(performer, @""); SCAssert(bookKeeper, @""); _bookKeeper = bookKeeper; _performer = performer; _delegate = delegate; } return self; } - (SCCaptureStateMachineStateId)stateId { return SCCaptureBaseStateId; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context]; } - (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context]; } - (void)startRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context]; } - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token completionHandler:completionHandler context:context]; // TODO: Fix CCAM-14450 // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450 // It is caused by switching from scanning state to stop running state when the view is disappearing in the scanning // state, which can be reproduced by triggering scanning and then switching to the maps page. // We removed the SCAssert to ignore the crashes in the master branch and will find a solution for the illegal call into the // state machine later if (self.stateId != SCCaptureScanningStateId) { SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]), context); } else { SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped, SCCaptureStateName([self stateId]), context); } if (actuallyStopped) { [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context]; } } - (void)prepareForRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration context:(NSString *)context { [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context]; } - (void)startRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context { [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context]; } - (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context]; } - (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context]; } - (void)captureStillImageWithResource:(SCCaptureResource *)resource aspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context]; } - (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource context:(NSString *)context { [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context]; } - (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource context:(NSString *)context { // Temporary solution until IDT-12520 is resolved. [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource]; //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"]; } - (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context { [_bookKeeper state:[self stateId] illegalAPIcalled:illegalAPIName callStack:[NSThread callStackSymbols] context:context]; if (SCIsDebugBuild()) { SCAssertFail(@"illegal API invoked on capture state machine"); } } - (SCCaptureStateMachineBookKeeper *)bookKeeper { return _bookKeeper; } @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateDelegate.h ================================================ // // SCCaptureStateDelegate.h // Snapchat // // Created by Lin Jia on 10/27/17. // // #import "SCCaptureStateUtil.h" #import @class SCCaptureBaseState; @class SCStateTransitionPayload; /* The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer to other state".
Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate for those states. */ @protocol SCCaptureStateDelegate - (void)currentState:(SCCaptureBaseState *)state requestToTransferToNewState:(SCCaptureStateMachineStateId)newState payload:(SCStateTransitionPayload *)payload context:(NSString *)context; @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.h ================================================ // // SCCaptureStateTransitionBookKeeper.h // Snapchat // // Created by Lin Jia on 10/27/17. // // #import "SCCaptureStateUtil.h" #import /* Book keeper is used to record every state transition, and every illegal API call. */ @interface SCCaptureStateMachineBookKeeper : NSObject - (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId to:(SCCaptureStateMachineStateId)toId context:(NSString *)context; - (void)state:(SCCaptureStateMachineStateId)captureState illegalAPIcalled:(NSString *)illegalAPIName callStack:(NSArray *)callStack context:(NSString *)context; - (void)logAPICalled:(NSString *)apiName context:(NSString *)context; @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateMachineBookKeeper.m ================================================ // // SCCaptureStateTransitionBookKeeper.m // Snapchat // // Created by Lin Jia on 10/27/17. // // #import "SCCaptureStateMachineBookKeeper.h" #import "SCCaptureStateUtil.h" #import "SCLogger+Camera.h" #import #import @interface SCCaptureStateMachineBookKeeper () { NSDate *_lastStateStartTime; } @end @implementation SCCaptureStateMachineBookKeeper - (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId to:(SCCaptureStateMachineStateId)toId context:(NSString *)context { NSDate *date = [NSDate date]; SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n", SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime], SCCaptureStateName(toId), context, date); _lastStateStartTime = date; } - (void)state:(SCCaptureStateMachineStateId)captureState illegalAPIcalled:(NSString *)illegalAPIName callStack:(NSArray *)callStack context:(NSString *)context { SCAssert(callStack, @"call stack empty"); SCAssert(illegalAPIName, @""); SCAssert(context, @"Context is empty"); SCLogCaptureStateMachineError(@"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \n", SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack); NSArray *reportedArray = [callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack; [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall parameters:@{ @"state" : SCCaptureStateName(captureState), @"API" : illegalAPIName, @"call_stack" : reportedArray, @"context" : context }]; } - (void)logAPICalled:(NSString *)apiName context:(NSString *)context { SCAssert(apiName, @"API name is empty"); SCAssert(context, @"Context is empty"); SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context); } @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h ================================================ // // SCCaptureStateMachineContext.h // Snapchat // // Created by Lin Jia on 10/18/17. // // #import "SCCaptureCommon.h" #import "SCManagedCaptureDevice.h" #import #import /* SCCaptureStateMachineContext is the central piece that glues all states together. 
It will pass API calls to the current state. The classic state machine design pattern: https://en.wikipedia.org/wiki/State_pattern It is also the delegate for the states it manages, so that those states can tell stateMachineContext to transition to the next state. */ @class SCCaptureResource; @class SCCapturerToken; @interface SCCaptureStateMachineContext : NSObject - (instancetype)initWithResource:(SCCaptureResource *)resource; - (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; - (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler; - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context; - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token after:(NSTimeInterval)delay completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context; - (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration context:(NSString *)context; - (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context; - (void)stopRecordingWithContext:(NSString *)context; - (void)cancelRecordingWithContext:(NSString *)context; - (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler: (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context; #pragma mark - Scanning - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context; - (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m ================================================ // // SCCaptureStateMachineContext.m // Snapchat // // Created by Lin Jia on 10/18/17. // // #import "SCCaptureStateMachineContext.h" #import "SCCaptureBaseState.h" #import "SCCaptureImageState.h" #import "SCCaptureImageWhileRecordingState.h" #import "SCCaptureInitializedState.h" #import "SCCaptureRecordingState.h" #import "SCCaptureResource.h" #import "SCCaptureRunningState.h" #import "SCCaptureScanningState.h" #import "SCCaptureStateMachineBookKeeper.h" #import "SCCaptureStateUtil.h" #import "SCCaptureUninitializedState.h" #import "SCCaptureWorker.h" #import "SCCapturerToken.h" #import "SCStateTransitionPayload.h" #import #import #import #import #import #import @interface SCCaptureStateMachineContext () { SCQueuePerformer *_queuePerformer; // Cache all the states.
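// [Editor's note] The dictionary below is that cache: states are created lazily in
// -_setCurrentState:payload:context: and reused for the lifetime of the context,
// keyed by @(SCCaptureStateMachineStateId).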
NSMutableDictionary *_states; SCCaptureBaseState *_currentState; SCCaptureStateMachineBookKeeper *_bookKeeper; SCCaptureResource *_captureResource; } @end @implementation SCCaptureStateMachineContext - (instancetype)initWithResource:(SCCaptureResource *)resource { self = [super init]; if (self) { SCAssert(resource, @""); SCAssert(resource.queuePerformer, @""); _captureResource = resource; _queuePerformer = resource.queuePerformer; _states = [[NSMutableDictionary alloc] init]; _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init]; [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext]; } return self; } - (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId payload:(SCStateTransitionPayload *)payload context:(NSString *)context { switch (stateId) { case SCCaptureUninitializedStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureUninitializedState *uninitializedState = [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:uninitializedState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureInitializedStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureInitializedState *initializedState = [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:initializedState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureRunningStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureRunningState *runningState = [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:runningState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureImageStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureImageState *captureImageState = [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:captureImageState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureImageWhileRecordingStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureImageWhileRecordingState *captureImageWhileRecordingState = [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:captureImageWhileRecordingState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureScanningStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureScanningState *scanningState = [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:scanningState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; case SCCaptureRecordingStateId: if (![_states objectForKey:@(stateId)]) { SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self]; [_states setObject:recordingState forKey:@(stateId)]; } _currentState = [_states objectForKey:@(stateId)]; break; default: SCAssert(NO, @"illegal state Id"); break; } [_currentState didBecomeCurrentState:payload resource:_captureResource context:context]; } - (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition completionHandler:(dispatch_block_t)completionHandler context:(NSString
*)context { [SCCaptureWorker setupCapturePreviewLayerController]; SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState initializeCaptureWithDevicePosition:devicePosition resource:_captureResource completionHandler:completionHandler context:context]; }]; } - (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler { [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState startRunningWithCapturerToken:token resource:_captureResource completionHandler:completionHandler context:context]; }]; return token; } - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState stopRunningWithCapturerToken:token resource:_captureResource completionHandler:completionHandler context:context]; }]; } - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token after:(NSTimeInterval)delay completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState stopRunningWithCapturerToken:token resource:_captureResource completionHandler:completionHandler context:context]; } after:delay]; } - (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration context:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState prepareForRecordingWithResource:_captureResource audioConfiguration:configuration context:context]; }]; } - (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState startRecordingWithResource:_captureResource audioConfiguration:configuration outputSettings:outputSettings maxDuration:maxDuration fileURL:fileURL captureSessionID:captureSessionID completionHandler:completionHandler context:context]; }]; } - (void)stopRecordingWithContext:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState stopRecordingWithResource:_captureResource context:context]; }]; } - (void)cancelRecordingWithContext:(NSString *)context { SCTraceResumeToken resumeToken = SCTraceCapture(); [_queuePerformer perform:^{ SCTraceResume(resumeToken); [_currentState cancelRecordingWithResource:_captureResource context:context]; }]; } - (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler: 
(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { [_queuePerformer perform:^() { [_currentState captureStillImageWithResource:_captureResource aspectRatio:aspectRatio captureSessionID:captureSessionID completionHandler:completionHandler context:context]; }]; } - (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context { [_queuePerformer perform:^() { [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context]; }]; } - (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { [_queuePerformer perform:^() { [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context]; }]; } - (void)currentState:(SCCaptureBaseState *)state requestToTransferToNewState:(SCCaptureStateMachineStateId)newState payload:(SCStateTransitionPayload *)payload context:(NSString *)context { SCAssertPerformer(_queuePerformer); SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]), SCCaptureStateName(newState), context); if (payload) { SCAssert(payload.fromState == [state stateId], @"From state id check"); SCAssert(payload.toState == newState, @"To state id check"); } if (_currentState != state) { return; } [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context]; [self _setCurrentState:newState payload:payload context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateUtil.h ================================================ // // SCCaptureStateUtil.h // Snapchat // // Created by Lin Jia on 10/27/17. // // #import "SCLogger+Camera.h" #import #import #import #define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) #define SCLogCaptureStateMachineError(fmt, ...) SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__) typedef NSNumber SCCaptureStateKey; typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) { SCCaptureBaseStateId = 0, SCCaptureUninitializedStateId, SCCaptureInitializedStateId, SCCaptureImageStateId, SCCaptureImageWhileRecordingStateId, SCCaptureRunningStateId, SCCaptureRecordingStateId, SCCaptureScanningStateId, SCCaptureStateMachineStateIdCount }; SC_EXTERN_C_BEGIN NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId); SC_EXTERN_C_END ================================================ FILE: ManagedCapturer/StateMachine/SCCaptureStateUtil.m ================================================ // // SCCaptureStateUtil.m // Snapchat // // Created by Lin Jia on 10/27/17. 
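// [Editor's note] Keep the switch below in sync with SCCaptureStateMachineStateId in
// SCCaptureStateUtil.h: any id added to the enum without a matching case here falls
// through to the assert and is reported as "SCIllegalStateId".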
// // #import "SCCaptureStateUtil.h" #import #import NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId) { switch (stateId) { case SCCaptureBaseStateId: return @"SCCaptureBaseStateId"; case SCCaptureUninitializedStateId: return @"SCCaptureUninitializedStateId"; case SCCaptureInitializedStateId: return @"SCCaptureInitializedStateId"; case SCCaptureImageStateId: return @"SCCaptureImageStateId"; case SCCaptureImageWhileRecordingStateId: return @"SCCaptureImageWhileRecordingStateId"; case SCCaptureRunningStateId: return @"SCCaptureRunningStateId"; case SCCaptureRecordingStateId: return @"SCCaptureRecordingStateId"; case SCCaptureScanningStateId: return @"SCCaptureScanningStateId"; default: SCCAssert(NO, @"illegal state id"); break; } return @"SCIllegalStateId"; } ================================================ FILE: ManagedCapturer/StateMachine/SCManagedCapturerLogging.h ================================================ // // SCManagedCapturerLogging.h // Snapchat // // Created by Lin Jia on 11/13/17. // #import #define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) #define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) #define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__) ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageState.h ================================================ // // SCCaptureImageState.h // Snapchat // // Created by Lin Jia on 1/8/18. // #import "SCCaptureBaseState.h" #import @class SCQueuePerformer; @interface SCCaptureImageState : SCCaptureBaseState SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageState.m ================================================ // // SCCaptureImageState.m // Snapchat // // Created by Lin Jia on 1/8/18.
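/* [Editor's note] Flow of this state, per didBecomeCurrentState: below: validate that the
   payload is an SCCaptureImageStateTransitionPayload (bouncing back to the source state
   if not), start the still capture through SCCaptureWorker, then immediately request the
   transition back to SCCaptureRunningStateId; the capture itself completes asynchronously
   through the payload's completion block. */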
// #import "SCCaptureImageState.h" #import "SCCaptureImageStateTransitionPayload.h" #import "SCManagedCapturerV1_Private.h" #import "SCStateTransitionPayload.h" #import #import @interface SCCaptureImageState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureImageState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { SCAssertPerformer(_performer); SCAssert(payload.toState == [self stateId], @""); if (![payload isKindOfClass:[SCCaptureImageStateTransitionPayload class]]) { SCAssertFail(@"wrong payload passed in"); [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context]; return; } SCCaptureImageStateTransitionPayload *captureImagePayload = (SCCaptureImageStateTransitionPayload *)payload; [SCCaptureWorker captureStillImageWithCaptureResource:resource aspectRatio:captureImagePayload.aspectRatio captureSessionID:captureImagePayload.captureSessionID shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource] completionHandler:captureImagePayload.block context:context]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; } - (SCCaptureStateMachineStateId)stateId { return SCCaptureImageStateId; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.h ================================================ // // SCCaptureImageStateTransitionPayload.h // Snapchat // // Created by Lin Jia on 1/9/18. // #import "SCCaptureCommon.h" #import "SCStateTransitionPayload.h" #import @interface SCCaptureImageStateTransitionPayload : SCStateTransitionPayload @property (nonatomic, readonly, strong) NSString *captureSessionID; @property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block; @property (nonatomic, readonly, assign) CGFloat aspectRatio; SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState captureSessionId:(NSString *)captureSessionID aspectRatio:(CGFloat)aspectRatio completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageStateTransitionPayload.m ================================================ // // SCCaptureImageStateTransitionPayload.m // Snapchat // // Created by Lin Jia on 1/9/18.
// #import "SCCaptureImageStateTransitionPayload.h" @implementation SCCaptureImageStateTransitionPayload - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState captureSessionId:(NSString *)captureSessionID aspectRatio:(CGFloat)aspectRatio completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block { self = [super initWithFromState:fromState toState:toState]; if (self) { _captureSessionID = captureSessionID; _aspectRatio = aspectRatio; _block = block; } return self; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.h ================================================ // // SCCaptureImageWhileRecordingState.h // Snapchat // // Created by Sun Lei on 22/02/2018. // #import "SCCaptureBaseState.h" #import @class SCQueuePerformer; @interface SCCaptureImageWhileRecordingState : SCCaptureBaseState SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingState.m ================================================ // // SCCaptureImageWhileRecordingState.m // Snapchat // // Created by Sun Lei on 22/02/2018. // #import "SCCaptureImageWhileRecordingState.h" #import "SCCaptureImageWhileRecordingStateTransitionPayload.h" #import "SCManagedCapturerV1_Private.h" #import #import @interface SCCaptureImageWhileRecordingState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureImageWhileRecordingState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (SCCaptureStateMachineStateId)stateId { return SCCaptureImageWhileRecordingStateId; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { SCAssertPerformer(_performer); SCAssert(payload.fromState == SCCaptureRecordingStateId, @""); SCAssert(payload.toState == [self stateId], @""); SCAssert([payload isKindOfClass:[SCCaptureImageWhileRecordingStateTransitionPayload class]], @""); SCCaptureImageWhileRecordingStateTransitionPayload *captureImagePayload = (SCCaptureImageWhileRecordingStateTransitionPayload *)payload; @weakify(self); sc_managed_capturer_capture_still_image_completion_handler_t block = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) { captureImagePayload.block(fullScreenImage, metadata, error, state); [_performer perform:^{ @strongify(self); [self _cancelRecordingWithContext:context resource:resource]; }]; }; [SCCaptureWorker captureStillImageWithCaptureResource:resource aspectRatio:captureImagePayload.aspectRatio captureSessionID:captureImagePayload.captureSessionID shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource] completionHandler:block context:context]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; } - (void)_cancelRecordingWithContext:(NSString *)context resource:(SCCaptureResource *)resource { SCTraceODPCompatibleStart(2);
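// [Editor's note] This helper must already be on the capture queue: the still-image
// completion block above re-dispatches onto _performer before calling it, and the
// assert below enforces that invariant.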
SCAssertPerformer(_performer); [SCCaptureWorker cancelRecordingWithCaptureResource:resource]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.h ================================================ // // SCCaptureImageWhileRecordingStateTransitionPayload.h // Snapchat // // Created by Sun Lei on 22/02/2018. // #import "SCCaptureCommon.h" #import "SCStateTransitionPayload.h" #import @interface SCCaptureImageWhileRecordingStateTransitionPayload : SCStateTransitionPayload @property (nonatomic, readonly, strong) NSString *captureSessionID; @property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block; @property (nonatomic, readonly, assign) CGFloat aspectRatio; SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState captureSessionId:(NSString *)captureSessionID aspectRatio:(CGFloat)aspectRatio completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureImageWhileRecordingStateTransitionPayload.m ================================================ // // SCCaptureImageWhileRecordingStateTransitionPayload.m // Snapchat // // Created by Sun Lei on 22/02/2018. // #import "SCCaptureImageWhileRecordingStateTransitionPayload.h" @implementation SCCaptureImageWhileRecordingStateTransitionPayload - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState captureSessionId:(NSString *)captureSessionID aspectRatio:(CGFloat)aspectRatio completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block { self = [super initWithFromState:fromState toState:toState]; if (self) { _captureSessionID = captureSessionID; _aspectRatio = aspectRatio; _block = block; } return self; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureInitializedState.h ================================================ // // SCCaptureInitializedState.h // Snapchat // // Created by Jingtian Yang on 20/12/2017. // #import "SCCaptureBaseState.h" #import @class SCQueuePerformer; @interface SCCaptureInitializedState : SCCaptureBaseState - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureInitializedState.m ================================================ // // SCCaptureInitializedState.m // Snapchat // // Created by Jingtian Yang on 20/12/2017. 
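/* [Editor's note] In this state the capture session exists but is not running. The only
   substantive override below is startRunningWithCapturerToken:, which hands off to
   SCCaptureWorker and immediately requests a transition to SCCaptureRunningStateId. */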
// #import "SCCaptureInitializedState.h" #import "SCCapturerToken.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerV1_Private.h" #import #import @interface SCCaptureInitializedState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureInitializedState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { // No op. } - (SCCaptureStateMachineStateId)stateId { return SCCaptureInitializedStateId; } - (void)startRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRecordingState.h ================================================ // // SCCaptureRecordingState.h // Snapchat // // Created by Jingtian Yang on 12/01/2018. // #import "SCCaptureBaseState.h" #import @class SCQueuePerformer; @interface SCCaptureRecordingState : SCCaptureBaseState SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m ================================================ // // SCCaptureRecordingState.m // Snapchat // // Created by Jingtian Yang on 12/01/2018. 
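/* [Editor's note] Exits from the recording state, as implemented below: stopRecording
   and cancelRecording both return to SCCaptureRunningStateId, while captureStillImage
   packages its arguments into an SCCaptureImageWhileRecordingStateTransitionPayload and
   hops to SCCaptureImageWhileRecordingStateId so a snap can be taken mid-recording. */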
// #import "SCCaptureRecordingState.h" #import "SCCaptureImageWhileRecordingStateTransitionPayload.h" #import "SCCaptureRecordingStateTransitionPayload.h" #import "SCManagedCapturerV1_Private.h" #import "SCStateTransitionPayload.h" #import #import @interface SCCaptureRecordingState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureRecordingState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { SCAssertPerformer(resource.queuePerformer); SCAssert(payload.toState == [self stateId], @""); if (![payload isKindOfClass:[SCCaptureRecordingStateTransitionPayload class]]) { SCAssertFail(@"wrong payload passed in"); [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context]; return; } SCCaptureRecordingStateTransitionPayload *recordingPayload = (SCCaptureRecordingStateTransitionPayload *)payload; [SCCaptureWorker startRecordingWithCaptureResource:resource outputSettings:recordingPayload.outputSettings audioConfiguration:recordingPayload.configuration maxDuration:recordingPayload.maxDuration fileURL:recordingPayload.fileURL captureSessionID:recordingPayload.captureSessionID completionHandler:recordingPayload.block]; } - (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertPerformer(_performer); [SCCaptureWorker stopRecordingWithCaptureResource:resource]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertPerformer(_performer); [SCCaptureWorker cancelRecordingWithCaptureResource:resource]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (SCCaptureStateMachineStateId)stateId { return SCCaptureRecordingStateId; } - (void)captureStillImageWithResource:(SCCaptureResource *)resource aspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); SCCaptureImageWhileRecordingStateTransitionPayload *payload = [[SCCaptureImageWhileRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRecordingStateId toState:SCCaptureImageWhileRecordingStateId captureSessionId:captureSessionID aspectRatio:aspectRatio completionHandler:completionHandler]; [_delegate currentState:self requestToTransferToNewState:SCCaptureImageWhileRecordingStateId payload:payload context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
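// [Editor's note] Every legal handler in these states ends with this same pair: build a
// "Class/selector" name, then log it to the book keeper, which keeps the audit trail
// alongside the illegal-call reports coming from SCCaptureBaseState.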
[self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.h ================================================ // // SCCaptureRecordingStateTransitionPayload.h // Snapchat // // Created by Jingtian Yang on 12/01/2018. // #import "SCCaptureCommon.h" #import "SCManagedVideoCapturerOutputSettings.h" #import "SCStateTransitionPayload.h" #import #import @interface SCCaptureRecordingStateTransitionPayload : SCStateTransitionPayload @property (nonatomic, readonly, strong) SCManagedVideoCapturerOutputSettings *outputSettings; @property (nonatomic, readonly, strong) SCAudioConfiguration *configuration; @property (nonatomic, readonly, assign) NSTimeInterval maxDuration; @property (nonatomic, readonly, strong) NSURL *fileURL; @property (nonatomic, readonly, strong) NSString *captureSessionID; @property (nonatomic, readonly, copy) sc_managed_capturer_start_recording_completion_handler_t block; SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRecordingStateTransitionPayload.m ================================================ // // SCCaptureRecordingStateTransitionPayload.m // Snapchat // // Created by Jingtian Yang on 12/01/2018. // #import "SCCaptureRecordingStateTransitionPayload.h" @implementation SCCaptureRecordingStateTransitionPayload - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block { self = [super initWithFromState:fromState toState:toState]; if (self) { _outputSettings = outputSettings; _configuration = configuration; _maxDuration = maxDuration; _fileURL = fileURL; _captureSessionID = captureSessionID; _block = block; } return self; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRunningState.h ================================================ // // SCCaptureRunningState.h // Snapchat // // Created by Jingtian Yang on 08/01/2018. // #import "SCCaptureBaseState.h" #import @class SCQueuePerformer; @interface SCCaptureRunningState : SCCaptureBaseState - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureRunningState.m ================================================ // // SCCaptureRunningState.m // Snapchat // // Created by Jingtian Yang on 08/01/2018.
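/* [Editor's note] The running state is the hub of this machine: the handlers below fan
   out to image capture, recording, and scanning via payload-carrying transitions, and
   fall back to SCCaptureInitializedStateId once stopRunningWithCaptureToken: reports
   that the last token actually stopped the session. */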
// #import "SCCaptureRunningState.h" #import "SCCaptureImageStateTransitionPayload.h" #import "SCCaptureRecordingStateTransitionPayload.h" #import "SCCaptureWorker.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerV1_Private.h" #import "SCScanConfiguration.h" #import #import #import @interface SCCaptureRunningState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureRunningState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { // No op. } - (void)captureStillImageWithResource:(SCCaptureResource *)resource aspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); SCCaptureImageStateTransitionPayload *payload = [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId toState:SCCaptureImageStateId captureSessionId:captureSessionID aspectRatio:aspectRatio completionHandler:completionHandler]; [_delegate currentState:self requestToTransferToNewState:SCCaptureImageStateId payload:payload context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (SCCaptureStateMachineStateId)stateId { return SCCaptureRunningStateId; } - (void)startRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)stopRunningWithCapturerToken:(SCCapturerToken *)token resource:(SCCaptureResource *)resource completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertPerformer(_performer); SCLogCapturerInfo(@"Stop running asynchronously. token:%@", token); if ([[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token completionHandler:completionHandler context:context]) { [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context]; } NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource context:(NSString *)context { SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Start scan on preview asynchronously. configuration:%@", configuration); SCAssertPerformer(_performer); [SCCaptureWorker startScanWithScanConfiguration:configuration resource:resource]; [_delegate currentState:self requestToTransferToNewState:SCCaptureScanningStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)prepareForRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration context:(NSString *)context { SCAssertPerformer(_performer); SCTraceODPCompatibleStart(2); [SCCaptureWorker prepareForRecordingWithAudioConfiguration:configuration resource:resource]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)startRecordingWithResource:(SCCaptureResource *)resource audioConfiguration:(SCAudioConfiguration *)configuration outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler context:(NSString *)context { SCTraceODPCompatibleStart(2); SCAssertPerformer(_performer); SCCaptureRecordingStateTransitionPayload *payload = [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId toState:SCCaptureRecordingStateId outputSettings:outputSettings audioConfiguration:configuration maxDuration:maxDuration fileURL:fileURL captureSessionID:captureSessionID completionHandler:completionHandler]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRecordingStateId payload:payload context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { // Intentionally No Op, this will be removed once CCAM-13851 gets resolved. NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureScanningState.h ================================================ // // SCCaptureScanningState.h // Snapchat // // Created by Xiaokang Liu on 09/01/2018. // #import "SCCaptureBaseState.h" @class SCQueuePerformer; @interface SCCaptureScanningState : SCCaptureBaseState - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureScanningState.m ================================================ // // SCCaptureScanningState.m // Snapchat // // Created by Xiaokang Liu on 09/01/2018.
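/* [Editor's note] Scanning is entered from the running state via
   startScanWithScanConfiguration:; the only exit implemented below is
   stopScanWithCompletionHandler:, which stops the scanner through SCCaptureWorker and
   returns to SCCaptureRunningStateId. */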
// #import "SCCaptureScanningState.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerV1_Private.h" #import #import #import @interface SCCaptureScanningState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureScanningState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { SCAssert(delegate, @""); SCAssert(performer, @""); SCAssert(bookKeeper, @""); _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { // No op. } - (SCCaptureStateMachineStateId)stateId { return SCCaptureScanningStateId; } - (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource context:(NSString *)context { SCAssertPerformer(_performer); SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"stop scan asynchronously."); [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource]; [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } - (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context { // Intentionally No Op, this will be removed once CCAM-13851 gets resolved. NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.h ================================================ // // SCCaptureUninitializedState.h // Snapchat // // Created by Lin Jia on 10/19/17. // // #import "SCCaptureBaseState.h" #import /* State which handles capture initialization, which should be used only once for every app life span. */ @class SCQueuePerformer; @interface SCCaptureUninitializedState : SCCaptureBaseState - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCCaptureUninitializedState.m ================================================ // // SCCaptureUninitializedState.m // Snapchat // // Created by Lin Jia on 10/19/17. // // #import "SCCaptureUninitializedState.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerV1_Private.h" #import #import #import @interface SCCaptureUninitializedState () { __weak id _delegate; SCQueuePerformer *_performer; } @end @implementation SCCaptureUninitializedState - (instancetype)initWithPerformer:(SCQueuePerformer *)performer bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper delegate:(id)delegate { self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate]; if (self) { _delegate = delegate; _performer = performer; } return self; } - (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload resource:(SCCaptureResource *)resource context:(NSString *)context { // No op.
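// [Editor's note] Intentionally empty: this is the machine's start state, installed by
// SCCaptureStateMachineContext at init before any capture resource is live; real work
// begins when initializeCaptureWithDevicePosition: arrives below.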
} - (SCCaptureStateMachineStateId)stateId { return SCCaptureUninitializedStateId; } - (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition resource:(SCCaptureResource *)resource completionHandler:(dispatch_block_t)completionHandler context:(NSString *)context { SCAssertPerformer(_performer); SCTraceODPCompatibleStart(2); SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition); // TODO: we need to push completionHandler to a payload and let initializedState handle it. [[SCManagedCapturerV1 sharedInstance] setupWithDevicePosition:devicePosition completionHandler:completionHandler]; [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context]; NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)]; [self.bookKeeper logAPICalled:apiName context:context]; } @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCStateTransitionPayload.h ================================================ // // SCStateTransitionPayload.h // Snapchat // // Created by Lin Jia on 1/8/18. // #import "SCCaptureStateUtil.h" #import @interface SCStateTransitionPayload : NSObject @property (nonatomic, readonly, assign) SCCaptureStateMachineStateId fromState; @property (nonatomic, readonly, assign) SCCaptureStateMachineStateId toState; SC_INIT_AND_NEW_UNAVAILABLE - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState; @end ================================================ FILE: ManagedCapturer/StateMachine/States/SCStateTransitionPayload.m ================================================ // // SCStateTransitionPayload.m // Snapchat // // Created by Lin Jia on 1/8/18. // #import "SCStateTransitionPayload.h" #import @implementation SCStateTransitionPayload - (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState { self = [super init]; if (self) { SCAssert(fromState != toState, @""); SCAssert(fromState > SCCaptureBaseStateId && fromState < SCCaptureStateMachineStateIdCount, @""); SCAssert(toState > SCCaptureBaseStateId && toState < SCCaptureStateMachineStateIdCount, @""); _fromState = fromState; _toState = toState; } return self; } @end ================================================ FILE: ManagedCapturer/UIScreen+Debug.h ================================================ // // UIScreen+Debug.h // Snapchat // // Created by Derek Peirce on 6/1/17. // Copyright © 2017 Snapchat, Inc. All rights reserved.
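/* [Editor's note] The matching UIScreen+Debug.m below swizzles -setBrightness: with a
   logging wrapper when performance logging is enabled. Because
   method_exchangeImplementations swaps the two IMPs, the seemingly recursive
   [self logged_setBrightness:] call inside the wrapper actually invokes the original
   setter. */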
#import

@interface UIScreen (Debug)

@end

================================================
FILE: ManagedCapturer/UIScreen+Debug.m
================================================
#import "UIScreen+Debug.h"

#import
#import
#import

@implementation UIScreen (Debug)

+ (void)load
{
    if (SCIsPerformanceLoggingEnabled()) {
        static dispatch_once_t once_token;
        dispatch_once(&once_token, ^{
            // Swap -setBrightness: with the logging variant below.
            SEL setBrightnessSelector = @selector(setBrightness:);
            SEL setBrightnessLoggerSelector = @selector(logged_setBrightness:);
            Method originalMethod = class_getInstanceMethod(self, setBrightnessSelector);
            Method extendedMethod = class_getInstanceMethod(self, setBrightnessLoggerSelector);
            method_exchangeImplementations(originalMethod, extendedMethod);
        });
    }
}

- (void)logged_setBrightness:(CGFloat)brightness
{
    SCLogGeneralInfo(@"Setting brightness from %f to %f", self.brightness, brightness);
    // After the swizzle in +load, this call invokes the original -setBrightness:.
    [self logged_setBrightness:brightness];
}

@end

================================================
FILE: README.md
================================================
# Snapchat-Source-Code-Leak

Reupload of the Epic Leak by Khaled Alshehri

![Alt text](https://i.imgur.com/3Cz0sof.png "SnapChat_Leak_2018-by-Jonny-Banana")

https://github.com/i5xx

Enjoy it!!!

https://www.youtube.com/watch?v=xEqCbRevNlo

================================================
FILE: Resource/SCCaptureResource.h
================================================
//
//  SCCaptureResource.h
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCManagedCapturerLensAPI.h"
#import "SCManagedCapturerListenerAnnouncer.h"
#import "SCSnapCreationTriggers.h"

#import
#import
#import

/*
 In general, the function of SCCapturer is to use some resources (such as
 SCManagedCapturerListenerAnnouncer) to do something (such as announce an event).
 SCCaptureResource abstracts away the "resources" part of SCCapturer. It has no APIs itself;
 it is the resource object that gets passed around by the capturer V2 state machine.
 */

@class SCManagedDeviceCapacityAnalyzer;
@class SCManagedCapturePreviewLayerController;
@class ARSession;
@class SCManagedVideoScanner;
@class LSAGLView;
@protocol SCManagedCapturerLSAComponentTrackerAPI;
@class SCManagedStillImageCapturer;
@class SCManagedVideoCapturer;
@class SCQueuePerformer;
@class SCManagedVideoFrameSampler;
@class SCManagedDroppedFramesReporter;
@class SCManagedVideoStreamReporter;
@protocol SCManagedCapturerGLViewManagerAPI;
@class SCCapturerToken;
@class SCSingleFrameStreamCapturer;
@class SCManagedFrontFlashController;
@class SCManagedVideoCapturerHandler;
@class SCManagedStillImageCapturerHandler;
@class SCManagedDeviceCapacityAnalyzerHandler;
@class SCManagedCaptureDeviceDefaultZoomHandler;
@class SCManagedCaptureDeviceHandler;
@class SCBlackCameraNoOutputDetector;
@class SCCaptureSessionFixer;
@protocol SCCaptureFaceDetector;
@protocol SCManagedCapturerLensAPI;
@protocol SCManagedCapturerARImageCaptureProvider;
@class SCManagedCapturerARSessionHandler;
@class SCManagedCaptureDeviceSubjectAreaHandler;
@class SCManagedCaptureSession;
@class SCBlackCameraDetector;
@protocol SCLensProcessingCore;
@protocol SCManagedCapturerLensAPI;
@protocol SCManagedCapturePreviewLayerControllerDelegate;

typedef enum : NSUInteger {
    SCManagedCapturerStatusUnknown = 0,
    SCManagedCapturerStatusReady,
    SCManagedCapturerStatusRunning,
} SCManagedCapturerStatus;

@protocol SCDeviceMotionProvider
@property (nonatomic, readonly) BOOL isUnderDeviceMotion;
@end

@protocol SCFileInputDecider
@property (nonatomic, readonly) BOOL shouldProcessFileInput;
@property (nonatomic, readonly) NSURL *fileURL;
@end

@interface SCCaptureResource : NSObject

@property (nonatomic, readwrite, assign) SCManagedCapturerStatus status;
@property (nonatomic, readwrite, strong) SCManagedCapturerState *state;
@property (nonatomic, readwrite, strong) SCManagedCaptureDevice *device;
@property (nonatomic, readwrite, strong) id lensProcessingCore;
@property (nonatomic, readwrite, strong) id lensAPIProvider;
@property (nonatomic, readwrite, strong) ARSession *arSession NS_AVAILABLE_IOS(11_0);
@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *arImageCapturer NS_AVAILABLE_IOS(11_0);
@property (nonatomic, readwrite, strong) SCManagedCaptureSession *managedSession;
@property (nonatomic, readwrite, strong) id videoDataSource;
@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzer *deviceCapacityAnalyzer;
@property (nonatomic, readwrite, strong) SCManagedVideoScanner *videoScanner;
@property (nonatomic, readwrite, strong) SCManagedCapturerListenerAnnouncer *announcer;
@property (nonatomic, readwrite, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, readwrite, strong) id videoPreviewGLViewManager;
@property (nonatomic, readwrite, strong) SCManagedStillImageCapturer *stillImageCapturer;
@property (nonatomic, readwrite, strong) SCManagedVideoCapturer *videoCapturer;
@property (nonatomic, readwrite, strong) SCQueuePerformer *queuePerformer;
@property (nonatomic, readwrite, strong) SCManagedVideoFrameSampler *videoFrameSampler;
@property (nonatomic, readwrite, strong) SCManagedDroppedFramesReporter *droppedFramesReporter;
@property (nonatomic, readwrite, strong) SCManagedVideoStreamReporter *videoStreamReporter;
// INTERNAL USE ONLY
@property (nonatomic, readwrite, strong) SCManagedFrontFlashController *frontFlashController;
@property (nonatomic, readwrite, strong) SCManagedVideoCapturerHandler *videoCapturerHandler;
@property (nonatomic, readwrite, strong) SCManagedStillImageCapturerHandler *stillImageCapturerHandler;
@property (nonatomic, readwrite, strong) SCManagedDeviceCapacityAnalyzerHandler *deviceCapacityAnalyzerHandler;
@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceDefaultZoomHandler *deviceZoomHandler;
@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceHandler *captureDeviceHandler;
@property (nonatomic, readwrite, strong) id captureFaceDetector;
@property (nonatomic, readwrite, strong) FBKVOController *kvoController;
@property (nonatomic, readwrite, strong) id lsaTrackingComponentHandler;
@property (nonatomic, readwrite, strong) SCManagedCapturerARSessionHandler *arSessionHandler;
@property (nonatomic, assign) SEL completeARSessionShutdown;
@property (nonatomic, assign) SEL handleAVSessionStatusChange;
@property (nonatomic, assign) BOOL videoRecording;
@property (nonatomic, assign) NSInteger numRetriesFixAVCaptureSessionWithCurrentSession;
@property (nonatomic, assign) BOOL appInBackground;
@property (nonatomic, assign) NSUInteger streamingSequence;
@property (nonatomic, assign) BOOL stillImageCapturing;
@property (nonatomic, readwrite, strong) NSTimer *livenessTimer;
@property (nonatomic, readwrite, strong) NSMutableSet *tokenSet;
@property (nonatomic, readwrite, strong) SCSingleFrameStreamCapturer *frameCap;
@property (nonatomic, readwrite, strong) id sampleBufferDisplayController;
@property (nonatomic, readwrite, strong) SCSnapCreationTriggers *snapCreationTriggers;

// Different from most of the properties above, the following are main-thread properties.
@property (nonatomic, assign) BOOL allowsZoom;
@property (nonatomic, assign) NSUInteger numRetriesFixInconsistencyWithCurrentSession;
@property (nonatomic, readwrite, strong) NSMutableDictionary *debugInfoDict;
@property (nonatomic, assign) BOOL notificationRegistered;
@property (nonatomic, readwrite, strong) SCManagedCaptureDeviceSubjectAreaHandler *deviceSubjectAreaHandler;
@property (nonatomic, assign) SEL sessionRuntimeError;
@property (nonatomic, assign) SEL livenessConsistency;

// TODO: these properties will be refactored into the SCCaptureSessionFixer class.
// The refactor will be in a separate PR.

// Timestamp when _fixAVSessionIfNecessary is called
@property (nonatomic, assign) NSTimeInterval lastFixSessionTimestamp;

// Timestamp when session runtime error is handled
@property (nonatomic, assign) NSTimeInterval lastSessionRuntimeErrorTime;

// Whether a fix that recreates the session has already been scheduled
@property (nonatomic, assign) BOOL isRecreateSessionFixScheduled;

@property (nonatomic, readwrite, strong) SCCaptureSessionFixer *captureSessionFixer;
@property (nonatomic, readwrite, strong) SCBlackCameraDetector *blackCameraDetector;
@property (nonatomic, readwrite, strong) id deviceMotionProvider;
@property (nonatomic, readwrite, strong) id arImageCaptureProvider;
@property (nonatomic, readwrite, strong) id fileInputDecider;
@property (nonatomic, readwrite, strong) id previewLayerControllerDelegate;

@end

================================================
FILE: Resource/SCCaptureResource.m
================================================
//
//  SCCaptureResource.m
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureResource.h"

#import "SCBlackCameraDetector.h"
#import "SCManagedCapturerState.h"
#import "SCManagedFrontFlashController.h"
#import "SCManagedVideoCapturer.h"

#import
#import
#import
#import

@interface SCCaptureResource () {
    FBKVOController *_previewHiddenKVO;
}
@end

@implementation SCCaptureResource

- (SCManagedFrontFlashController *)frontFlashController
{
    SCTraceODPCompatibleStart(2);
    SCAssert([self.queuePerformer isCurrentPerformer], @"");
    if (!_frontFlashController) {
        _frontFlashController = [[SCManagedFrontFlashController alloc] init];
    }
    return _frontFlashController;
}

- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)layer
{
    SC_GUARD_ELSE_RETURN(layer != _videoPreviewLayer);
    if (_videoPreviewLayer) {
        [_previewHiddenKVO unobserve:_videoPreviewLayer];
    }
    _videoPreviewLayer = layer;
    SC_GUARD_ELSE_RETURN(_videoPreviewLayer);
    if (!_previewHiddenKVO) {
        _previewHiddenKVO = [[FBKVOController alloc] initWithObserver:self];
    }
    [_previewHiddenKVO observe:_videoPreviewLayer
                       keyPath:@keypath(_videoPreviewLayer.hidden)
                       options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                         block:^(id observer, id object, NSDictionary *change) {
                             BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue];
                             BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue];
                             if (oldValue != newValue) {
                                 [_blackCameraDetector capturePreviewDidBecomeVisible:!newValue];
                             }
                         }];
}

@end

================================================
FILE: Tweaks/SCCameraTweaks.h
================================================
//
//  SCCameraTweaks.h
//  Snapchat
//
//  Created by Liu Liu on 9/16/15.
//  Copyright © 2015 Snapchat, Inc. All rights reserved.
//

#import
#import
#import
#import
#import
#import

// Core Camera
typedef NS_ENUM(NSUInteger, SCManagedCaptureDeviceZoomHandlerType) {
    SCManagedCaptureDeviceDefaultZoom,
    SCManagedCaptureDeviceSavitzkyGolayFilter,
    SCManagedCaptureDeviceLinearInterpolation,
};

typedef NS_ENUM(NSUInteger, SCCameraTweaksStrategyType) {
    SCCameraTweaksStrategyFollowABTest = 0,
    SCCameraTweaksStrategyOverrideToYes,
    SCCameraTweaksStrategyOverrideToNo
};

typedef NS_ENUM(NSUInteger, SCCameraHandsFreeModeType) {
    SCCameraHandsFreeModeTypeABTest = 0,
    SCCameraHandsFreeModeTypeDisabled,
    SCCameraHandsFreeModeTypeMainOnly,
    SCCameraHandsFreeModeTypeChatMoveCaptureButton,
    SCCameraHandsFreeModeTypeMainAndChat,
    SCCameraHandsFreeModeTypeLeftOfCapture,
};

/// Face detection and focus strategy in Tweak. There are fewer options in the internal Tweaks than in the A/B
/// testing strategies.
typedef NS_ENUM(NSUInteger, SCCameraFaceFocusModeStrategyType) {
    SCCameraFaceFocusModeStrategyTypeABTest = 0,
    SCCameraFaceFocusModeStrategyTypeDisabled,     // Disabled for both cameras.
    SCCameraFaceFocusModeStrategyTypeOffByDefault, // Enabled for both cameras, but is off by default.
    SCCameraFaceFocusModeStrategyTypeOnByDefault,  // Enabled for both cameras, and is on by default.
};

typedef NS_ENUM(NSUInteger, SCCameraFaceFocusDetectionMethodType) {
    SCCameraFaceFocusDetectionMethodTypeABTest = 0,
    SCCameraFaceFocusDetectionMethodTypeCIDetector, // Use SCCaptureCoreImageFaceDetector
    SCCameraFaceFocusDetectionMethodTypeAVMetadata, // Use SCCaptureMetadataOutputDetector
};

SC_EXTERN_C_BEGIN

extern SCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void);
extern BOOL SCCameraTweaksBlackCameraRecoveryEnabled(void);
extern BOOL SCCameraTweaksMicPermissionEnabled(void);
extern BOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void);
extern BOOL SCCameraTweaksEnableHandsFreeXToCancel(void);
extern SCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void);
extern BOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void);
extern BOOL SCCameraTweaksEnablePreviewPresenterFastPreview(void);
extern BOOL SCCameraTweaksEnableCaptureSharePerformer(void);
extern BOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition);
extern BOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition);
extern SCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod(void);
extern CGFloat SCCameraFaceFocusMinFaceSize(void);
extern BOOL SCCameraTweaksSessionLightWeightFixEnabled(void);

SC_EXTERN_C_END

static inline BOOL SCCameraTweaksEnableVideoStabilization(void)
{
    return FBTweakValue(@"Camera", @"Core Camera", @"Enable video stabilization", NO);
}

static inline BOOL SCCameraTweaksEnableForceTouchToToggleCamera(void)
{
    return FBTweakValue(@"Camera", @"Recording", @"Force Touch to Toggle", NO);
}

static inline BOOL SCCameraTweaksEnableStayOnCameraAfterPostingStory(void)
{
    return FBTweakValue(@"Camera", @"Story", @"Stay on camera after posting", NO);
}

static inline BOOL SCCameraTweaksEnableKeepLastFrameOnCamera(void)
{
    return FBTweakValue(@"Camera", @"Core Camera", @"Keep last frame on camera", YES);
}

static inline BOOL SCCameraTweaksSmoothAutoFocusWhileRecording(void)
{
    return FBTweakValue(@"Camera", @"Core Camera", @"Smooth autofocus while recording", YES);
}

static inline NSInteger SCCameraExposureAdjustmentMode(void)
{
    return [FBTweakValue(
        @"Camera", @"Core Camera", @"Adjust Exposure", (id) @0,
        (@{ @0 : @"NO", @1 : @"Dynamic enhancement", @2 : @"Night
vision", @3 : @"Inverted night vision" })) integerValue]; } static inline BOOL SCCameraTweaksRotateToggleCameraButton(void) { return SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Rotate Toggle-Camera Button", NO); } static inline CGFloat SCCameraTweaksRotateToggleCameraButtonTime(void) { return FBTweakValue(@"Camera", @"Core Camera", @"Toggle-Camera Button Rotation Time", 0.3); } static inline BOOL SCCameraTweaksDefaultPortrait(void) { return FBTweakValue(@"Camera", @"Core Camera", @"Default to Portrait Orientation", YES); } // For test purpose static inline BOOL SCCameraTweaksTranscodingAlwaysFails(void) { return FBTweakValue(@"Camera", @"Core Camera", @"Transcoding always fails", NO); } // This tweak disables the video masking behavior of the snap overlays; // Intended to be used by curators who are on-site snapping special events. // Ping news-dev@snapchat.com for any questions/comments static inline BOOL SCCameraTweaksDisableOverlayVideoMask(void) { return FBTweakValue(@"Camera", @"Creative Tools", @"Disable Overlay Video Masking", NO); } static inline NSInteger SCCameraTweaksDelayTurnOnFilters(void) { return [FBTweakValue(@"Camera", @"Core Camera", @"Delay turn on filter", (id) @0, (@{ @0 : @"Respect A/B testing", @1 : @"Override to YES", @2 : @"Override to NO" })) integerValue]; } static inline BOOL SCCameraTweaksEnableExposurePointObservation(void) { return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Observe Exposure Point", NO); } static inline BOOL SCCameraTweaksEnableFocusPointObservation(void) { return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Observe Focus Point", NO); } static inline CGFloat SCCameraTweaksSmoothZoomThresholdTime() { return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Threshold time", 0.3); } static inline CGFloat SCCameraTweaksSmoothZoomThresholdFactor() { return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Threshold factor diff", 0.25); } static inline CGFloat SCCameraTweaksSmoothZoomIntermediateFramesPerSecond() { return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Intermediate fps", 60); } static inline CGFloat SCCameraTweaksSmoothZoomDelayTolerantTime() { return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Delay tolerant time", 0.15); } static inline CGFloat SCCameraTweaksSmoothZoomMinStepLength() { return FBTweakValue(@"Camera", @"Zoom Strategy - Linear Interpolation", @"Min step length", 0.05); } static inline CGFloat SCCameraTweaksExposureDeadline() { return FBTweakValue(@"Camera", @"Adjust Exposure", @"Exposure Deadline", 0.2); } static inline BOOL SCCameraTweaksKillFrontCamera(void) { return SCTweakValueWithHalt(@"Camera", @"Debugging", @"Kill Front Camera", NO); } static inline BOOL SCCameraTweaksKillBackCamera(void) { return SCTweakValueWithHalt(@"Camera", @"Debugging", @"Kill Back Camera", NO); } #if TARGET_IPHONE_SIMULATOR static inline BOOL SCCameraTweaksUseRealMockImage(void) { return FBTweakValue(@"Camera", @"Debugging", @"Use real mock image on simulator", YES); } #endif static inline CGFloat SCCameraTweaksShortPreviewTransitionAnimationDuration() { return FBTweakValue(@"Camera", @"Preview Transition", @"Short Animation Duration", 0.35); } static inline SCCameraTweaksStrategyType SCCameraTweaksPreviewTransitionAnimationDurationStrategy() { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Preview Transition", @"Enable Short Animation Duration", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ 
@(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } static inline CGFloat SCCameraTweaksEnablePortraitModeButton(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Enable Button", NO); } static inline CGFloat SCCameraTweaksDepthBlurForegroundThreshold(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Foreground Blur Threshold", 0.3); } static inline CGFloat SCCameraTweaksDepthBlurBackgroundThreshold(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Background Blur Threshold", 0.1); } static inline CGFloat SCCameraTweaksBlurSigma(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Blur Sigma", 4.0); } static inline BOOL SCCameraTweaksEnableFilterInputFocusRect(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Filter Input Focus Rect", NO); } static inline BOOL SCCameraTweaksEnablePortraitModeTapToFocus(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Tap to Focus", NO); } static inline BOOL SCCameraTweaksEnablePortraitModeAutofocus(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Autofocus", NO); } static inline BOOL SCCameraTweaksDepthToGrayscaleOverride(void) { return FBTweakValue(@"Camera", @"Core Camera - Portrait Mode", @"Depth to Grayscale Override", NO); } static inline SCCameraTweaksStrategyType SCCameraTweaksEnableHandsFreeXToCancelStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Hands-Free Recording", @"X to Cancel", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } static inline SCCameraHandsFreeModeType SCCameraTweaksHandsFreeModeType() { NSNumber *strategy = SCTweakValueWithHalt( @"Camera", @"Hands-Free Recording", @"Enabled", (id) @(SCCameraHandsFreeModeTypeABTest), (@{ @(SCCameraHandsFreeModeTypeABTest) : @"Respect A/B testing", @(SCCameraHandsFreeModeTypeDisabled) : @"Disable", @(SCCameraHandsFreeModeTypeMainOnly) : @"Main Camera only", @(SCCameraHandsFreeModeTypeChatMoveCaptureButton) : @"Main Camera + move Chat capture button", @(SCCameraHandsFreeModeTypeMainAndChat) : @"Main + Chat Cameras", @(SCCameraHandsFreeModeTypeLeftOfCapture) : @"Left of Main + Chat Cameras" })); return (SCCameraHandsFreeModeType)[strategy unsignedIntegerValue]; } static inline SCCameraTweaksStrategyType SCCameraTweaksPreviewPresenterFastPreviewStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Preview Presenter", @"Fast Preview", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } static inline NSInteger SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera - Capture Keep Recorded Video", @"Enable Capture Keep Recorded Video", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect 
A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } static inline NSInteger SCCameraTweaksEnableCaptureSharePerformerStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera - Capture Share Performer", @"Enable Capture Share Performer", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } static inline SCCameraFaceFocusModeStrategyType SCCameraTweaksFaceFocusStrategy() { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Enable Face Focus", (id) @(SCCameraFaceFocusModeStrategyTypeABTest), (@{ @(SCCameraFaceFocusModeStrategyTypeABTest) : @"Respect A/B testing", @(SCCameraFaceFocusModeStrategyTypeDisabled) : @"Disabled", @(SCCameraFaceFocusModeStrategyTypeOffByDefault) : @"Enabled, off by default", @(SCCameraFaceFocusModeStrategyTypeOnByDefault) : @"Enabled, on by default", })); return (SCCameraFaceFocusModeStrategyType)[strategy unsignedIntegerValue]; } static inline SCCameraFaceFocusDetectionMethodType SCCameraTweaksFaceFocusDetectionMethodType() { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Detection Method", (id) @(SCCameraFaceFocusDetectionMethodTypeABTest), (@{ @(SCCameraFaceFocusDetectionMethodTypeABTest) : @"Respect A/B testing", @(SCCameraFaceFocusDetectionMethodTypeCIDetector) : @"CIDetector", @(SCCameraFaceFocusDetectionMethodTypeAVMetadata) : @"AVMetadata", })); return (SCCameraFaceFocusDetectionMethodType)[strategy unsignedIntegerValue]; } static inline int SCCameraTweaksFaceFocusDetectionFrequency() { return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Detection Frequency", 3, 1, 30); } static inline BOOL SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting() { return SCTweakValueWithHalt(@"Camera", @"Core Camera - Face Focus", @"Min Face Size Respect AB", YES); } static inline CGFloat SCCameraTweaksFaceFocusMinFaceSizeValue() { return FBTweakValue(@"Camera", @"Core Camera - Face Focus", @"Min Face Size", 0.25, 0.01, 0.5); } static inline BOOL SCCameraTweaksEnableDualCamera(void) { return SCTweakValueWithHalt(@"Camera", @"Core Camera - Dual Camera", @"Enable Dual Camera", NO); } ================================================ FILE: Tweaks/SCCameraTweaks.m ================================================ // // SCCameraTweaks.m // Snapchat // // Created by Liu Liu on 10/4/16. // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import "SCCameraTweaks.h" #import "SCManagedCapturePreviewLayerController.h" #import #import #import SCManagedCaptureDeviceZoomHandlerType SCCameraTweaksDeviceZoomHandlerStrategy(void) { NSNumber *strategyNumber = SCTweakValueWithHalt( @"Camera", @"Core Camera", @"Zoom Strategy", @(SCIsMasterBuild() ? 
SCManagedCaptureDeviceLinearInterpolation : SCManagedCaptureDeviceDefaultZoom), (@{ @(SCManagedCaptureDeviceDefaultZoom) : @"Default", @(SCManagedCaptureDeviceSavitzkyGolayFilter) : @"Savitzky-Golay Filter", @(SCManagedCaptureDeviceLinearInterpolation) : @"Linear Interpolation" })); return (SCManagedCaptureDeviceZoomHandlerType)[strategyNumber integerValue]; } BOOL SCCameraTweaksEnableFaceDetectionFocus(SCManagedCaptureDevicePosition captureDevicePosition) { SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO); SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO); BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront); BOOL isEnabled = NO; SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy(); switch (option) { case SCCameraFaceFocusModeStrategyTypeABTest: if (isFrontCamera) { isEnabled = SCExperimentWithFaceDetectionFocusFrontCameraEnabled(); } else { isEnabled = SCExperimentWithFaceDetectionFocusBackCameraEnabled(); } break; case SCCameraFaceFocusModeStrategyTypeDisabled: isEnabled = NO; break; case SCCameraFaceFocusModeStrategyTypeOffByDefault: case SCCameraFaceFocusModeStrategyTypeOnByDefault: isEnabled = YES; break; } return isEnabled; } BOOL SCCameraTweaksTurnOnFaceDetectionFocusByDefault(SCManagedCaptureDevicePosition captureDevicePosition) { SC_GUARD_ELSE_RETURN_VALUE([SCDeviceName isIphone], NO); SC_GUARD_ELSE_RETURN_VALUE(captureDevicePosition != SCManagedCaptureDevicePositionBackDualCamera, NO); BOOL isFrontCamera = (captureDevicePosition == SCManagedCaptureDevicePositionFront); BOOL isOnByDefault = NO; SCCameraFaceFocusModeStrategyType option = SCCameraTweaksFaceFocusStrategy(); switch (option) { case SCCameraFaceFocusModeStrategyTypeABTest: if (isFrontCamera) { isOnByDefault = SCExperimentWithFaceDetectionFocusFrontCameraOnByDefault(); } else { isOnByDefault = SCExperimentWithFaceDetectionFocusBackCameraOnByDefault(); } break; case SCCameraFaceFocusModeStrategyTypeDisabled: case SCCameraFaceFocusModeStrategyTypeOffByDefault: isOnByDefault = NO; break; case SCCameraFaceFocusModeStrategyTypeOnByDefault: isOnByDefault = YES; break; } return isOnByDefault; } SCCameraFaceFocusDetectionMethodType SCCameraFaceFocusDetectionMethod() { static SCCameraFaceFocusDetectionMethodType detectionMethodType; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ SCCameraFaceFocusDetectionMethodType option = SCCameraTweaksFaceFocusDetectionMethodType(); switch (option) { case SCCameraFaceFocusDetectionMethodTypeABTest: { // Check the validity of AB value. NSUInteger experimentValue = SCExperimentWithFaceDetectionFocusDetectionMethod(); if (experimentValue >= SCCameraFaceFocusDetectionMethodTypeCIDetector && experimentValue <= SCCameraFaceFocusDetectionMethodTypeAVMetadata) { detectionMethodType = experimentValue; } else { // Use CIDetector by default. 
detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector; } } break; case SCCameraFaceFocusDetectionMethodTypeAVMetadata: detectionMethodType = SCCameraFaceFocusDetectionMethodTypeAVMetadata; break; case SCCameraFaceFocusDetectionMethodTypeCIDetector: detectionMethodType = SCCameraFaceFocusDetectionMethodTypeCIDetector; break; } }); return detectionMethodType; } CGFloat SCCameraFaceFocusMinFaceSize(void) { static CGFloat minFaceSize; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ if (SCCameraTweaksFaceFocusMinFaceSizeRespectABTesting()) { minFaceSize = (CGFloat)SCExperimentWithFaceDetectionMinFaceSize(); } else { minFaceSize = SCCameraTweaksFaceFocusMinFaceSizeValue(); } if (minFaceSize < 0.01 || minFaceSize > 0.5) { minFaceSize = 0.25; // Default value is 0.25 } }); return minFaceSize; } BOOL SCCameraTweaksEnableCaptureKeepRecordedVideo(void) { static BOOL enableCaptureKeepRecordedVideo; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksEnableCaptureKeepRecordedVideoStrategy()) { case SCCameraTweaksStrategyOverrideToYes: { enableCaptureKeepRecordedVideo = YES; break; } case SCCameraTweaksStrategyOverrideToNo: { enableCaptureKeepRecordedVideo = NO; break; } case SCCameraTweaksStrategyFollowABTest: { enableCaptureKeepRecordedVideo = SCExperimentWithCaptureKeepRecordedVideo(); break; } default: { enableCaptureKeepRecordedVideo = NO; break; } } }); return enableCaptureKeepRecordedVideo; } static inline SCCameraTweaksStrategyType SCCameraTweaksBlackCameraRecoveryStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Black Camera Recovery", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } BOOL SCCameraTweaksBlackCameraRecoveryEnabled(void) { static BOOL enabled; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksBlackCameraRecoveryStrategy()) { case SCCameraTweaksStrategyOverrideToYes: enabled = YES; break; case SCCameraTweaksStrategyOverrideToNo: enabled = NO; break; case SCCameraTweaksStrategyFollowABTest: enabled = SCExperimentWithBlackCameraRecovery(); break; default: enabled = NO; break; } }); return enabled; } static inline SCCameraTweaksStrategyType SCCameraTweaksMicrophoneNotificationStrategy(void) { NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Mic Notification", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } BOOL SCCameraTweaksMicPermissionEnabled(void) { static BOOL enabled; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksMicrophoneNotificationStrategy()) { case SCCameraTweaksStrategyOverrideToYes: enabled = YES; break; case SCCameraTweaksStrategyOverrideToNo: enabled = NO; break; case SCCameraTweaksStrategyFollowABTest: enabled = SCExperimentWithMicrophonePermissionNotificationEnabled(); break; default: enabled = NO; break; } }); return enabled; } SCCameraHandsFreeModeType SCCameraTweaksHandsFreeMode(void) { static SCCameraHandsFreeModeType handsFreeModeType; static dispatch_once_t 
onceToken; dispatch_once(&onceToken, ^{ SCCameraHandsFreeModeType option = SCCameraTweaksHandsFreeModeType(); switch (option) { case SCCameraHandsFreeModeTypeDisabled: handsFreeModeType = SCCameraHandsFreeModeTypeDisabled; break; case SCCameraHandsFreeModeTypeMainOnly: handsFreeModeType = SCCameraHandsFreeModeTypeMainOnly; break; case SCCameraHandsFreeModeTypeChatMoveCaptureButton: handsFreeModeType = SCCameraHandsFreeModeTypeChatMoveCaptureButton; break; case SCCameraHandsFreeModeTypeMainAndChat: handsFreeModeType = SCCameraHandsFreeModeTypeMainAndChat; break; case SCCameraHandsFreeModeTypeLeftOfCapture: handsFreeModeType = SCCameraHandsFreeModeTypeLeftOfCapture; break; case SCCameraHandsFreeModeTypeABTest: default: handsFreeModeType = SCExperimentWithHandsFreeMode(); break; } }); return handsFreeModeType; } BOOL SCCameraTweaksEnableHandsFreeXToCancel(void) { static BOOL enableHandsFreeXToCancel; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksEnableHandsFreeXToCancelStrategy()) { case SCCameraTweaksStrategyOverrideToYes: { enableHandsFreeXToCancel = YES; break; } case SCCameraTweaksStrategyOverrideToNo: { enableHandsFreeXToCancel = NO; break; } case SCCameraTweaksStrategyFollowABTest: { enableHandsFreeXToCancel = SCExperimentWithHandsFreeXToCancel(); break; } default: { enableHandsFreeXToCancel = NO; break; } } }); return enableHandsFreeXToCancel; } BOOL SCCameraTweaksEnableShortPreviewTransitionAnimationDuration(void) { static BOOL enableShortPreviewTransitionAnimationDuration; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksPreviewTransitionAnimationDurationStrategy()) { case SCCameraTweaksStrategyOverrideToYes: { enableShortPreviewTransitionAnimationDuration = YES; break; } case SCCameraTweaksStrategyOverrideToNo: { enableShortPreviewTransitionAnimationDuration = NO; break; } case SCCameraTweaksStrategyFollowABTest: { enableShortPreviewTransitionAnimationDuration = SCExperimentWithShortPreviewTransitionAnimationDuration(); break; } default: { enableShortPreviewTransitionAnimationDuration = YES; break; } } }); return enableShortPreviewTransitionAnimationDuration; } BOOL SCCameraTweaksEnablePreviewPresenterFastPreview(void) { static BOOL enablePreviewPresenterFastPreview; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksPreviewPresenterFastPreviewStrategy()) { case SCCameraTweaksStrategyOverrideToYes: { enablePreviewPresenterFastPreview = YES; break; } case SCCameraTweaksStrategyOverrideToNo: { enablePreviewPresenterFastPreview = NO; break; } case SCCameraTweaksStrategyFollowABTest: { enablePreviewPresenterFastPreview = SCExperimentWithPreviewPresenterFastPreview(); break; } default: { enablePreviewPresenterFastPreview = NO; break; } } }); return enablePreviewPresenterFastPreview; } BOOL SCCameraTweaksEnableCaptureSharePerformer(void) { static BOOL enableCaptureSharePerformer; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksEnableCaptureSharePerformerStrategy()) { case SCCameraTweaksStrategyOverrideToYes: { enableCaptureSharePerformer = YES; break; } case SCCameraTweaksStrategyOverrideToNo: { enableCaptureSharePerformer = NO; break; } case SCCameraTweaksStrategyFollowABTest: { enableCaptureSharePerformer = SCExperimentWithCaptureSharePerformer(); break; } default: { enableCaptureSharePerformer = NO; break; } } }); return enableCaptureSharePerformer; } static inline SCCameraTweaksStrategyType SCCameraTweaksSessionLightWeightFixStrategy(void) { 
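/*
 All of the SCCameraTweaks*Enabled accessors above share one resolution pattern: read the
 tweak once (dispatch_once), honor an explicit YES/NO override, and otherwise fall back to
 the A/B experiment. A condensed sketch of that pattern; MyFeatureStrategy and
 SCExperimentWithMyFeature are hypothetical stand-ins for the real strategy/experiment pairs:

     BOOL SCCameraTweaksMyFeatureEnabled(void)
     {
         static BOOL enabled;
         static dispatch_once_t onceToken;
         dispatch_once(&onceToken, ^{
             switch (MyFeatureStrategy()) {
                 case SCCameraTweaksStrategyOverrideToYes: enabled = YES; break;
                 case SCCameraTweaksStrategyOverrideToNo:  enabled = NO;  break;
                 case SCCameraTweaksStrategyFollowABTest:  enabled = SCExperimentWithMyFeature(); break;
                 default:                                  enabled = NO;  break;
             }
         });
         return enabled;
     }
 */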
NSNumber *strategy = SCTweakValueWithHalt(@"Camera", @"Core Camera", @"Light-weight Session Fix", (id) @(SCCameraTweaksStrategyFollowABTest), (@{ @(SCCameraTweaksStrategyFollowABTest) : @"Respect A/B testing", @(SCCameraTweaksStrategyOverrideToYes) : @"Override to YES", @(SCCameraTweaksStrategyOverrideToNo) : @"Override to NO" })); return (SCCameraTweaksStrategyType)[strategy unsignedIntegerValue]; } BOOL SCCameraTweaksSessionLightWeightFixEnabled(void) { static BOOL enabled; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ switch (SCCameraTweaksSessionLightWeightFixStrategy()) { case SCCameraTweaksStrategyOverrideToYes: enabled = YES; break; case SCCameraTweaksStrategyOverrideToNo: enabled = NO; break; case SCCameraTweaksStrategyFollowABTest: enabled = SCExperimentWithSessionLightWeightFix(); break; default: enabled = NO; break; } }); return enabled; } ================================================ FILE: UI/AVCameraViewEnums.h ================================================ // // AVCameraViewEnums.h // SCCamera // // Copyright © 2016 Snapchat, Inc. All rights reserved. // #import /** The context specifies the way in which the camera is presented to the user. The controller can be configured a variety of ways depending on the context. */ typedef NS_ENUM(NSUInteger, AVCameraViewControllerContext) { AVCameraViewControllerContextMainVC = 1, AVCameraViewControllerContextReply, AVCameraViewControllerContextDefault = AVCameraViewControllerContextReply, AVCameraViewControllerContextSnapAds, AVCameraViewControllerContextAddToStory, }; typedef NS_ENUM(NSInteger, AVCameraViewType) { AVCameraViewNoReply = 0, AVCameraViewReplyLeft, AVCameraViewReplyRight, AVCameraViewChat, AVCameraViewReplyHydra, AVCameraViewSnapAds, AVCameraViewGalleryMadeWithLenses, AVCameraViewSnapConnectSnapKit, AVCameraViewSnappable }; typedef NS_ENUM(NSUInteger, AVCameraViewControllerRecordingState) { AVCameraViewControllerRecordingStateDefault, // No capture activity AVCameraViewControllerRecordingStatePrepareRecording, // Preparing for recording with delay AVCameraViewControllerRecordingStateInitiatedRecording, // Actively recording AVCameraViewControllerRecordingStateTakingPicture, // Taking a still image AVCameraViewControllerRecordingStatePictureTaken, // Picture is taken AVCameraViewControllerRecordingStatePreview, // Preparing to present preview }; typedef NS_ENUM(NSUInteger, SCCameraRecordingMethod) { SCCameraRecordingMethodCameraButton, SCCameraRecordingMethodVolumeButton, SCCameraRecordingMethodLensInitiated }; ================================================ FILE: UI/Lenses/LensButtonZ/SCFeatureLensButtonZ.h ================================================ // // SCFeatureLensButtonZ.h // SCCamera // // Created by Anton Udovychenko on 4/24/18. // #import "AVCameraViewEnums.h" #import "SCFeature.h" #import @protocol SCFeatureLensButtonZ; @class SCGrowingButton, SCLens; NS_ASSUME_NONNULL_BEGIN @protocol SCFeatureLensButtonZDelegate - (void)featureLensZButton:(id)featureLensZButton didPressLensButton:(SCGrowingButton *)lensButton; - (nullable NSArray *)allLenses; @end @protocol SCFeatureLensButtonZ @property (nonatomic, weak) id delegate; - (void)setLensButtonActive:(BOOL)active; @end NS_ASSUME_NONNULL_END ================================================ FILE: UI/Lenses/LensSideButton/SCFeatureLensSideButton.h ================================================ // // SCFeatureLensSideButton.h // SCCamera // // Created by Anton Udovychenko on 4/12/18. 
// #import "AVCameraViewEnums.h" #import "SCFeature.h" #import @protocol SCFeatureLensSideButton; @class SCGrowingButton, SCLens; NS_ASSUME_NONNULL_BEGIN @protocol SCFeatureLensSideButtonDelegate - (void)featureLensSideButton:(id)featureLensSideButton didPressLensButton:(SCGrowingButton *)lensButton; - (nullable SCLens *)firstApplicableLens; @end @protocol SCFeatureLensSideButton @property (nonatomic, weak) id delegate; - (void)updateLensButtonVisibility:(CGFloat)visibilityPercentage; - (void)showLensButtonIfNeeded; @end NS_ASSUME_NONNULL_END ================================================ FILE: UI/SCLongPressGestureRecognizer.h ================================================ // // SCLongPressGestureRecognizer.h // SCCamera // // Created by Pavlo Antonenko on 4/28/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import // gesture recognizer cancels, if user moved finger more then defined value, even if long press started, unlike // UILongPressGestureRecognizer. But if user haven't moved finger for defined time, unlimited movement is allowed. @interface SCLongPressGestureRecognizer : UILongPressGestureRecognizer @property (nonatomic, assign) CGFloat allowableMovementAfterBegan; @property (nonatomic, assign) CGFloat timeBeforeUnlimitedMovementAllowed; @property (nonatomic, assign, readonly) CGFloat forceOfAllTouches; @property (nonatomic, assign, readonly) CGFloat maximumPossibleForceOfAllTouches; @property (nonatomic, strong) NSDictionary *userInfo; @property (nonatomic, assign) BOOL failedByMovement; - (BOOL)isUnlimitedMovementAllowed; @end ================================================ FILE: UI/SCLongPressGestureRecognizer.m ================================================ // // SCLongPressGestureRecognizer.m // SCCamera // // Created by Pavlo Antonenko on 4/28/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
// #import "SCLongPressGestureRecognizer.h" #import #import @implementation SCLongPressGestureRecognizer { CGPoint _initialPoint; CGFloat _initialTime; } - (instancetype)initWithTarget:(id)target action:(SEL)action { self = [super initWithTarget:target action:action]; if (self) { _allowableMovementAfterBegan = FLT_MAX; _timeBeforeUnlimitedMovementAllowed = 0.0; } return self; } - (void)reset { [super reset]; _initialPoint = CGPointZero; _initialTime = 0; _forceOfAllTouches = 1.0; _maximumPossibleForceOfAllTouches = 1.0; self.failedByMovement = NO; } - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event { [super touchesBegan:touches withEvent:event]; _initialPoint = [self locationInView:self.view]; _initialTime = CACurrentMediaTime(); _forceOfAllTouches = 1.0; for (UITouch *touch in touches) { _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches); _forceOfAllTouches = MAX(touch.force, _forceOfAllTouches); } } - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event { [super touchesMoved:touches withEvent:event]; _forceOfAllTouches = 1.0; for (UITouch *touch in touches) { _maximumPossibleForceOfAllTouches = MAX(touch.maximumPossibleForce, _maximumPossibleForceOfAllTouches); _forceOfAllTouches = MAX(touch.force, _forceOfAllTouches); } if (!CGPointEqualToPoint(_initialPoint, CGPointZero)) { CGPoint currentPoint = [self locationInView:self.view]; CGFloat distance = hypot(_initialPoint.x - currentPoint.x, _initialPoint.y - currentPoint.y); CGFloat timeDifference = CACurrentMediaTime() - _initialTime; if (distance > self.allowableMovementAfterBegan && timeDifference < self.timeBeforeUnlimitedMovementAllowed) { SCLogGeneralInfo(@"Long press moved %.2f > %.2f after %.3f < %.3f seconds, and is cancelled", distance, self.allowableMovementAfterBegan, timeDifference, self.timeBeforeUnlimitedMovementAllowed); self.state = UIGestureRecognizerStateFailed; self.failedByMovement = YES; } } } - (void)setEnabled:(BOOL)enabled { SCLogGeneralInfo(@"Setting enabled: %d for %@", enabled, self); [super setEnabled:enabled]; } - (BOOL)isUnlimitedMovementAllowed { return CACurrentMediaTime() - _initialTime > self.timeBeforeUnlimitedMovementAllowed; } @end ================================================ FILE: VolumeButton/SCCameraVolumeButtonHandler.h ================================================ // // SCCameraVolumeButtonHandler.h // Snapchat // // Created by Xiaomu Wu on 2/27/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. // #import @class SCCameraVolumeButtonHandler; @protocol SCCameraVolumeButtonHandlerDelegate - (void)volumeButtonHandlerDidBeginPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler; - (void)volumeButtonHandlerDidEndPressingVolumeButton:(SCCameraVolumeButtonHandler *)handler; @end @interface SCCameraVolumeButtonHandler : NSObject @property (nonatomic, weak) id delegate; - (void)startHandlingVolumeButtonEvents; - (void)stopHandlingVolumeButtonEvents; - (void)stopHandlingVolumeButtonEventsWhenPressingEnds; - (BOOL)isHandlingVolumeButtonEvents; - (BOOL)isPressingVolumeButton; @end ================================================ FILE: VolumeButton/SCCameraVolumeButtonHandler.m ================================================ // // SCCameraVolumeButtonHandler.m // Snapchat // // Created by Xiaomu Wu on 2/27/15. // Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
// #import "SCCameraVolumeButtonHandler.h" #import #import @implementation SCCameraVolumeButtonHandler { NSString *_secretFeatureToken; BOOL _pressingButton1; // volume down button BOOL _pressingButton2; // volume up button BOOL _stopsHandlingWhenPressingEnds; } #pragma mark - NSObject - (instancetype)init { self = [super init]; if (self) { NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; UIApplication *application = [UIApplication sharedApplication]; [notificationCenter addObserver:self selector:@selector(_handleButton1Down:) name:[application sc_eventNotificationName1] object:nil]; [notificationCenter addObserver:self selector:@selector(_handleButton1Up:) name:[application sc_eventNotificationName2] object:nil]; [notificationCenter addObserver:self selector:@selector(_handleButton2Down:) name:[application sc_eventNotificationName3] object:nil]; [notificationCenter addObserver:self selector:@selector(_handleButton2Up:) name:[application sc_eventNotificationName4] object:nil]; } return self; } - (void)dealloc { if (_secretFeatureToken) { [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken]; } } #pragma mark - Public - (void)startHandlingVolumeButtonEvents { _stopsHandlingWhenPressingEnds = NO; [self _resetPressingButtons]; if ([self isHandlingVolumeButtonEvents]) { return; } SCLogGeneralInfo(@"[Volume Buttons] Start handling volume button events"); _secretFeatureToken = [[[UIApplication sharedApplication] sc_enableSecretFeature2] copy]; } - (void)stopHandlingVolumeButtonEvents { if (![self isHandlingVolumeButtonEvents]) { return; } SCLogGeneralInfo(@"[Volume Buttons] Stop handling volume button events"); [[UIApplication sharedApplication] sc_disableSecretFeature2:_secretFeatureToken]; _secretFeatureToken = nil; _stopsHandlingWhenPressingEnds = NO; } - (void)stopHandlingVolumeButtonEventsWhenPressingEnds { if (![self isHandlingVolumeButtonEvents]) { return; } if (![self isPressingVolumeButton]) { return; } SCLogGeneralInfo(@"[Volume Buttons] Stop handling volume button events when pressing ends"); _stopsHandlingWhenPressingEnds = YES; } - (BOOL)isHandlingVolumeButtonEvents { return (_secretFeatureToken != nil); } - (BOOL)isPressingVolumeButton { return _pressingButton1 || _pressingButton2; } - (void)_resetPressingButtons { _pressingButton1 = NO; _pressingButton2 = NO; } #pragma mark - Private - (void)_handleButton1Down:(NSNotification *)notification { if (![self isHandlingVolumeButtonEvents]) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down, not handled"); return; } if (_pressingButton1) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down, already down"); return; } SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 down"); [self _changePressingButton:^{ _pressingButton1 = YES; }]; } - (void)_handleButton1Up:(NSNotification *)notification { if (![self isHandlingVolumeButtonEvents]) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up, not handled"); return; } if (!_pressingButton1) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up, already up"); return; } SCLogGeneralInfo(@"[Volume Buttons] Volume button 1 up"); [self _changePressingButton:^{ _pressingButton1 = NO; }]; } - (void)_handleButton2Down:(NSNotification *)notification { if (![self isHandlingVolumeButtonEvents]) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 down, not handled"); return; } if (_pressingButton2) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 down, already down"); return; } SCLogGeneralInfo(@"[Volume Buttons] Volume 
button 2 down"); [self _changePressingButton:^{ _pressingButton2 = YES; }]; } - (void)_handleButton2Up:(NSNotification *)notification { if (![self isHandlingVolumeButtonEvents]) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up, not handled"); return; } if (!_pressingButton2) { SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up, already up"); return; } SCLogGeneralInfo(@"[Volume Buttons] Volume button 2 up"); [self _changePressingButton:^{ _pressingButton2 = NO; }]; } - (void)_changePressingButton:(void (^)(void))change { BOOL oldPressingVolumeButton = [self isPressingVolumeButton]; change(); BOOL newPressingVolumeButton = [self isPressingVolumeButton]; if (!oldPressingVolumeButton && newPressingVolumeButton) { [_delegate volumeButtonHandlerDidBeginPressingVolumeButton:self]; } else if (oldPressingVolumeButton && !newPressingVolumeButton) { [_delegate volumeButtonHandlerDidEndPressingVolumeButton:self]; if (_stopsHandlingWhenPressingEnds) { [self stopHandlingVolumeButtonEvents]; } } } @end ================================================ FILE: Worker/SCCaptureWorker.h ================================================ // // SCCaptureWorker.h // Snapchat // // Created by Lin Jia on 10/19/17. // // #import "SCCaptureResource.h" #import #import /* In general, the function of SCCapturer is to use some resources (such as SCManagedCapturerListenerAnnouncer), to do something (such as announce an event). SCCaptureWorker abstract away the "do something" part of SCCapturer. It has very little internal states/resources. SCCaptureWorker is introduced to be shared between CaptureV1 and CaptureV2, to minimize duplication code. */ @interface SCCaptureWorker : NSObject + (SCCaptureResource *)generateCaptureResource; + (void)setupWithCaptureResource:(SCCaptureResource *)captureResource devicePosition:(SCManagedCaptureDevicePosition)devicePosition; + (void)setupCapturePreviewLayerController; + (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource token:(SCCapturerToken *)token completionHandler:(dispatch_block_t)completionHandler; + (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource token:(SCCapturerToken *)token completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler; + (void)setupVideoPreviewLayer:(SCCaptureResource *)resource; + (void)makeVideoPreviewLayer:(SCCaptureResource *)resource; + (void)redoVideoPreviewLayer:(SCCaptureResource *)resource; + (void)startStreaming:(SCCaptureResource *)resource; + (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource; + (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource; + (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource; + (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource aspectRatio:(CGFloat)aspectRatio captureSessionID:(NSString *)captureSessionID shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo completionHandler: (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler context:(NSString *)context; + (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings audioConfiguration:(SCAudioConfiguration *)configuration maxDuration:(NSTimeInterval)maxDuration fileURL:(NSURL *)fileURL captureSessionID:(NSString *)captureSessionID completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler; + 
(void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource; + (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource; + (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource; + (BOOL)canRunARSession:(SCCaptureResource *)resource; + (void)turnARSessionOn:(SCCaptureResource *)resource; + (void)turnARSessionOff:(SCCaptureResource *)resource; + (void)clearARKitData:(SCCaptureResource *)resource; + (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource; + (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource; + (BOOL)audioQueueStarted:(SCCaptureResource *)resource; + (BOOL)isLensApplied:(SCCaptureResource *)resource; + (BOOL)isVideoMirrored:(SCCaptureResource *)resource; + (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource; + (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest completionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource; + (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration resource:(SCCaptureResource *)resource; + (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource; + (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource; @end ================================================ FILE: Worker/SCCaptureWorker.m ================================================ // // SCCaptureWorker.m // Snapchat // // Created by Lin Jia on 10/19/17. // // #import "SCCaptureWorker.h" #import "ARConfiguration+SCConfiguration.h" #import "SCBlackCameraDetector.h" #import "SCBlackCameraNoOutputDetector.h" #import "SCCameraTweaks.h" #import "SCCaptureCoreImageFaceDetector.h" #import "SCCaptureFaceDetector.h" #import "SCCaptureMetadataOutputDetector.h" #import "SCCaptureSessionFixer.h" #import "SCManagedCaptureDevice+SCManagedCapturer.h" #import "SCManagedCaptureDeviceDefaultZoomHandler.h" #import "SCManagedCaptureDeviceHandler.h" #import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h" #import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h" #import "SCManagedCaptureDeviceSubjectAreaHandler.h" #import "SCManagedCapturePreviewLayerController.h" #import "SCManagedCaptureSession.h" #import "SCManagedCapturer.h" #import "SCManagedCapturerARImageCaptureProvider.h" #import "SCManagedCapturerARSessionHandler.h" #import "SCManagedCapturerGLViewManagerAPI.h" #import "SCManagedCapturerLensAPIProvider.h" #import "SCManagedCapturerLogging.h" #import "SCManagedCapturerState.h" #import "SCManagedCapturerStateBuilder.h" #import "SCManagedCapturerV1.h" #import "SCManagedDeviceCapacityAnalyzer.h" #import "SCManagedDeviceCapacityAnalyzerHandler.h" #import "SCManagedDroppedFramesReporter.h" #import "SCManagedFrontFlashController.h" #import "SCManagedStillImageCapturerHandler.h" #import "SCManagedVideoARDataSource.h" #import "SCManagedVideoCapturer.h" #import "SCManagedVideoCapturerHandler.h" #import "SCManagedVideoFileStreamer.h" #import "SCManagedVideoScanner.h" #import "SCManagedVideoStreamReporter.h" #import "SCManagedVideoStreamer.h" #import "SCMetalUtils.h" #import "SCProcessingPipelineBuilder.h" #import "SCVideoCaptureSessionInfo.h" #import #import #import #import #import #import #import #import #import #import @import ARKit; static const char *kSCManagedCapturerQueueLabel = "com.snapchat.managed_capturer"; static NSTimeInterval const kMaxDefaultScanFrameDuration = 1. 
/ 15; // Restrict scanning to max 15 frames per second static NSTimeInterval const kMaxPassiveScanFrameDuration = 1.; // Restrict scanning to max 1 frame per second static float const kScanTargetCPUUtilization = 0.5; // 50% utilization static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain"; static NSInteger const kSCManagedCapturerRecordVideoBusy = 3001; static NSInteger const kSCManagedCapturerCaptureStillImageBusy = 3002; static UIImageOrientation SCMirroredImageOrientation(UIImageOrientation orientation) { switch (orientation) { case UIImageOrientationRight: return UIImageOrientationLeftMirrored; case UIImageOrientationLeftMirrored: return UIImageOrientationRight; case UIImageOrientationUp: return UIImageOrientationUpMirrored; case UIImageOrientationUpMirrored: return UIImageOrientationUp; case UIImageOrientationDown: return UIImageOrientationDownMirrored; case UIImageOrientationDownMirrored: return UIImageOrientationDown; case UIImageOrientationLeft: return UIImageOrientationRightMirrored; case UIImageOrientationRightMirrored: return UIImageOrientationLeft; } } @implementation SCCaptureWorker + (SCCaptureResource *)generateCaptureResource { SCCaptureResource *captureResource = [[SCCaptureResource alloc] init]; captureResource.queuePerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedCapturerQueueLabel qualityOfService:QOS_CLASS_USER_INTERACTIVE queueType:DISPATCH_QUEUE_SERIAL context:SCQueuePerformerContextCamera]; captureResource.announcer = [[SCManagedCapturerListenerAnnouncer alloc] init]; captureResource.videoCapturerHandler = [[SCManagedVideoCapturerHandler alloc] initWithCaptureResource:captureResource]; captureResource.stillImageCapturerHandler = [[SCManagedStillImageCapturerHandler alloc] initWithCaptureResource:captureResource]; captureResource.deviceCapacityAnalyzerHandler = [[SCManagedDeviceCapacityAnalyzerHandler alloc] initWithCaptureResource:captureResource]; captureResource.deviceZoomHandler = ({ SCManagedCaptureDeviceDefaultZoomHandler *handler = nil; switch (SCCameraTweaksDeviceZoomHandlerStrategy()) { case SCManagedCaptureDeviceDefaultZoom: handler = [[SCManagedCaptureDeviceDefaultZoomHandler alloc] initWithCaptureResource:captureResource]; break; case SCManagedCaptureDeviceSavitzkyGolayFilter: handler = [[SCManagedCaptureDeviceSavitzkyGolayZoomHandler alloc] initWithCaptureResource:captureResource]; break; case SCManagedCaptureDeviceLinearInterpolation: handler = [[SCManagedCaptureDeviceLinearInterpolationZoomHandler alloc] initWithCaptureResource:captureResource]; break; } handler; }); captureResource.captureDeviceHandler = [[SCManagedCaptureDeviceHandler alloc] initWithCaptureResource:captureResource]; captureResource.arSessionHandler = [[SCManagedCapturerARSessionHandler alloc] initWithCaptureResource:captureResource]; captureResource.tokenSet = [NSMutableSet new]; captureResource.allowsZoom = YES; captureResource.debugInfoDict = [[NSMutableDictionary alloc] init]; captureResource.notificationRegistered = NO; return captureResource; } + (void)setupWithCaptureResource:(SCCaptureResource *)captureResource devicePosition:(SCManagedCaptureDevicePosition)devicePosition { SCTraceODPCompatibleStart(2); SCAssert(captureResource.status == SCManagedCapturerStatusUnknown, @"The status should be unknown"); captureResource.device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; if (!captureResource.device) { // Always prefer front camera over back camera if ([SCManagedCaptureDevice front]) { captureResource.device = 
+ (void)setupWithCaptureResource:(SCCaptureResource *)captureResource
                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceODPCompatibleStart(2);
    SCAssert(captureResource.status == SCManagedCapturerStatusUnknown, @"The status should be unknown");
    captureResource.device = [SCManagedCaptureDevice deviceWithPosition:devicePosition];
    if (!captureResource.device) {
        // Always prefer front camera over back camera
        if ([SCManagedCaptureDevice front]) {
            captureResource.device = [SCManagedCaptureDevice front];
            devicePosition = SCManagedCaptureDevicePositionFront;
        } else {
            captureResource.device = [SCManagedCaptureDevice back];
            devicePosition = SCManagedCaptureDevicePositionBack;
        }
    }
    // Initial state
    SCLogCapturerInfo(@"Init state with devicePosition:%lu, zoomFactor:%f, flashSupported:%d, "
                      @"torchSupported:%d, flashActive:%d, torchActive:%d",
                      (unsigned long)devicePosition, captureResource.device.zoomFactor,
                      captureResource.device.isFlashSupported, captureResource.device.isTorchSupported,
                      captureResource.device.flashActive, captureResource.device.torchActive);
    captureResource.state = [[SCManagedCapturerState alloc] initWithIsRunning:NO
                                                            isNightModeActive:NO
                                                         isPortraitModeActive:NO
                                                            lowLightCondition:NO
                                                            adjustingExposure:NO
                                                               devicePosition:devicePosition
                                                                   zoomFactor:captureResource.device.zoomFactor
                                                               flashSupported:captureResource.device.isFlashSupported
                                                               torchSupported:captureResource.device.isTorchSupported
                                                                  flashActive:captureResource.device.flashActive
                                                                  torchActive:captureResource.device.torchActive
                                                                 lensesActive:NO
                                                              arSessionActive:NO
                                                           liveVideoStreaming:NO
                                                           lensProcessorReady:NO];
    [self configLensesProcessorWithCaptureResource:captureResource];
    [self configARSessionWithCaptureResource:captureResource];
    [self configCaptureDeviceHandlerWithCaptureResource:captureResource];
    [self configAVCaptureSessionWithCaptureResource:captureResource];
    [self configImageCapturerWithCaptureResource:captureResource];
    [self configDeviceCapacityAnalyzerWithCaptureResource:captureResource];
    [self configVideoDataSourceWithCaptureResource:captureResource devicePosition:devicePosition];
    [self configVideoScannerWithCaptureResource:captureResource];
    [self configVideoCapturerWithCaptureResource:captureResource];
    if (!SCIsSimulator()) {
        // We don't want it enabled for simulator
        [self configBlackCameraDetectorWithCaptureResource:captureResource];
    }
    if (SCCameraTweaksEnableFaceDetectionFocus(captureResource.state.devicePosition)) {
        [self configureCaptureFaceDetectorWithCaptureResource:captureResource];
    }
}

+ (void)setupCapturePreviewLayerController
{
    SCAssert([[SCQueuePerformer mainQueuePerformer] isCurrentPerformer], @"");
    [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer];
}

+ (void)configLensesProcessorWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCManagedCapturerStateBuilder *stateBuilder =
        [SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state];
    [stateBuilder setLensProcessorReady:YES];
    captureResource.state = [stateBuilder build];
    captureResource.lensProcessingCore = [captureResource.lensAPIProvider lensAPIForCaptureResource:captureResource];
}

+ (void)configARSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (@available(iOS 11.0, *)) {
        captureResource.arSession = [[ARSession alloc] init];
        captureResource.arImageCapturer =
            [captureResource.arImageCaptureProvider arImageCapturerWith:captureResource.queuePerformer
                                                     lensProcessingCore:captureResource.lensProcessingCore];
    }
}
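// configAVCaptureSessionWithCaptureResource: rebuilds the managed AVCaptureSession and
// re-registers the KVO observation on avSession.running; the KVO plumbing is compiled
// out on the simulator, where there is no capture hardware. Automatic application audio
// session configuration is disabled (presumably so the capture stack can manage the
// audio session itself), and the current capture device is attached as session input.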
+ (void)configAVCaptureSessionWithCaptureResource:(SCCaptureResource *)captureResource
{
#if !TARGET_IPHONE_SIMULATOR
    captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0;
    // lazily initialize _captureResource.kvoController on background thread
    if (!captureResource.kvoController) {
        captureResource.kvoController =
            [[FBKVOController alloc] initWithObserver:[SCManagedCapturerV1 sharedInstance]];
    }
    [captureResource.kvoController unobserve:captureResource.managedSession.avSession];
    captureResource.managedSession =
        [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:captureResource.blackCameraDetector];
    [captureResource.kvoController observe:captureResource.managedSession.avSession
                                   keyPath:@keypath(captureResource.managedSession.avSession, running)
                                   options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                                    action:captureResource.handleAVSessionStatusChange];
#endif
    [captureResource.managedSession.avSession setAutomaticallyConfiguresApplicationAudioSession:NO];
    [captureResource.device setDeviceAsInput:captureResource.managedSession.avSession];
}

+ (void)configDeviceCapacityAnalyzerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.deviceCapacityAnalyzer =
        [[SCManagedDeviceCapacityAnalyzer alloc] initWithPerformer:captureResource.videoDataSource.performer];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.deviceCapacityAnalyzerHandler];
    [captureResource.deviceCapacityAnalyzer setLowLightConditionEnabled:[SCManagedCaptureDevice isNightModeSupported]];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.stillImageCapturer];
    [captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:captureResource.device];
}

+ (void)configVideoDataSourceWithCaptureResource:(SCCaptureResource *)captureResource
                                  devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    if (captureResource.fileInputDecider.shouldProcessFileInput) {
        captureResource.videoDataSource =
            [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:captureResource.fileInputDecider.fileURL];
        [captureResource.lensProcessingCore setLensesActive:YES
                                           videoOrientation:captureResource.videoDataSource.videoOrientation
                                              filterFactory:nil];
        runOnMainThreadAsynchronously(^{
            [captureResource.videoPreviewGLViewManager prepareViewIfNecessary];
        });
    } else {
        if (@available(iOS 11.0, *)) {
            captureResource.videoDataSource =
                [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
                                                      arSession:captureResource.arSession
                                                 devicePosition:devicePosition];
            [captureResource.videoDataSource addListener:captureResource.arImageCapturer];
            if (captureResource.state.isPortraitModeActive) {
                [captureResource.videoDataSource setDepthCaptureEnabled:YES];

                SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init];
                processingPipelineBuilder.portraitModeEnabled = YES;
                SCProcessingPipeline *pipeline = [processingPipelineBuilder build];
                [captureResource.videoDataSource addProcessingPipeline:pipeline];
            }
        } else {
            captureResource.videoDataSource =
                [[SCManagedVideoStreamer alloc] initWithSession:captureResource.managedSession.avSession
                                                 devicePosition:devicePosition];
        }
    }
    [captureResource.videoDataSource addListener:captureResource.lensProcessingCore.capturerListener];
    [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
    [captureResource.videoDataSource addListener:captureResource.stillImageCapturer];
    if (SCIsMasterBuild()) {
        captureResource.videoStreamReporter = [[SCManagedVideoStreamReporter alloc] init];
        [captureResource.videoDataSource addListener:captureResource.videoStreamReporter];
    }
}
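// configVideoScannerWithCaptureResource: creates the video scanner and subscribes it to
// both the video frames and the capacity analyzer. The restCycle argument works out to
// 1 - kScanTargetCPUUtilization = 0.5, i.e. the scanner is asked to stay idle roughly
// half of the time so scanning never monopolizes the CPU.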
+ (void)configVideoScannerWithCaptureResource:(SCCaptureResource *)captureResource
{
    // When initializing video scanner:
    // Restrict default scanning to max 15 frames per second.
    // Restrict passive scanning to max 1 frame per second.
    // Give CPU time to rest.
    captureResource.videoScanner =
        [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:kMaxDefaultScanFrameDuration
                                               maxFramePassiveDuration:kMaxPassiveScanFrameDuration
                                                             restCycle:1 - kScanTargetCPUUtilization];
    [captureResource.videoDataSource addListener:captureResource.videoScanner];
    [captureResource.deviceCapacityAnalyzer addListener:captureResource.videoScanner];
}

+ (void)configVideoCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCCameraTweaksEnableCaptureSharePerformer()) {
        captureResource.videoCapturer =
            [[SCManagedVideoCapturer alloc] initWithQueuePerformer:captureResource.queuePerformer];
    } else {
        captureResource.videoCapturer = [[SCManagedVideoCapturer alloc] init];
    }
    [captureResource.videoCapturer addListener:captureResource.lensProcessingCore.capturerListener];
    captureResource.videoCapturer.delegate = captureResource.videoCapturerHandler;
}

+ (void)configImageCapturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:captureResource];
}
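// startRunningWithCaptureResource:token:completionHandler: registers the caller's token
// in the token set and, when the capturer is in the Ready state, starts the
// AVCaptureSession (skipped while the app is backgrounded). On the non-Metal path the
// preview-layer mutation is wrapped in a CATransaction so the layer and the session
// change atomically. Streaming is then (re)started regardless of status, a liveness
// timer is scheduled on the main thread, and listeners are notified asynchronously.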
+ (void)startRunningWithCaptureResource:(SCCaptureResource *)captureResource
                                  token:(SCCapturerToken *)token
                      completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
                                     uniqueId:@""
                                     stepName:@"startOpenCameraOnManagedCaptureQueue"];
    SCTraceSignal(@"Add token %@ to set %@", token, captureResource.tokenSet);
    [captureResource.tokenSet addObject:token];
    if (captureResource.appInBackground) {
        SCTraceSignal(@"Will skip startRunning on AVCaptureSession because we are in background");
    }
    SCTraceStartSection("start session")
    {
        if (!SCDeviceSupportsMetal()) {
            SCCAssert(captureResource.videoPreviewLayer, @"videoPreviewLayer should be created already");
            if (captureResource.status == SCManagedCapturerStatusReady) {
                // Need to wrap this into a CATransaction because startRunning will change
                // AVCaptureVideoPreviewLayer; therefore, without an atomic update, it will
                // cause layer inconsistency.
                [CATransaction begin];
                [CATransaction setDisableActions:YES];
                captureResource.videoPreviewLayer.session = captureResource.managedSession.avSession;
                if (!captureResource.appInBackground) {
                    SCGhostToSnappableSignalCameraStart();
                    [captureResource.managedSession startRunning];
                }
                [self setupVideoPreviewLayer:captureResource];
                [CATransaction commit];
                SCLogCapturerInfo(@"[_captureResource.avSession startRunning] finished. token: %@", token);
            }
            // In case we don't use the sample buffer, we need to fake that we know when the
            // first frame is received.
            SCGhostToSnappableSignalDidReceiveFirstPreviewFrame();
        } else {
            if (captureResource.status == SCManagedCapturerStatusReady) {
                if (!captureResource.appInBackground) {
                    SCGhostToSnappableSignalCameraStart();
                    [captureResource.managedSession startRunning];
                    SCLogCapturerInfo(@"[_captureResource.avSession startRunning] finished using sample buffer. "
                                      @"token: %@",
                                      token);
                }
            }
        }
    }
    SCTraceEndSection();
    SCTraceStartSection("start streaming")
    {
        // Start streaming after start running, but make sure we start it regardless of
        // whether the status is ready or not.
        [self startStreaming:captureResource];
    }
    SCTraceEndSection();

    if (!captureResource.notificationRegistered) {
        captureResource.notificationRegistered = YES;

        [captureResource.deviceSubjectAreaHandler startObserving];

        [[NSNotificationCenter defaultCenter] addObserver:[SCManagedCapturerV1 sharedInstance]
                                                 selector:captureResource.sessionRuntimeError
                                                     name:AVCaptureSessionRuntimeErrorNotification
                                                   object:nil];
    }

    if (captureResource.status == SCManagedCapturerStatusReady) {
        // Schedule a timer to check the running state and fix any inconsistency.
        runOnMainThreadAsynchronously(^{
            [self setupLivenessConsistencyTimerIfForeground:captureResource];
        });
        SCLogCapturerInfo(@"Setting isRunning to YES. token: %@", token);
        captureResource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:YES] build];
        captureResource.status = SCManagedCapturerStatusRunning;
    }
    [[SCLogger sharedInstance] logStepToEvent:kSCCameraMetricsOpen
                                     uniqueId:@""
                                     stepName:@"endOpenCameraOnManagedCaptureQueue"];
    [[SCLogger sharedInstance] logTimedEventEnd:kSCCameraMetricsOpen uniqueId:@"" parameters:nil];
    SCManagedCapturerState *state = [captureResource.state copy];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    runOnMainThreadAsynchronously(^{
        SCTraceResume(resumeToken);
        [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
        [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStartRunning:state];
        [[SCBatteryLogger shared] logManagedCapturerDidStartRunning];
        if (completionHandler) {
            completionHandler();
        }
        if (!SCDeviceSupportsMetal()) {
            // This approximates the did-render timing; it is not accurate.
            SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime());
        }
    });
}
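// stopRunningWithCaptureResource:token:completionHandler: is the teardown counterpart.
// The token set acts as a reference count: the session is only stopped when the last
// outstanding token has been returned. On success it pauses the ARSession, stops the
// AVCaptureSession, stops or pauses streaming (rebuilding the preview layer on the
// non-Metal path), restores the single back camera if the dual camera was active, and
// unregisters the liveness timer and runtime-error notification before announcing the
// state change.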
+ (BOOL)stopRunningWithCaptureResource:(SCCaptureResource *)captureResource
                                 token:(SCCapturerToken *)token
                     completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
{
    SCTraceODPCompatibleStart(2);
    SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
    BOOL videoPreviewLayerChanged = NO;
    SCAssert([captureResource.tokenSet containsObject:token],
             @"It should be a valid token that is issued by startRunning method.");
    SCTraceSignal(@"Remove token %@, from set %@", token, captureResource.tokenSet);
    SCLogCapturerInfo(@"Stop running. token:%@ tokenSet:%@", token, captureResource.tokenSet);
    [captureResource.tokenSet removeObject:token];
    BOOL succeed = (captureResource.tokenSet.count == 0);
    if (succeed && captureResource.status == SCManagedCapturerStatusRunning) {
        captureResource.status = SCManagedCapturerStatusReady;
        if (@available(iOS 11.0, *)) {
            [captureResource.arSession pause];
        }
        [captureResource.managedSession stopRunning];
        if (!SCDeviceSupportsMetal()) {
            [captureResource.videoDataSource stopStreaming];
            [self redoVideoPreviewLayer:captureResource];
            videoPreviewLayerChanged = YES;
        } else {
            [captureResource.videoDataSource pauseStreaming];
        }
        if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera) {
            [[SCManagedCapturerV1 sharedInstance]
                setDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
                              completionHandler:nil
                                        context:SCCapturerContext];
        }
        // We always disable lenses and hide _captureResource.videoPreviewGLView when the app
        // goes into the background, thus there is no need to clean up anything.
        // _captureResource.videoPreviewGLView will be shown to the user again only when a
        // frame is processed by the lenses processor.
        // Remove the liveness timer which checks the health of the running state
        runOnMainThreadAsynchronously(^{
            [self destroyLivenessConsistencyTimer:captureResource];
        });
        SCLogCapturerInfo(@"Setting isRunning to NO. removed token: %@", token);
        captureResource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:captureResource.state] setIsRunning:NO] build];

        captureResource.notificationRegistered = NO;

        [captureResource.deviceSubjectAreaHandler stopObserving];

        [[NSNotificationCenter defaultCenter] removeObserver:[SCManagedCapturerV1 sharedInstance]
                                                        name:AVCaptureSessionRuntimeErrorNotification
                                                      object:nil];

        [captureResource.arSessionHandler stopObserving];
    }
    SCManagedCapturerState *state = [captureResource.state copy];
    AVCaptureVideoPreviewLayer *videoPreviewLayer = videoPreviewLayerChanged ? captureResource.videoPreviewLayer : nil;
    runOnMainThreadAsynchronously(^{
        if (succeed) {
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state];
            [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didStopRunning:state];
            [[SCBatteryLogger shared] logManagedCapturerDidStopRunning];
            if (videoPreviewLayerChanged) {
                [captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                didChangeVideoPreviewLayer:videoPreviewLayer];
            }
        }
        if (completionHandler) {
            completionHandler(succeed);
        }
    });
    return succeed;
}

+ (void)setupVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer] ||
                 [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
    if ([resource.videoPreviewLayer.connection isVideoOrientationSupported]) {
        resource.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    resource.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    resource.videoPreviewLayer.hidden = !resource.managedSession.isRunning;
    SCLogCapturerInfo(@"Setup video preview layer with connection.enabled:%d, hidden:%d",
                      resource.videoPreviewLayer.connection.enabled, resource.videoPreviewLayer.hidden);
}
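// makeVideoPreviewLayer: constructs a fresh AVCaptureVideoPreviewLayer inside a
// CATransaction (it may run off the main thread, and without the transaction the layer
// would not be laid out correctly). redoVideoPreviewLayer: below tears the old layer
// down on the main thread and immediately rebuilds it, so the next startRunning does
// not pay the setup cost.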
+ (void)makeVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    // This can be called either from the current queue or from the main queue.
    SCAssert([resource.queuePerformer isCurrentPerformer] ||
                 [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer],
             @"");
#if !TARGET_IPHONE_SIMULATOR
    SCAssert(resource.managedSession.avSession, @"session shouldn't be nil");
#endif
    // Need to wrap this in a transaction, otherwise this is happening off the main thread
    // and the layer won't be laid out correctly.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    // Since _captureResource.avSession is always created / recreated on this private queue, and
    // videoPreviewLayer.session, if not touched by anyone else, is also set on this private
    // queue, it should be safe to do this if-clause check.
    resource.videoPreviewLayer = [AVCaptureVideoPreviewLayer new];
    SCAssert(resource.videoPreviewLayer, @"_captureResource.videoPreviewLayer shouldn't be nil");
    [self setupVideoPreviewLayer:resource];
    if (resource.device.softwareZoom && resource.device.zoomFactor != 1) {
        [self softwareZoomWithDevice:resource.device resource:resource];
    }
    [CATransaction commit];
    SCLogCapturerInfo(@"Created AVCaptureVideoPreviewLayer:%@", resource.videoPreviewLayer);
}

+ (void)redoVideoPreviewLayer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"redo video preview layer");
    AVCaptureVideoPreviewLayer *videoPreviewLayer = resource.videoPreviewLayer;
    resource.videoPreviewLayer = nil;
    // This will do dispatch_sync on the main thread; since mainQueuePerformer is
    // reentrant, it should be fine on iOS 7.
    [[SCQueuePerformer mainQueuePerformer] performAndWait:^{
        // Hide and remove the session when stopping the video preview layer on the main
        // thread. It seems that when we nil out the session, it causes some relayout on
        // iOS 9 and triggers an assertion.
        videoPreviewLayer.hidden = YES;
        videoPreviewLayer.session = nil;
        // We set up the video preview layer immediately after destroying it so that when
        // we start running again, we don't need to pay the setup cost.
        [self makeVideoPreviewLayer:resource];
    }];
}

+ (void)startStreaming:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    ++resource.streamingSequence;
    SCLogCapturerInfo(@"Start streaming. streamingSequence:%lu", (unsigned long)resource.streamingSequence);
    [resource.videoDataSource startStreaming];
}

+ (void)setupLivenessConsistencyTimerIfForeground:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    if (resource.livenessTimer) {
        // If we have the liveness timer already, we don't need to set it up again.
        return;
    }
    // Check if the application state is in background now; if so, we don't need to set up
    // the liveness timer
    if ([UIApplication sharedApplication].applicationState != UIApplicationStateBackground) {
        resource.livenessTimer = [NSTimer scheduledTimerWithTimeInterval:1
                                                                  target:[SCManagedCapturerV1 sharedInstance]
                                                                selector:resource.livenessConsistency
                                                                userInfo:nil
                                                                 repeats:YES];
    }
}

+ (void)destroyLivenessConsistencyTimer:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertMainThread();
    [resource.livenessTimer invalidate];
    resource.livenessTimer = nil;
}

+ (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device resource:(SCCaptureResource *)resource
{
    [resource.deviceZoomHandler softwareZoomWithDevice:device];
}
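// captureStillImageWithCaptureResource:... serializes still captures: if one is already
// in flight, the completion handler is invoked on the main thread with the
// kSCManagedCapturerCaptureStillImageBusy (3002) error instead of starting a second
// capture.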
+ (void)captureStillImageWithCaptureResource:(SCCaptureResource *)captureResource
                                 aspectRatio:(CGFloat)aspectRatio
                            captureSessionID:(NSString *)captureSessionID
                      shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                           completionHandler:
                               (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                     context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    if (captureResource.stillImageCapturing) {
        SCLogCapturerWarning(@"Another still image is capturing. aspectRatio:%f", aspectRatio);
        if (completionHandler) {
            SCManagedCapturerState *state = [captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                completionHandler(nil, nil,
                                  [NSError errorWithDomain:kSCManagedCapturerErrorDomain
                                                      code:kSCManagedCapturerCaptureStillImageBusy
                                                  userInfo:nil],
                                  state);
            });
        }
    } else {
        captureResource.stillImageCapturing = YES;
        [SCCaptureWorker _captureStillImageAsynchronouslyWithCaptureResource:captureResource
                                                                 aspectRatio:aspectRatio
                                                            captureSessionID:captureSessionID
                                                      shouldCaptureFromVideo:shouldCaptureFromVideo
                                                           completionHandler:completionHandler];
    }
}

+ (void)_captureStillImageAsynchronouslyWithCaptureResource:(SCCaptureResource *)captureResource
                                                aspectRatio:(CGFloat)aspectRatio
                                           captureSessionID:(NSString *)captureSessionID
                                     shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                                          completionHandler:
                                              (sc_managed_capturer_capture_still_image_completion_handler_t)
                                                  completionHandler
{
    SCTraceODPCompatibleStart(2);
    SCAssert([captureResource.queuePerformer isCurrentPerformer], @"");
    SCAssert(completionHandler, @"completionHandler cannot be nil");
    SCManagedCapturerState *state = [captureResource.state copy];
    SCLogCapturerInfo(@"Capturing still image. aspectRatio:%f state:%@", aspectRatio, state);
    // If the video streamer is not running yet when we start capturing, start running it.
    [SCCaptureWorker startStreaming:captureResource];
    SCManagedStillImageCapturer *stillImageCapturer = captureResource.stillImageCapturer;
    if (@available(iOS 11.0, *)) {
        if (state.arSessionActive) {
            stillImageCapturer = captureResource.arImageCapturer;
        }
    }
    dispatch_block_t stillImageCaptureHandler = ^{
        SCCAssert(captureResource.stillImageCapturer, @"stillImageCapturer should be available");
        float zoomFactor = captureResource.device.softwareZoom ? captureResource.device.zoomFactor : 1;
        [stillImageCapturer
            captureStillImageWithAspectRatio:aspectRatio
                                atZoomFactor:zoomFactor
                                 fieldOfView:captureResource.device.fieldOfView
                                       state:state
                            captureSessionID:captureSessionID
                      shouldCaptureFromVideo:shouldCaptureFromVideo
                           completionHandler:^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
                               SCTraceStart();
                               // We are done here; turn off the front flash if needed.
                               // This is dispatched on SCManagedCapturer's private queue.
                               if (captureResource.state.flashActive && !captureResource.state.flashSupported &&
                                   captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                                   captureResource.frontFlashController.flashActive = NO;
                               }
                               if (state.devicePosition == SCManagedCaptureDevicePositionFront) {
                                   fullScreenImage = [UIImage
                                       imageWithCGImage:fullScreenImage.CGImage
                                                  scale:1.0
                                            orientation:SCMirroredImageOrientation(
                                                            fullScreenImage.imageOrientation)];
                               }
                               captureResource.stillImageCapturing = NO;
                               runOnMainThreadAsynchronously(^{
                                   completionHandler(fullScreenImage, metadata, error, state);
                               });
                           }];
    };
    if (state.flashActive && !captureResource.state.flashSupported &&
        state.devicePosition == SCManagedCaptureDevicePositionFront) {
        captureResource.frontFlashController.flashActive = YES;
        // Do the first capture only after 0.175 seconds so that the front flash is already available
        [captureResource.queuePerformer perform:stillImageCaptureHandler after:0.175];
    } else {
        stillImageCaptureHandler();
    }
}
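// startRecordingWithCaptureResource:... refuses to start a second recording session
// (kSCManagedCapturerRecordVideoBusy, 3001), posts a notification so any image-process
// video playback stops, temporarily promotes an active flash to torch, resets software
// zoom, locks focus on both cameras while recording (unless ARKit is driving capture),
// and rewires the data-source listeners so frames flow to the video capturer instead of
// the capacity analyzer for the duration of the recording.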
+ (void)startRecordingWithCaptureResource:(SCCaptureResource *)captureResource
                           outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       audioConfiguration:(SCAudioConfiguration *)configuration
                              maxDuration:(NSTimeInterval)maxDuration
                                  fileURL:(NSURL *)fileURL
                         captureSessionID:(NSString *)captureSessionID
                        completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
{
    SCTraceODPCompatibleStart(2);
    if (captureResource.videoRecording) {
        if (completionHandler) {
            runOnMainThreadAsynchronously(^{
                completionHandler(SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0),
                                  [NSError errorWithDomain:kSCManagedCapturerErrorDomain
                                                      code:kSCManagedCapturerRecordVideoBusy
                                                  userInfo:nil]);
            });
        }
        // Don't start recording session
        SCLogCapturerInfo(@"*** Tried to start multiple video recording sessions ***");
        return;
    }
    // Fix: https://jira.sc-corp.net/browse/CCAM-12322
    // Fire this notification in recording state to let PlaybackSession stop
    runOnMainThreadAsynchronously(^{
        [[NSNotificationCenter defaultCenter] postNotificationName:kSCImageProcessVideoPlaybackStopNotification
                                                            object:[SCManagedCapturer sharedInstance]
                                                          userInfo:nil];
    });
    SCLogCapturerInfo(@"Start recording. outputSettings:%@, maxDuration:%f, fileURL:%@", outputSettings, maxDuration,
                      fileURL);
    // Turn on the torch temporarily if flash is active
    if (!captureResource.state.torchActive) {
        if (captureResource.state.flashActive) {
            [captureResource.device setTorchActive:YES];
            if (captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
                captureResource.frontFlashController.torchActive = YES;
            }
        }
    }
    if (captureResource.device.softwareZoom) {
        captureResource.device.zoomFactor = 1;
        [SCCaptureWorker softwareZoomWithDevice:captureResource.device resource:captureResource];
    }
    // Lock focus on both front and back camera if not using ARKit
    if (!captureResource.state.arSessionActive) {
        SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
        SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
        [front setRecording:YES];
        [back setRecording:YES];
    }
    // Start streaming if we haven't already
    [self startStreaming:captureResource];
    // Remove other listeners from the video streamer
    [captureResource.videoDataSource removeListener:captureResource.deviceCapacityAnalyzer];
    // If a lens is not actually applied, we should open the sticky video tweak
    BOOL isLensApplied = [SCCaptureWorker isLensApplied:captureResource];
    [captureResource.videoDataSource setKeepLateFrames:!isLensApplied];
    SCLogCapturerInfo(@"Start recording. isLensApplied:%d", isLensApplied);
    [captureResource.videoDataSource addListener:captureResource.videoCapturer];
    captureResource.videoRecording = YES;
    if (captureResource.state.lensesActive) {
        BOOL modifySource = captureResource.videoRecording || captureResource.state.liveVideoStreaming;
        [captureResource.lensProcessingCore setModifySource:modifySource];
    }
    if (captureResource.fileInputDecider.shouldProcessFileInput) {
        [captureResource.videoDataSource stopStreaming];
    }
    // The max video duration: we will stop processing sample buffers if the current time is
    // larger than the max video duration. The extra 0.5 gives us a bit of leeway during
    // video recording initialization, and when an NSTimer gets stuck in normal recording
    // sessions, we don't break expectations too badly about how long the video is recorded.
    SCVideoCaptureSessionInfo sessionInfo = [captureResource.videoCapturer
        startRecordingAsynchronouslyWithOutputSettings:outputSettings
                                    audioConfiguration:configuration
                                           maxDuration:maxDuration + 0.5
                                                 toURL:fileURL
                                          deviceFormat:captureResource.device.activeFormat
                                           orientation:AVCaptureVideoOrientationLandscapeLeft
                                      captureSessionID:captureSessionID];
    if (completionHandler) {
        runOnMainThreadAsynchronously(^{
            completionHandler(sessionInfo, nil);
        });
    }
    captureResource.droppedFramesReporter = [SCManagedDroppedFramesReporter new];
    [captureResource.videoDataSource addListener:captureResource.droppedFramesReporter];
    [[SCManagedCapturerV1 sharedInstance] addListener:captureResource.droppedFramesReporter];
}
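// stopRecording hands the actual stop off to SCManagedVideoCapturer asynchronously and
// reports dropped frames on the same performer that delivers sample buffers, so the
// keepLateFrames value it reads is consistent with the recording that just ended;
// keepLateFrames is then disabled again to keep the preview recent.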
+ (void)stopRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Stop recording asynchronously");
    [captureResource.videoCapturer stopRecordingAsynchronously];
    [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
    SCManagedDroppedFramesReporter *droppedFramesReporter = captureResource.droppedFramesReporter;
    [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
    captureResource.droppedFramesReporter = nil;
    [captureResource.videoDataSource.performer perform:^{
        // Call on the same performer as that of managedVideoDataSource:didOutputSampleBuffer:devicePosition:
        BOOL keepLateFrames = [captureResource.videoDataSource getKeepLateFrames];
        [droppedFramesReporter reportWithKeepLateFrames:keepLateFrames
                                          lensesApplied:[SCCaptureWorker isLensApplied:captureResource]];
        // Disable keepLateFrames once we stop recording, to make sure the preview stays recent
        [captureResource.videoDataSource setKeepLateFrames:NO];
    }];
}

+ (void)cancelRecordingWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceStart();
    SCLogCapturerInfo(@"Cancel recording asynchronously");
    [captureResource.videoDataSource removeListener:captureResource.droppedFramesReporter];
    [[SCManagedCapturerV1 sharedInstance] removeListener:captureResource.droppedFramesReporter];
    captureResource.droppedFramesReporter = nil;
    [captureResource.videoDataSource removeListener:captureResource.videoCapturer];
    // Add back the other listeners to the video streamer
    [captureResource.videoDataSource addListener:captureResource.deviceCapacityAnalyzer];
    [captureResource.videoCapturer cancelRecordingAsynchronously];
    captureResource.droppedFramesReporter = nil;
}

+ (SCVideoCaptureSessionInfo)activeSession:(SCCaptureResource *)resource
{
    if (resource.videoCapturer == nil) {
        SCLogCapturerWarning(
            @"Trying to retrieve SCVideoCaptureSessionInfo while _captureResource.videoCapturer is nil.");
        return SCVideoCaptureSessionInfoMake(kCMTimeInvalid, kCMTimeInvalid, 0);
    } else {
        return resource.videoCapturer.activeSession;
    }
}

+ (BOOL)canRunARSession:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    if (@available(iOS 11.0, *)) {
        return resource.state.lensesActive &&
               [ARConfiguration sc_supportedForDevicePosition:resource.state.devicePosition];
    }
    return NO;
}
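// The ARSession and the AVCaptureSession are mutually exclusive: turnARSessionOn: stops
// the AVCaptureSession and runs the ARSession (resetting tracking and removing existing
// anchors), while turnARSessionOff: stops the ARSession, restores the device's active
// format, and starts the AVCaptureSession again. Zoom is locked while AR is active
// because zooming on an ARSession misbehaves.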
+ (void)turnARSessionOff:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer], @"");
    if (@available(iOS 11.0, *)) {
        SC_GUARD_ELSE_RETURN(resource.state.arSessionActive);
        SCLogCapturerInfo(@"Stopping ARSession");
        [resource.arSessionHandler stopARSessionRunning];
        [resource.managedSession performConfiguration:^{
            [resource.device updateActiveFormatWithSession:resource.managedSession.avSession];
        }];
        [resource.managedSession startRunning];
        resource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:NO] build];
        [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
        [self clearARKitData:resource];
        [self updateLensesFieldOfViewTracking:resource];
        runOnMainThreadAsynchronously(^{
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                       didChangeARSessionActive:resource.state];
            [[SCManagedCapturerV1 sharedInstance] unlockZoomWithContext:SCCapturerContext];
        });
    }
}

+ (void)clearARKitData:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    if (@available(iOS 11.0, *)) {
        if ([resource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {
            id<SCManagedVideoARDataSource> dataSource = (id<SCManagedVideoARDataSource>)resource.videoDataSource;
            dataSource.currentFrame = nil;
#ifdef SC_USE_ARKIT_FACE
            dataSource.lastDepthData = nil;
#endif
        }
    }
}

+ (void)turnARSessionOn:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssert([resource.queuePerformer isCurrentPerformer], @"");
    if (@available(iOS 11.0, *)) {
        SC_GUARD_ELSE_RETURN(!resource.state.arSessionActive);
        SC_GUARD_ELSE_RETURN([self canRunARSession:resource]);
        SCLogCapturerInfo(@"Starting ARSession");
        resource.state =
            [[[SCManagedCapturerStateBuilder withManagedCapturerState:resource.state] setArSessionActive:YES] build];
        // Make sure we commit any configurations that may be in flight.
        [resource.videoDataSource commitConfiguration];
        runOnMainThreadAsynchronously(^{
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:resource.state];
            [resource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                       didChangeARSessionActive:resource.state];
            // Zooming on an ARSession breaks stuff in super weird ways.
            [[SCManagedCapturerV1 sharedInstance] lockZoomWithContext:SCCapturerContext];
        });
        [self clearARKitData:resource];
        [resource.managedSession stopRunning];
        [resource.arSession
            runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:resource.state.devicePosition]
                         options:(ARSessionRunOptionResetTracking | ARSessionRunOptionRemoveExistingAnchors)];
        [resource.lensProcessingCore setShouldProcessARFrames:resource.state.arSessionActive];
        [self updateLensesFieldOfViewTracking:resource];
    }
}

+ (void)configBlackCameraDetectorWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.captureSessionFixer = [[SCCaptureSessionFixer alloc] init];
    captureResource.blackCameraDetector.blackCameraNoOutputDetector.delegate = captureResource.captureSessionFixer;
    [captureResource.videoDataSource addListener:captureResource.blackCameraDetector.blackCameraNoOutputDetector];
}

+ (void)configureCaptureFaceDetectorWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCCameraFaceFocusDetectionMethod() == SCCameraFaceFocusDetectionMethodTypeCIDetector) {
        SCCaptureCoreImageFaceDetector *detector =
            [[SCCaptureCoreImageFaceDetector alloc] initWithCaptureResource:captureResource];
        captureResource.captureFaceDetector = detector;
        [captureResource.videoDataSource addListener:detector];
    } else {
        captureResource.captureFaceDetector =
            [[SCCaptureMetadataOutputDetector alloc] initWithCaptureResource:captureResource];
    }
}

+ (void)configCaptureDeviceHandlerWithCaptureResource:(SCCaptureResource *)captureResource
{
    captureResource.device.delegate = captureResource.captureDeviceHandler;
}
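// updateLensesFieldOfViewTracking: re-registers the lens processor as a field-of-view
// listener. When an ARSession is active and the data source vends AR frames, the AR
// data source's field of view wins; otherwise the capture device's field of view is
// used as a fallback.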
+ (void)updateLensesFieldOfViewTracking:(SCCaptureResource *)captureResource
{
    // 1. reset observers
    [captureResource.lensProcessingCore removeFieldOfViewListener];

    if (@available(iOS 11.0, *)) {
        if (captureResource.state.arSessionActive &&
            [captureResource.videoDataSource conformsToProtocol:@protocol(SCManagedVideoARDataSource)]) {
            // 2. handle ARKit case
            id<SCManagedVideoARDataSource> arDataSource =
                (id<SCManagedVideoARDataSource>)captureResource.videoDataSource;
            float fieldOfView = arDataSource.fieldOfView;
            if (fieldOfView > 0) {
                // 2.5 there will be no field of view until the first AR frame arrives
                [captureResource.lensProcessingCore setFieldOfView:fieldOfView];
            }
            [captureResource.lensProcessingCore setAsFieldOfViewListenerForARDataSource:arDataSource];
            return;
        }
    }
    // 3. fall back to the regular device field of view
    float fieldOfView = captureResource.device.fieldOfView;
    [captureResource.lensProcessingCore setFieldOfView:fieldOfView];
    [captureResource.lensProcessingCore setAsFieldOfViewListenerForDevice:captureResource.device];
}

+ (CMTime)firstWrittenAudioBufferDelay:(SCCaptureResource *)resource
{
    return resource.videoCapturer.firstWrittenAudioBufferDelay;
}

+ (BOOL)audioQueueStarted:(SCCaptureResource *)resource
{
    return resource.videoCapturer.audioQueueStarted;
}

+ (BOOL)isLensApplied:(SCCaptureResource *)resource
{
    return resource.state.lensesActive && resource.lensProcessingCore.isLensApplied;
}

+ (BOOL)isVideoMirrored:(SCCaptureResource *)resource
{
    if ([resource.videoDataSource respondsToSelector:@selector(isVideoMirrored)]) {
        return [resource.videoDataSource isVideoMirrored];
    } else {
        // Default is NO.
        return NO;
    }
}

+ (BOOL)shouldCaptureImageFromVideoWithResource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer];
    return isIphone5Series && !resource.state.flashActive && ![SCCaptureWorker isLensApplied:resource];
}

+ (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                   completionHandler:(dispatch_block_t)completionHandler
                                            resource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    if (resource.state.isPortraitModeActive) {
        SCTraceODPCompatibleStart(2);
        [resource.queuePerformer perform:^{
            SCTraceStart();
            if (resource.device.isConnected) {
                if (resource.device.softwareZoom) {
                    // Fix for the zooming factor
                    CGPoint adjustedPoint =
                        CGPointMake((pointOfInterest.x - 0.5) / resource.device.softwareZoom + 0.5,
                                    (pointOfInterest.y - 0.5) / resource.device.softwareZoom + 0.5);
                    [resource.videoDataSource setPortraitModePointOfInterest:adjustedPoint];
                    if (resource.state.arSessionActive) {
                        if (@available(iOS 11.0, *)) {
                            [resource.arImageCapturer setPortraitModePointOfInterest:adjustedPoint];
                        }
                    } else {
                        [resource.stillImageCapturer setPortraitModePointOfInterest:adjustedPoint];
                    }
                } else {
                    [resource.videoDataSource setPortraitModePointOfInterest:pointOfInterest];
                    if (resource.state.arSessionActive) {
                        if (@available(iOS 11.0, *)) {
                            [resource.arImageCapturer setPortraitModePointOfInterest:pointOfInterest];
                        }
                    } else {
                        [resource.stillImageCapturer setPortraitModePointOfInterest:pointOfInterest];
                    }
                }
            }
            if (completionHandler) {
                runOnMainThreadAsynchronously(completionHandler);
            }
        }];
    }
}

+ (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                         resource:(SCCaptureResource *)resource
{
    SCAssertPerformer(resource.queuePerformer);
    [resource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration];
}
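// The scan entry points below simply forward to SCManagedVideoScanner; startScan also
// makes sure the video data source is streaming before scanning begins, and stopScan
// invokes its completion handler asynchronously on the main thread.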
+ (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler resource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Stop scan");
    [resource.videoScanner stopScanAsynchronously];
    if (completionHandler) {
        runOnMainThreadAsynchronously(completionHandler);
    }
}

+ (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration resource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Start scan. ScanConfiguration:%@", configuration);
    [SCCaptureWorker startStreaming:resource];
    [resource.videoScanner startScanAsynchronouslyWithScanConfiguration:configuration];
}

@end