From f840c39ecb715a5ece0a2bfe57961e3bc765f0f3 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Tue, 17 May 2016 19:44:16 -0400 Subject: [PATCH 01/55] fixed thumbnail generation - context wasn't loading properly --- Library/Sources/SCRecorder.m | 349 ++++++++++++++++++----------------- 1 file changed, 176 insertions(+), 173 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 97183d0d..93448fd3 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -52,18 +52,18 @@ @implementation SCRecorder - (id)init { self = [super init]; - + if (self) { _sessionQueue = dispatch_queue_create("me.corsin.SCRecorder.RecordSession", nil); - + dispatch_queue_set_specific(_sessionQueue, kSCRecorderRecordSessionQueueKey, "true", nil); dispatch_set_target_queue(_sessionQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); - + _captureSessionPreset = AVCaptureSessionPresetHigh; _previewLayer = [[AVCaptureVideoPreviewLayer alloc] init]; _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; _initializeSessionLazily = YES; - + _videoOrientation = AVCaptureVideoOrientationPortrait; _videoStabilizationMode = AVCaptureVideoStabilizationModeStandard; @@ -75,26 +75,29 @@ - (id)init { [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereReset:) name:AVAudioSessionMediaServicesWereResetNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereLost:) name:AVAudioSessionMediaServicesWereLostNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceOrientationChanged:) name:UIDeviceOrientationDidChangeNotification object:nil]; - + _lastVideoBuffer = [SCSampleBufferHolder new]; _lastAudioBuffer = [SCSampleBufferHolder new]; _maxRecordDuration = kCMTimeInvalid; _resetZoomOnChangeDevice = YES; _mirrorOnFrontCamera = NO; _automaticallyConfiguresApplicationAudioSession = YES; - + self.device = AVCaptureDevicePositionBack; _videoConfiguration = [SCVideoConfiguration new]; _audioConfiguration = [SCAudioConfiguration new]; _photoConfiguration = [SCPhotoConfiguration new]; - + [_videoConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderVideoEnabledContext]; [_audioConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderAudioEnabledContext]; [_photoConfiguration addObserver:self forKeyPath:@"options" options:NSKeyValueObservingOptionNew context:SCRecorderPhotoOptionsContext]; - - _context = [SCContext new].CIContext; + + SCContext *context = [SCContext + contextWithType:SCContextTypeAuto + options:nil]; + _context = context.CIContext; } - + return self; } @@ -102,7 +105,7 @@ - (void)dealloc { [_videoConfiguration removeObserver:self forKeyPath:@"enabled"]; [_audioConfiguration removeObserver:self forKeyPath:@"enabled"]; [_photoConfiguration removeObserver:self forKeyPath:@"options"]; - + [[NSNotificationCenter defaultCenter] removeObserver:self]; [self unprepare]; } @@ -118,7 +121,7 @@ - (void)applicationDidEnterBackground:(id)sender { - (void)applicationDidBecomeActive:(id)sender { [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:self.audioConfiguration.enabled]; - + if (_shouldAutoresumeRecording) { _shouldAutoresumeRecording = NO; [self record]; @@ -141,27 +144,27 @@ - (void)updateVideoOrientation { if (!_session.currentSegmentHasAudio && !_session.currentSegmentHasVideo) { 
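/* Editor's note on the SCContext change above: [SCContext new] skips the
 * factory that wires up a concrete rendering backend, which apparently left
 * the backing CIContext unset and broke thumbnail generation. A minimal
 * sketch of the fixed flow, assuming SCContextTypeAuto picks the best
 * available backend (Metal, then OpenGL ES, then CPU):
 *
 *   SCContext *context = [SCContext contextWithType:SCContextTypeAuto options:nil];
 *   _context = context.CIContext; // non-nil, usable by createCGImage:fromRect:
 */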
[_session deinitialize]; } - + AVCaptureVideoOrientation videoOrientation = [self actualVideoOrientation]; AVCaptureConnection *videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; - + if ([videoConnection isVideoOrientationSupported]) { videoConnection.videoOrientation = videoOrientation; } if ([_previewLayer.connection isVideoOrientationSupported]) { _previewLayer.connection.videoOrientation = videoOrientation; } - + AVCaptureConnection *photoConnection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; if ([photoConnection isVideoOrientationSupported]) { photoConnection.videoOrientation = videoOrientation; } - + AVCaptureConnection *movieOutputConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo]; if (movieOutputConnection.isVideoOrientationSupported) { movieOutputConnection.videoOrientation = videoOrientation; } - + } - (void)beginConfiguration { @@ -184,12 +187,12 @@ - (void)commitConfiguration { - (BOOL)_reconfigureSession { NSError *newError = nil; - + AVCaptureSession *session = _captureSession; - + if (session != nil) { [self beginConfiguration]; - + if (![session.sessionPreset isEqualToString:_captureSessionPreset]) { if ([session canSetSessionPreset:_captureSessionPreset]) { session.sessionPreset = _captureSessionPreset; @@ -197,20 +200,20 @@ - (BOOL)_reconfigureSession { newError = [SCRecorder createError:@"Cannot set session preset"]; } } - + if (self.fastRecordMethodEnabled) { if (_movieOutput == nil) { _movieOutput = [AVCaptureMovieFileOutput new]; } - + if (_videoOutput != nil && [session.outputs containsObject:_videoOutput]) { [session removeOutput:_videoOutput]; } - + if (_audioOutput != nil && [session.outputs containsObject:_audioOutput]) { [session removeOutput:_audioOutput]; } - + if (![session.outputs containsObject:_movieOutput]) { if ([session canAddOutput:_movieOutput]) { [session addOutput:_movieOutput]; @@ -220,12 +223,12 @@ - (BOOL)_reconfigureSession { } } } - + } else { if (_movieOutput != nil && [session.outputs containsObject:_movieOutput]) { [session removeOutput:_movieOutput]; } - + _videoOutputAdded = NO; if (self.videoConfiguration.enabled) { if (_videoOutput == nil) { @@ -233,7 +236,7 @@ - (BOOL)_reconfigureSession { _videoOutput.alwaysDiscardsLateVideoFrames = NO; [_videoOutput setSampleBufferDelegate:self queue:_sessionQueue]; } - + if (![session.outputs containsObject:_videoOutput]) { if ([session canAddOutput:_videoOutput]) { [session addOutput:_videoOutput]; @@ -247,14 +250,14 @@ - (BOOL)_reconfigureSession { _videoOutputAdded = YES; } } - + _audioOutputAdded = NO; if (self.audioConfiguration.enabled) { if (_audioOutput == nil) { _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; [_audioOutput setSampleBufferDelegate:self queue:_sessionQueue]; } - + if (![session.outputs containsObject:_audioOutput]) { if ([session canAddOutput:_audioOutput]) { [session addOutput:_audioOutput]; @@ -269,13 +272,13 @@ - (BOOL)_reconfigureSession { } } } - + if (self.photoConfiguration.enabled) { if (_photoOutput == nil) { _photoOutput = [[AVCaptureStillImageOutput alloc] init]; _photoOutput.outputSettings = [self.photoConfiguration createOutputSettings]; } - + if (![session.outputs containsObject:_photoOutput]) { if ([session canAddOutput:_photoOutput]) { [session addOutput:_photoOutput]; @@ -286,11 +289,11 @@ - (BOOL)_reconfigureSession { } } } - + [self commitConfiguration]; } _error = newError; - + return newError == nil; } @@ -298,26 +301,26 @@ - (BOOL)prepare:(NSError **)error { if (_captureSession != nil) { 
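/* prepare: is single-shot: reaching this branch means a capture session
 * already exists, so the exception below fires. Call unprepare before
 * rebuilding the pipeline from scratch. */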
[NSException raise:@"SCCameraException" format:@"The session is already opened"]; } - + AVCaptureSession *session = [[AVCaptureSession alloc] init]; session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; _beginSessionConfigurationCount = 0; _captureSession = session; - + [self beginConfiguration]; - + BOOL success = [self _reconfigureSession]; - + if (!success && error != nil) { *error = _error; } - + _previewLayer.session = session; - + [self reconfigureVideoInput:YES audioInput:YES]; - + [self commitConfiguration]; - + return success; } @@ -326,11 +329,11 @@ - (BOOL)startRunning { if (!self.isPrepared) { success = [self prepare:nil]; } - + if (!_captureSession.isRunning) { [_captureSession startRunning]; } - + return success; } @@ -340,9 +343,9 @@ - (void)stopRunning { - (void)_subjectAreaDidChange { id delegate = self.delegate; - + if (![delegate respondsToSelector:@selector(recorderShouldAutomaticallyRefocus:)] || [delegate recorderShouldAutomaticallyRefocus:self]) { - [self focusCenter]; + [self focusCenter]; } } @@ -350,26 +353,26 @@ - (UIImage *)_imageFromSampleBufferHolder:(SCSampleBufferHolder *)sampleBufferHo __block CMSampleBufferRef sampleBuffer = nil; dispatch_sync(_sessionQueue, ^{ sampleBuffer = sampleBufferHolder.sampleBuffer; - + if (sampleBuffer != nil) { CFRetain(sampleBuffer); } }); - + if (sampleBuffer == nil) { return nil; } - + CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer); CIImage *ciImage = [CIImage imageWithCVPixelBuffer:buffer]; - + CGImageRef cgImage = [_context createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(buffer), CVPixelBufferGetHeight(buffer))]; - + UIImage *image = [UIImage imageWithCGImage:cgImage]; - + CGImageRelease(cgImage); CFRelease(sampleBuffer); - + return image; } @@ -382,7 +385,7 @@ - (void)capturePhoto:(void(^)(NSError*, UIImage*))completionHandler { if (connection != nil) { [_photoOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { - + if (imageDataSampleBuffer != nil && error == nil) { NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; if (jpegData) { @@ -416,11 +419,11 @@ - (void)unprepare { [self removeVideoObservers:input.device]; } } - + for (AVCaptureOutput *output in _captureSession.outputs) { [_captureSession removeOutput:output]; } - + _previewLayer.session = nil; _captureSession = nil; } @@ -429,11 +432,11 @@ - (void)unprepare { - (void)_progressTimerFired:(NSTimer *)progressTimer { CMTime recordedDuration = _movieOutput.recordedDuration; - + if (CMTIME_COMPARE_INLINE(recordedDuration, !=, _lastMovieFileOutputTime)) { SCRecordSession *recordSession = _session; id delegate = self.delegate; - + if (recordSession != nil) { if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) { dispatch_async(dispatch_get_main_queue(), ^{ @@ -447,7 +450,7 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer { } } } - + _lastMovieFileOutputTime = recordedDuration; } @@ -462,7 +465,7 @@ - (void)record { } } }; - + if ([SCRecorder isSessionQueue]) { block(); } else { @@ -476,10 +479,10 @@ - (void)pause { - (void)pause:(void(^)())completionHandler { _isRecording = NO; - + void (^block)() = ^{ SCRecordSession *recordSession = _session; - + if (recordSession != nil) { if (recordSession.recordSegmentReady) { NSDictionary *info = [self _createSegmentInfo]; @@ -509,7 +512,7 @@ - 
(void)pause:(void(^)())completionHandler { dispatch_handler(completionHandler); } }; - + if ([SCRecorder isSessionQueue]) { block(); } else { @@ -534,7 +537,7 @@ - (void)beginRecordSegmentIfNeeded:(SCRecordSession *)recordSession { } else { [recordSession beginSegment:&error]; } - + id delegate = self.delegate; if (beginSegment && [delegate respondsToSelector:@selector(recorder:didBeginSegmentInSession:error:)]) { dispatch_async(dispatch_get_main_queue(), ^{ @@ -547,18 +550,18 @@ - (void)beginRecordSegmentIfNeeded:(SCRecordSession *)recordSession { - (void)checkRecordSessionDuration:(SCRecordSession *)recordSession { CMTime currentRecordDuration = recordSession.duration; CMTime suggestedMaxRecordDuration = _maxRecordDuration; - + if (CMTIME_IS_VALID(suggestedMaxRecordDuration)) { if (CMTIME_COMPARE_INLINE(currentRecordDuration, >=, suggestedMaxRecordDuration)) { _isRecording = NO; - + dispatch_async(_sessionQueue, ^{ [recordSession endSegmentWithInfo:[self _createSegmentInfo] completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { id delegate = self.delegate; if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; } - + if ([delegate respondsToSelector:@selector(recorder:didCompleteSession:)]) { [delegate recorder:self didCompleteSession:recordSession]; } @@ -570,11 +573,11 @@ - (void)checkRecordSessionDuration:(SCRecordSession *)recordSession { - (CMTime)frameDurationFromConnection:(AVCaptureConnection *)connection { AVCaptureDevice *device = [self currentVideoDeviceInput].device; - + if ([device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { return device.activeVideoMinFrameDuration; } - + #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" return connection.videoMinFrameDuration; @@ -584,66 +587,66 @@ - (CMTime)frameDurationFromConnection:(AVCaptureConnection *)connection { - (SCFilter *)_transformFilterUsingBufferWidth:(size_t)bufferWidth bufferHeight:(size_t)bufferHeight mirrored:(BOOL)mirrored { if (_transformFilter == nil || _transformFilterBufferWidth != bufferWidth || _transformFilterBufferHeight != bufferHeight) { BOOL shouldMirrorBuffer = _keepMirroringOnWrite && mirrored; - + if (!shouldMirrorBuffer) { _transformFilter = nil; } else { CGAffineTransform tx = CGAffineTransformIdentity; - + _transformFilter = [SCFilter filterWithAffineTransform:CGAffineTransformTranslate(CGAffineTransformScale(tx, -1, 1), -(CGFloat)bufferWidth, 0)]; } - + _transformFilterBufferWidth = bufferWidth; _transformFilterBufferHeight = bufferHeight; } - + return _transformFilter; } - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer toRecordSession:(SCRecordSession *)recordSession duration:(CMTime)duration connection:(AVCaptureConnection *)connection completion:(void(^)(BOOL success))completion { CVPixelBufferRef sampleBufferImage = CMSampleBufferGetImageBuffer(sampleBuffer); - + size_t bufferWidth = (CGFloat)CVPixelBufferGetWidth(sampleBufferImage); size_t bufferHeight = (CGFloat)CVPixelBufferGetHeight(sampleBufferImage); - + CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); SCFilter *filterGroup = _videoConfiguration.filter; SCFilter *transformFilter = [self _transformFilterUsingBufferWidth:bufferWidth bufferHeight:bufferHeight mirrored: _device == AVCaptureDevicePositionFront ]; - + if (filterGroup == nil && transformFilter == nil) { [recordSession 
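/* Fast path: with no filter chain and no mirroring transform, the captured
 * pixel buffer is appended untouched, skipping the CIImage round trip and
 * the CIContext render further down. */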
appendVideoPixelBuffer:sampleBufferImage atTime:time duration:duration completion:completion]; return; } - + CVPixelBufferRef pixelBuffer = [recordSession createPixelBuffer]; - + if (pixelBuffer == nil) { completion(NO); return; } - + CIImage *image = [CIImage imageWithCVPixelBuffer:sampleBufferImage]; CFTimeInterval seconds = CMTimeGetSeconds(time); - + if (transformFilter != nil) { image = [transformFilter imageByProcessingImage:image atTime:seconds]; } - + if (filterGroup != nil) { image = [filterGroup imageByProcessingImage:image atTime:seconds]; } - + CVPixelBufferLockBaseAddress(pixelBuffer, 0); - + [_context render:image toCVPixelBuffer:pixelBuffer]; - + [recordSession appendVideoPixelBuffer:pixelBuffer atTime:time duration:duration completion:^(BOOL success) { CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - + CVPixelBufferRelease(pixelBuffer); - + completion(success); }]; } @@ -651,7 +654,7 @@ - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer toRecordSession: - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections { dispatch_async(_sessionQueue, ^{ [_session notifyMovieFileOutputIsReady]; - + if (!_isRecording) { [self pause:_pauseCompletionHandler]; } @@ -660,7 +663,7 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOu - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { _isRecording = NO; - + dispatch_async(_sessionQueue, ^{ BOOL hasComplete = NO; NSError *actualError = error; @@ -668,31 +671,31 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToO actualError = nil; hasComplete = YES; } - + [_session appendRecordSegmentUrl:outputFileURL info:[self _createSegmentInfo] error:actualError completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { void (^pauseCompletionHandler)() = _pauseCompletionHandler; _pauseCompletionHandler = nil; - + SCRecordSession *recordSession = _session; - + if (recordSession != nil) { id delegate = self.delegate; if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; } - + if (hasComplete || (CMTIME_IS_VALID(_maxRecordDuration) && CMTIME_COMPARE_INLINE(recordSession.duration, >=, _maxRecordDuration))) { if ([delegate respondsToSelector:@selector(recorder:didCompleteSession:)]) { [delegate recorder:self didCompleteSession:recordSession]; } } } - + if (pauseCompletionHandler != nil) { pauseCompletionHandler(); } }]; - + if (_isRecording) { [self record]; } @@ -860,11 +863,11 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM - (NSDictionary *)_createSegmentInfo { id delegate = self.delegate; NSDictionary *segmentInfo = nil; - + if ([delegate respondsToSelector:@selector(createSegmentInfoForRecorder:)]) { segmentInfo = [delegate createSegmentInfoForRecorder:self]; } - + return segmentInfo; } @@ -872,11 +875,11 @@ - (void)_focusDidComplete { id delegate = self.delegate; [self setAdjustingFocus:NO]; - + if ([delegate respondsToSelector:@selector(recorderDidEndFocus:)]) { [delegate recorderDidEndFocus:self]; } - + if (_needsSwitchBackToContinuousFocus) { _needsSwitchBackToContinuousFocus = NO; [self continuousFocusAtPoint:self.focusPointOfInterest]; @@ -885,12 +888,12 @@ - (void)_focusDidComplete 
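/* KVO dispatch point: the focus and exposure observations registered in
 * configureDevice: land in observeValueForKeyPath: below and are fanned out
 * to the delegate as recorderDidStartFocus:/recorderDidEndFocus: and their
 * exposure equivalents. */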
{ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { id delegate = self.delegate; - + if (context == SCRecorderFocusContext) { BOOL isFocusing = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; if (isFocusing) { [self setAdjustingFocus:YES]; - + if ([delegate respondsToSelector:@selector(recorderDidStartFocus:)]) { [delegate recorderDidStartFocus:self]; } @@ -899,13 +902,13 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N } } else if (context == SCRecorderExposureContext) { BOOL isAdjustingExposure = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; - + [self setAdjustingExposure:isAdjustingExposure]; if (isAdjustingExposure) { if ([delegate respondsToSelector:@selector(recorderDidStartAdjustingExposure:)]) { [delegate recorderDidStartAdjustingExposure:self]; - } + } } else { if ([delegate respondsToSelector:@selector(recorderDidEndAdjustingExposure:)]) { [delegate recorderDidEndAdjustingExposure:self]; @@ -963,7 +966,7 @@ - (void)_configureFrontCameraMirroring:(BOOL)videoMirrored { - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaType error:(NSError**)error { AVCaptureDeviceInput *currentInput = [self currentDeviceInputForMediaType:mediaType]; AVCaptureDevice *currentUsedDevice = currentInput.device; - + if (currentUsedDevice != newDevice) { if ([mediaType isEqualToString:AVMediaTypeVideo]) { NSError *error; @@ -972,7 +975,7 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy newDevice.smoothAutoFocusEnabled = YES; } newDevice.subjectAreaChangeMonitoringEnabled = true; - + if (newDevice.isLowLightBoostSupported) { newDevice.automaticallyEnablesLowLightBoostWhenAvailable = YES; } @@ -984,13 +987,13 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy } else { _audioInputAdded = NO; } - + AVCaptureDeviceInput *newInput = nil; - + if (newDevice != nil) { newInput = [[AVCaptureDeviceInput alloc] initWithDevice:newDevice error:error]; } - + if (*error == nil) { if (currentInput != nil) { [_captureSession removeInput:currentInput]; @@ -998,17 +1001,17 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy [self removeVideoObservers:currentInput.device]; } } - + if (newInput != nil) { if ([_captureSession canAddInput:newInput]) { [_captureSession addInput:newInput]; if ([newInput.device hasMediaType:AVMediaTypeVideo]) { _videoInputAdded = YES; - + [self addVideoObservers:newInput.device]; [self _configureVideoStabilization]; [self _configureFrontCameraMirroring:_mirrorOnFrontCamera && newInput.device.position == AVCaptureDevicePositionFront]; - + } else { _audioInputAdded = YES; } @@ -1023,7 +1026,7 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy - (void)reconfigureVideoInput:(BOOL)shouldConfigureVideo audioInput:(BOOL)shouldConfigureAudio { if (_captureSession != nil) { [self beginConfiguration]; - + NSError *videoError = nil; if (shouldConfigureVideo) { [self configureDevice:[self videoDevice] mediaType:AVMediaTypeVideo error:&videoError]; @@ -1032,15 +1035,15 @@ - (void)reconfigureVideoInput:(BOOL)shouldConfigureVideo audioInput:(BOOL)should [self updateVideoOrientation]; }); } - + NSError *audioError = nil; - + if (shouldConfigureAudio) { [self configureDevice:[self audioDevice] mediaType:AVMediaTypeAudio error:&audioError]; } - + [self commitConfiguration]; - + id delegate = self.delegate; if 
(shouldConfigureAudio) { if ([delegate respondsToSelector:@selector(recorder:didReconfigureAudioInput:)]) { @@ -1088,7 +1091,7 @@ - (void)mediaServicesWereLost:(NSNotification *)notification { - (void)sessionInterrupted:(NSNotification *)notification { NSNumber *interruption = [notification.userInfo objectForKey:AVAudioSessionInterruptionOptionKey]; - + if (interruption != nil) { AVAudioSessionInterruptionOptions options = interruption.unsignedIntValue; if (options == AVAudioSessionInterruptionOptionShouldResume) { @@ -1113,12 +1116,12 @@ - (void)_applyPointOfInterest:(CGPoint)point continuousMode:(BOOL)continuousMode AVCaptureFocusMode focusMode = continuousMode ? AVCaptureFocusModeContinuousAutoFocus : AVCaptureFocusModeAutoFocus; AVCaptureExposureMode exposureMode = continuousMode ? AVCaptureExposureModeContinuousAutoExposure : AVCaptureExposureModeAutoExpose; AVCaptureWhiteBalanceMode whiteBalanceMode = continuousMode ? AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance : AVCaptureWhiteBalanceModeAutoWhiteBalance; - + NSError *error; if ([device lockForConfiguration:&error]) { BOOL focusing = NO; BOOL adjustingExposure = NO; - + if (device.isFocusPointOfInterestSupported) { device.focusPointOfInterest = point; } @@ -1126,36 +1129,36 @@ - (void)_applyPointOfInterest:(CGPoint)point continuousMode:(BOOL)continuousMode device.focusMode = focusMode; focusing = YES; } - + if (device.isExposurePointOfInterestSupported) { device.exposurePointOfInterest = point; } - + if ([device isExposureModeSupported:exposureMode]) { device.exposureMode = exposureMode; adjustingExposure = YES; } - + if ([device isWhiteBalanceModeSupported:whiteBalanceMode]) { device.whiteBalanceMode = whiteBalanceMode; } - + device.subjectAreaChangeMonitoringEnabled = !continuousMode; [device unlockForConfiguration]; - + id delegate = self.delegate; if (focusMode != AVCaptureFocusModeContinuousAutoFocus && focusing) { if ([delegate respondsToSelector:@selector(recorderWillStartFocus:)]) { [delegate recorderWillStartFocus:self]; } - + [self setAdjustingFocus:YES]; } - + if (exposureMode != AVCaptureExposureModeContinuousAutoExposure && adjustingExposure) { [self setAdjustingExposure:YES]; - + if ([delegate respondsToSelector:@selector(recorderWillStartAdjustingExposure:)]) { [delegate recorderWillStartAdjustingExposure:self]; } @@ -1213,7 +1216,7 @@ - (AVCaptureDeviceInput*)currentDeviceInputForMediaType:(NSString*)mediaType { return deviceInput; } } - + return nil; } @@ -1221,7 +1224,7 @@ - (AVCaptureDevice*)audioDevice { if (!self.audioConfiguration.enabled) { return nil; } - + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; } @@ -1229,16 +1232,16 @@ - (AVCaptureDevice*)videoDevice { if (!self.videoConfiguration.enabled) { return nil; } - + return [SCRecorderTools videoDeviceForPosition:_device]; } - (AVCaptureVideoOrientation)actualVideoOrientation { AVCaptureVideoOrientation videoOrientation = _videoOrientation; - + if (_autoSetVideoOrientation) { UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation]; - + switch (deviceOrientation) { case UIDeviceOrientationLandscapeLeft: videoOrientation = AVCaptureVideoOrientationLandscapeRight; @@ -1256,7 +1259,7 @@ - (AVCaptureVideoOrientation)actualVideoOrientation { break; } } - + return videoOrientation; } @@ -1266,12 +1269,12 @@ - (AVCaptureSession*)captureSession { - (void)setPreviewView:(UIView *)previewView { [_previewLayer removeFromSuperlayer]; - + _previewView = previewView; - + if (_previewView != nil) { [_previewView.layer 
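/* Index 0 keeps the preview layer behind any overlay subviews the host app
 * adds to previewView (record buttons, filter pickers, and the like). */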
insertSublayer:_previewLayer atIndex:0]; - + [self previewViewFrameChanged]; } } @@ -1290,7 +1293,7 @@ - (void)setPhotoOutputSettings:(NSDictionary *)photoOutputSettings { - (void)setDevice:(AVCaptureDevicePosition)device { [self willChangeValueForKey:@"device"]; - + _device = device; if (_resetZoomOnChangeDevice) { self.videoZoomFactor = 1; @@ -1298,14 +1301,14 @@ - (void)setDevice:(AVCaptureDevicePosition)device { if (_captureSession != nil) { [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:NO]; } - + [self didChangeValueForKey:@"device"]; } - (void)setFlashMode:(SCFlashMode)flashMode { AVCaptureDevice *currentDevice = [self videoDevice]; NSError *error = nil; - + if (currentDevice.hasFlash) { if ([currentDevice lockForConfiguration:&error]) { if (flashMode == SCFlashModeLight) { @@ -1323,18 +1326,18 @@ - (void)setFlashMode:(SCFlashMode)flashMode { [currentDevice setFlashMode:(AVCaptureFlashMode)flashMode]; } } - + [currentDevice unlockForConfiguration]; } } else { error = [SCRecorder createError:@"Current device does not support flash"]; } - + id delegate = self.delegate; if ([delegate respondsToSelector:@selector(recorder:didChangeFlashMode:error:)]) { [delegate recorder:self didChangeFlashMode:flashMode error:error]; } - + if (error == nil) { _flashMode = flashMode; } @@ -1355,7 +1358,7 @@ - (BOOL)isPrepared { - (void)setCaptureSessionPreset:(NSString *)sessionPreset { _captureSessionPreset = sessionPreset; - + if (_captureSession != nil) { [self _reconfigureSession]; _captureSessionPreset = _captureSession.sessionPreset; @@ -1371,9 +1374,9 @@ - (void)setSession:(SCRecordSession *)recordSession { if (_session != recordSession) { dispatch_sync(_sessionQueue, ^{ _session.recorder = nil; - + _session = recordSession; - + recordSession.recorder = self; }); } @@ -1390,9 +1393,9 @@ - (BOOL)isAdjustingFocus { - (void)setAdjustingExposure:(BOOL)adjustingExposure { if (_isAdjustingExposure != adjustingExposure) { [self willChangeValueForKey:@"isAdjustingExposure"]; - + _isAdjustingExposure = adjustingExposure; - + [self didChangeValueForKey:@"isAdjustingExposure"]; } } @@ -1400,9 +1403,9 @@ - (void)setAdjustingExposure:(BOOL)adjustingExposure { - (void)setAdjustingFocus:(BOOL)adjustingFocus { if (_adjustingFocus != adjustingFocus) { [self willChangeValueForKey:@"isAdjustingFocus"]; - + _adjustingFocus = adjustingFocus; - + [self didChangeValueForKey:@"isAdjustingFocus"]; } } @@ -1415,15 +1418,15 @@ - (AVCaptureConnection*)videoConnection { } } } - + return nil; } - (CMTimeScale)frameRate { AVCaptureDeviceInput * deviceInput = [self currentVideoDeviceInput]; - + CMTimeScale framerate = 0; - + if (deviceInput != nil) { if ([deviceInput.device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { framerate = deviceInput.device.activeVideoMaxFrameDuration.timescale; @@ -1435,19 +1438,19 @@ - (CMTimeScale)frameRate { #pragma clang diagnostic pop } } - + return framerate; } - (void)setFrameRate:(CMTimeScale)framePerSeconds { CMTime fps = CMTimeMake(1, framePerSeconds); - + AVCaptureDevice * device = [self videoDevice]; - + if (device != nil) { NSError * error = nil; BOOL formatSupported = [SCRecorderTools formatInRange:device.activeFormat frameRate:framePerSeconds]; - + if (formatSupported) { if ([device respondsToSelector:@selector(activeVideoMinFrameDuration)]) { if ([device lockForConfiguration:&error]) { @@ -1488,12 +1491,12 @@ - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width and CMVideoDimensions dimensions; dimensions.width = width; 
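/* Editor's note: usage sketch for this format picker (the hunk header above
 * truncates the selector; upstream SCRecorder declares it as
 * setActiveFormatWithFrameRate:width:andHeight:error:). This asks for the
 * smallest device format covering 1080p at 60 fps:
 *
 *   NSError *formatError = nil;
 *   BOOL ok = [recorder setActiveFormatWithFrameRate:60 width:1920 andHeight:1080 error:&formatError];
 */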
dimensions.height = height; - + BOOL foundSupported = NO; - + if (device != nil) { AVCaptureDeviceFormat *bestFormat = nil; - + for (AVCaptureDeviceFormat *format in device.formats) { if ([SCRecorderTools formatInRange:format frameRate:frameRate dimensions:dimensions]) { if (bestFormat == nil) { @@ -1501,7 +1504,7 @@ - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width and } else { CMVideoDimensions bestDimensions = CMVideoFormatDescriptionGetDimensions(bestFormat.formatDescription); CMVideoDimensions currentDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - + if (currentDimensions.width < bestDimensions.width && currentDimensions.height < bestDimensions.height) { bestFormat = format; } else if (currentDimensions.width == bestDimensions.width && currentDimensions.height == bestDimensions.height) { @@ -1512,17 +1515,17 @@ - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width and } } } - + if (bestFormat != nil) { if ([device lockForConfiguration:error]) { CMTime frameDuration = CMTimeMake(1, frameRate); - + device.activeFormat = bestFormat; foundSupported = true; - + device.activeVideoMinFrameDuration = frameDuration; device.activeVideoMaxFrameDuration = frameDuration; - + [device unlockForConfiguration]; } } else { @@ -1535,24 +1538,24 @@ - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width and *error = [SCRecorder createError:@"The camera must be initialized before setting active format"]; } } - + if (foundSupported && error != nil) { *error = nil; } - + return foundSupported; } - (CGFloat)ratioRecorded { CGFloat ratio = 0; - + if (CMTIME_IS_VALID(_maxRecordDuration)) { Float64 maxRecordDuration = CMTimeGetSeconds(_maxRecordDuration); Float64 recordedTime = CMTimeGetSeconds(_session.duration); - + ratio = (CGFloat)(recordedTime / maxRecordDuration); } - + return ratio; } @@ -1585,11 +1588,11 @@ - (void)setKeepMirroringOnWrite:(BOOL)keepMirroringOnWrite { - (CGFloat)videoZoomFactor { AVCaptureDevice *device = [self videoDevice]; - + if ([device respondsToSelector:@selector(videoZoomFactor)]) { return device.videoZoomFactor; } - + return 1; } @@ -1604,7 +1607,7 @@ - (CGFloat)maxVideoZoomFactorForDevice:(AVCaptureDevicePosition)devicePosition - (void)setVideoZoomFactor:(CGFloat)videoZoomFactor { AVCaptureDevice *device = [self videoDevice]; - + if ([device respondsToSelector:@selector(videoZoomFactor)]) { NSError *error; if ([device lockForConfiguration:&error]) { @@ -1613,7 +1616,7 @@ - (void)setVideoZoomFactor:(CGFloat)videoZoomFactor { } else { NSLog(@"Unable to set videoZoom: (max %f, asked %f)", device.activeFormat.videoMaxZoomFactor, videoZoomFactor); } - + [device unlockForConfiguration]; } else { NSLog(@"Unable to set videoZoom: %@", error.localizedDescription); @@ -1624,7 +1627,7 @@ - (void)setVideoZoomFactor:(CGFloat)videoZoomFactor { - (void)setFastRecordMethodEnabled:(BOOL)fastRecordMethodEnabled { if (_fastRecordMethodEnabled != fastRecordMethodEnabled) { _fastRecordMethodEnabled = fastRecordMethodEnabled; - + [self _reconfigureSession]; } } @@ -1642,7 +1645,7 @@ + (SCRecorder *)sharedRecorder { dispatch_once(&onceToken, ^{ _sharedRecorder = [SCRecorder new]; }); - + return _sharedRecorder; } From a4c320e108b4dde4a6304b14d992854a0b3ce882 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Tue, 17 May 2016 21:51:54 -0400 Subject: [PATCH 02/55] added export at URL --- Library/Sources/SCRecordSession.h | 4 + Library/Sources/SCRecordSession.m | 253 ++++++++++++++++++------------ 2 files 
changed, 156 insertions(+), 101 deletions(-)

diff --git a/Library/Sources/SCRecordSession.h b/Library/Sources/SCRecordSession.h
index da4c7147..d4b5295a 100644
--- a/Library/Sources/SCRecordSession.h
+++ b/Library/Sources/SCRecordSession.h
@@ -184,6 +184,10 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory;
 */
- (AVAssetExportSession *__nullable)mergeSegmentsUsingPreset:(NSString *__nonnull)exportSessionPreset completionHandler:(void(^__nonnull)(NSURL *__nullable outputUrl, NSError *__nullable error))completionHandler;

+- (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPreset
+                                             atURL:(NSURL *)url
+                                 completionHandler:(void(^)(NSURL *outputUrl, NSError *error))completionHandler;
+
/**
 Returns an asset representing all the record segments from this record session.
 This can be called anytime.
diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m
index f45dfec4..6c0c849b 100644
--- a/Library/Sources/SCRecordSession.m
+++ b/Library/Sources/SCRecordSession.m
@@ -30,17 +30,17 @@ @implementation SCRecordSession
- (id)initWithDictionaryRepresentation:(NSDictionary *)dictionaryRepresentation {
    self = [self init];
-
+
    if (self) {
        NSString *directory = dictionaryRepresentation[SCRecordSessionDirectoryKey];
        if (directory != nil) {
            _segmentsDirectory = directory;
        }
-
+
        NSArray *recordSegments = [dictionaryRepresentation objectForKey:SCRecordSessionSegmentFilenamesKey];
-
+
        BOOL shouldRecomputeDuration = NO;
-
+
        // OLD WAY
        for (NSObject *recordSegment in recordSegments) {
            NSString *filename = nil;
@@ -52,9 +52,9 @@ - (id)initWithDictionaryRepresentation:(NSDictionary *)dictionaryRepresentation
                // EVEN OLDER WAY
                filename = (NSString *)recordSegment;
            }
-
+
            NSURL *url = [SCRecordSessionSegment segmentURLForFilename:filename andDirectory:_segmentsDirectory];
-
+
            if ([[NSFileManager defaultManager] fileExistsAtPath:url.path]) {
                [_segments addObject:[SCRecordSessionSegment segmentWithURL:url info:info]];
            } else {
@@ -62,12 +62,12 @@ - (id)initWithDictionaryRepresentation:(NSDictionary *)dictionaryRepresentation
                shouldRecomputeDuration = YES;
            }
        }
-
+
        // NEW WAY
        NSArray *segments = [dictionaryRepresentation objectForKey:SCRecordSessionSegmentsKey];
        for (NSDictionary *segmentDictRepresentation in segments) {
            SCRecordSessionSegment *segment = [[SCRecordSessionSegment alloc] initWithDictionaryRepresentation:segmentDictRepresentation directory:_segmentsDirectory];
-
+
            if (segment.fileUrlExists) {
                [_segments addObject:segment];
            } else {
@@ -76,38 +76,38 @@ - (id)initWithDictionaryRepresentation:(NSDictionary *)dictionaryRepresentation
            }
        }
-
+
        _currentSegmentCount = (int)_segments.count;
-
+
        NSNumber *recordDuration = [dictionaryRepresentation objectForKey:SCRecordSessionDurationKey];
        if (recordDuration != nil) {
            _segmentsDuration = CMTimeMakeWithSeconds(recordDuration.doubleValue, 10000);
        } else {
            shouldRecomputeDuration = YES;
        }
-
+
        if (shouldRecomputeDuration) {
            _segmentsDuration = self.assetRepresentingSegments.duration;
-
+
            if (CMTIME_IS_INVALID(_segmentsDuration)) {
                NSLog(@"Unable to set the segments duration: one or more input assets are invalid");
                NSLog(@"The imported SCRecordSession is probably not usable.");
            }
        }
-
+
        _identifier = [dictionaryRepresentation objectForKey:SCRecordSessionIdentifierKey];
        _date = [dictionaryRepresentation objectForKey:SCRecordSessionDateKey];
    }
-
+
    return self;
}

- (id)init {
    self = [super init];
-
+
    if (self) {
        _segments = [[NSMutableArray alloc] init];
-
+
        _assetWriter = nil;
        _videoInput = nil;
        _audioInput
= nil; @@ -123,20 +123,20 @@ - (id)init { _identifier = [NSString stringWithFormat:@"%@-", [SCRecordSession newIdentifier:12]]; _audioQueue = dispatch_queue_create("me.corsin.SCRecorder.Audio", nil); } - + return self; } + (NSString *)newIdentifier:(NSUInteger)length { static NSString *letters = @"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - + NSMutableString *randomString = [NSMutableString stringWithCapacity:length]; - + for (int i = 0; i < length; i++) { [randomString appendFormat: @"%C", [letters characterAtIndex: arc4random_uniform((u_int32_t)[letters length])]]; } - + return randomString; } @@ -154,7 +154,7 @@ + (NSError*)createError:(NSString*)errorDescription { - (void)dispatchSyncOnSessionQueue:(void(^)())block { SCRecorder *recorder = self.recorder; - + if (recorder == nil || [SCRecorder isSessionQueue]) { block(); } else { @@ -180,9 +180,9 @@ - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile [self dispatchSyncOnSessionQueue:^{ SCRecordSessionSegment *segment = [_segments objectAtIndex:segmentIndex]; [_segments removeObjectAtIndex:segmentIndex]; - + CMTime segmentDuration = segment.duration; - + if (CMTIME_IS_VALID(segmentDuration)) { // NSLog(@"Removed duration of %fs", CMTimeGetSeconds(segmentDuration)); _segmentsDuration = CMTimeSubtract(_segmentsDuration, segmentDuration); @@ -195,7 +195,7 @@ - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile } _segmentsDuration = newDuration; } - + if (deleteFile) { [segment deleteFile]; } @@ -223,14 +223,14 @@ - (void)removeAllSegments:(BOOL)removeFiles { } [_segments removeObjectAtIndex:0]; } - + _segmentsDuration = kCMTimeZero; }]; } - (NSString*)_suggestedFileType { NSString *fileType = self.fileType; - + if (fileType == nil) { SCRecorder *recorder = self.recorder; if (recorder.videoEnabledAndReady) { @@ -239,23 +239,23 @@ - (NSString*)_suggestedFileType { fileType = AVFileTypeAppleM4A; } } - + return fileType; } - (NSString *)_suggestedFileExtension { NSString *extension = self.fileExtension; - + if (extension != nil) { return extension; } - + NSString *fileType = [self _suggestedFileType]; - + if (fileType == nil) { return nil; } - + if ([fileType isEqualToString:AVFileTypeMPEG4]) { return @"mp4"; } else if ([fileType isEqualToString:AVFileTypeAppleM4A]) { @@ -269,7 +269,7 @@ - (NSString *)_suggestedFileExtension { } else if ([fileType isEqualToString:AVFileTypeMPEGLayer3]) { return @"mp3"; } - + return nil; } @@ -279,11 +279,11 @@ - (NSURL *)nextFileURL:(NSError **)error { if (extension != nil) { NSString *filename = [NSString stringWithFormat:@"%@SCVideo.%d.%@", _identifier, _currentSegmentCount, extension]; NSURL *file = [SCRecordSessionSegment segmentURLForFilename:filename andDirectory:self.segmentsDirectory]; - + [self removeFile:file]; - + _currentSegmentCount++; - + return file; } else { @@ -298,12 +298,12 @@ - (NSURL *)nextFileURL:(NSError **)error { - (AVAssetWriter *)createWriter:(NSError **)error { NSError *theError = nil; AVAssetWriter *writer = nil; - + NSString *fileType = [self _suggestedFileType]; - + if (fileType != nil) { NSURL *file = [self nextFileURL:&theError]; - + if (file != nil) { writer = [[AVAssetWriter alloc] initWithURL:file fileType:fileType error:&theError]; writer.metadata = [SCRecorderTools assetWriterMetadata]; @@ -311,10 +311,10 @@ - (AVAssetWriter *)createWriter:(NSError **)error { } else { theError = [SCRecordSession createError:@"No fileType has been set in the SCRecordSession"]; } - + if (theError == nil) { 
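/* shouldOptimizeForNetworkUse below writes the movie header (moov atom) at
 * the front of each segment file, so a segment can start streaming or
 * uploading before it has been fully written out. */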
writer.shouldOptimizeForNetworkUse = YES; - + if (_videoInput != nil) { if ([writer canAddInput:_videoInput]) { [writer addInput:_videoInput]; @@ -322,7 +322,7 @@ - (AVAssetWriter *)createWriter:(NSError **)error { theError = [SCRecordSession createError:@"Cannot add videoInput to the assetWriter with the currently applied settings"]; } } - + if (_audioInput != nil) { if ([writer canAddInput:_audioInput]) { [writer addInput:_audioInput]; @@ -330,7 +330,7 @@ - (AVAssetWriter *)createWriter:(NSError **)error { theError = [SCRecordSession createError:@"Cannot add audioInput to the assetWriter with the currently applied settings"]; } } - + if ([writer startWriting]) { // NSLog(@"Starting session at %fs", CMTimeGetSeconds(_lastTime)); _timeOffset = kCMTimeZero; @@ -343,18 +343,18 @@ - (AVAssetWriter *)createWriter:(NSError **)error { writer = nil; } } - + if (error != nil) { *error = theError; } - + return writer; } - (void)deinitialize { [self dispatchSyncOnSessionQueue:^{ [self endSegmentWithInfo:nil completionHandler:nil]; - + _audioConfiguration = nil; _videoConfiguration = nil; _audioInitializationFailed = NO; @@ -372,22 +372,22 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings sourceFormatHint:formatDescription]; _videoInput.expectsMediaDataInRealTime = YES; _videoInput.transform = _videoConfiguration.affineTransform; - + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); - + NSDictionary *pixelBufferAttributes = @{ (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:dimensions.width], (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:dimensions.height] }; - + _videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes]; } @catch (NSException *exception) { theError = [SCRecordSession createError:exception.reason]; } - + _videoInitializationFailed = theError != nil; - + if (error != nil) { *error = theError; } @@ -402,9 +402,9 @@ - (void)initializeAudio:(NSDictionary *)audioSettings formatDescription:(CMForma } @catch (NSException *exception) { theError = [SCRecordSession createError:exception.reason]; } - + _audioInitializationFailed = theError != nil; - + if (error != nil) { *error = theError; } @@ -432,13 +432,13 @@ - (CMSampleBufferRef)adjustBuffer:(CMSampleBufferRef)sample withTimeOffset:(CMTi CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count); CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); - + for (CMItemCount i = 0; i < count; i++) { pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); pInfo[i].duration = duration; } - + CMSampleBufferRef sout; CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); free(pInfo); @@ -474,14 +474,14 @@ - (void)_destroyAssetWriter { - (void)appendRecordSegmentUrl:(NSURL *)url info:(NSDictionary *)info error:(NSError *)error completionHandler:(void (^)(SCRecordSessionSegment *, NSError *))completionHandler { [self dispatchSyncOnSessionQueue:^{ SCRecordSessionSegment *segment = nil; - + if (error == nil) { segment = 
[SCRecordSessionSegment segmentWithURL:url info:info];
            [self addSegment:segment];
        }
-
+
        [self _destroyAssetWriter];
-
+
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completionHandler != nil) {
                completionHandler(segment, error);
@@ -492,24 +492,24 @@ - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRec
    __block BOOL success = NO;
-
+
    [self dispatchSyncOnSessionQueue:^{
        dispatch_sync(_audioQueue, ^{
            if (_recordSegmentReady) {
                _recordSegmentReady = NO;
                success = YES;
-
+
                AVAssetWriter *writer = _assetWriter;
-
+
                if (writer != nil) {
                    BOOL currentSegmentEmpty = (!_currentSegmentHasVideo && !_currentSegmentHasAudio);
-
+
                    if (currentSegmentEmpty) {
                        [writer cancelWriting];
                        [self _destroyAssetWriter];
-
+
                        [self removeFile:writer.outputURL];
-
+
                        if (completionHandler != nil) {
                            dispatch_async(dispatch_get_main_queue(), ^{
                                completionHandler(nil, nil);
@@ -535,7 +535,7 @@ - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRec
            }
        });
    }];
-
+
    return success;
}

@@ -545,15 +545,15 @@ - (void)notifyMovieFileOutputIsReady {
- (void)beginRecordSegmentUsingMovieFileOutput:(AVCaptureMovieFileOutput *)movieFileOutput error:(NSError *__autoreleasing *)error delegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate {
    NSURL *url = [self nextFileURL:error];
-
+
    if (url != nil) {
        _movieFileOutput = movieFileOutput;
        _movieFileOutput.metadata = [SCRecorderTools assetWriterMetadata];
-
+
        if (movieFileOutput.isRecording) {
            [NSException raise:@"AlreadyRecordingException" format:@"The MovieFileOutput is already recording"];
        }
-
+
        _recordSegmentReady = NO;
        [movieFileOutput startRecordingToOutputFileURL:url recordingDelegate:delegate];
    }
@@ -567,18 +567,18 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres
    [self dispatchSyncOnSessionQueue:^{
        fileType = [self _suggestedFileType];
-
+
        if (fileType == nil) {
            error = [SCRecordSession createError:@"No output fileType was set"];
            return;
        }
-
+
        NSString *fileExtension = [self _suggestedFileExtension];
        if (fileExtension == nil) {
            error = [SCRecordSession createError:@"Unable to figure out a file extension"];
            return;
        }
-
+
        NSString *filename = [NSString stringWithFormat:@"%@SCVideo-Merged.%@", _identifier, fileExtension];
        outputUrl = [SCRecordSessionSegment segmentURLForFilename:filename andDirectory:_segmentsDirectory];
        [self removeFile:outputUrl];
@@ -616,6 +616,57 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres
    }
}

+- (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPreset
+                                             atURL:(NSURL *)url
+                                 completionHandler:(void(^)(NSURL *outputUrl, NSError *error))completionHandler {
+    __block AVAsset *asset = nil;
+    __block NSError *error = nil;
+    __block NSString *fileType = nil;
+    __block NSURL *outputUrl = url;
+
+    [self dispatchSyncOnSessionQueue:^{
+        fileType = [self _suggestedFileType];
+
+        if (fileType == nil) {
+            error = [SCRecordSession createError:@"No output fileType was set"];
+            return;
+        }
+
+        [self removeFile:outputUrl];
+
+        if (_segments.count == 0) {
+            error = [SCRecordSession createError:@"The session does not contain any record segment"];
+        } else {
+            asset = [self assetRepresentingSegments];
+        }
+    }];
+
+    if (error != nil) {
+        if (completionHandler != nil) {
+            completionHandler(nil, error);
+        }
+
+        return nil;
+    } else {
+        AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
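/* Editor's note: a hedged usage sketch for the atURL: variant added above --
 * export the merged segments straight to a caller-chosen destination. The
 * destination path and preset here are illustrative, not from this patch:
 *
 *   NSURL *destination = [NSURL fileURLWithPath:
 *       [NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mp4"]];
 *   [session mergeSegmentsUsingPreset:AVAssetExportPresetHighestQuality
 *                               atURL:destination
 *                   completionHandler:^(NSURL *outputUrl, NSError *error) {
 *       if (error == nil) {
 *           // outputUrl equals destination; safe to hand to a player or uploader
 *       }
 *   }];
 */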
presetName:exportSessionPreset];
+        exportSession.outputURL = outputUrl;
+        exportSession.outputFileType = fileType;
+        exportSession.shouldOptimizeForNetworkUse = YES;
+        [exportSession exportAsynchronouslyWithCompletionHandler:^{
+            NSError *error = exportSession.error;
+
+            if (completionHandler != nil) {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    completionHandler(outputUrl, error);
+                });
+            }
+        }];
+
+        return exportSession;
+
+    }
+}
+
 - (void)finishEndSession:(NSError*)mergeError completionHandler:(void (^)(NSError *))completionHandler {
    if (mergeError == nil) {
        [self removeAllSegments];
@@ -650,11 +701,11 @@ - (void)cancelSession:(void (^)())completionHandler {
- (CVPixelBufferRef)createPixelBuffer {
    CVPixelBufferRef outputPixelBuffer = nil;
    CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(NULL, [_videoPixelBufferAdaptor pixelBufferPool], &outputPixelBuffer);
-
+
    if (ret != kCVReturnSuccess) {
        NSLog(@"UNABLE TO CREATE PIXEL BUFFER (CVReturnError: %d)", ret);
    }
-
+
    return outputPixelBuffer;
}
@@ -663,7 +714,7 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion:
    CMTime duration = CMSampleBufferGetDuration(audioSampleBuffer);
    CMSampleBufferRef adjustedBuffer = [self adjustBuffer:audioSampleBuffer withTimeOffset:_timeOffset andDuration:duration];
-
+
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(adjustedBuffer);
    CMTime lastTimeAudio = CMTimeAdd(presentationTime, duration);
@@ -708,7 +759,7 @@ - (void)appendVideoPixelBuffer:(CVPixelBufferRef)videoPixelBuffer atTime:(CMTime
            }
            duration = computedFrameDuration;
        }
-
+
//        CMTime timeVideo = _lastTimeVideo;
//        CMTime actualBufferDuration = duration;
//
@@ -723,7 +774,7 @@ - (void)appendVideoPixelBuffer:(CVPixelBufferRef)videoPixelBuffer atTime:(CMTime
    if ([_videoPixelBufferAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:bufferTimestamp]) {
        _currentSegmentDuration = CMTimeSubtract(CMTimeAdd(bufferTimestamp, duration), _sessionStartTime);
        _lastTimeVideo = actualBufferTime;
-
+
        _currentSegmentHasVideo = YES;
        completion(YES);
    } else {
@@ -738,7 +789,7 @@ - (CMTime)_appendTrack:(AVAssetTrack *)track toCompositionTrack:(AVMutableCompos
    CMTimeRange timeRange = track.timeRange;
    time = CMTimeAdd(time, timeRange.start);
-
+
    if (CMTIME_IS_VALID(bounds)) {
        CMTime currentBounds = CMTimeAdd(time, timeRange.duration);

        if (CMTIME_COMPARE_INLINE(currentBounds, >, bounds)) {
            timeRange = CMTimeRangeMake(timeRange.start, CMTimeSubtract(timeRange.duration, CMTimeSubtract(currentBounds, bounds)));
        }
    }
-
+
    if (CMTIME_COMPARE_INLINE(timeRange.duration, >, kCMTimeZero)) {
        NSError *error = nil;
        [compositionTrack insertTimeRange:timeRange ofTrack:track atTime:time error:&error];
-
+
        if (error != nil) {
            NSLog(@"Failed to insert %@ track: %@", compositionTrack.mediaType, error);
        } else {
//            NSLog(@"Inserted %@ at %fs (%fs -> %fs)", track.mediaType, CMTimeGetSeconds(time), CMTimeGetSeconds(timeRange.start), CMTimeGetSeconds(timeRange.duration));
        }
-
+
        return CMTimeAdd(time, timeRange.duration);
    }
-
+
    return time;
}
@@ -771,22 +822,22 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix
    [self dispatchSyncOnSessionQueue:^{
        AVMutableCompositionTrack *audioTrack = nil;
        AVMutableCompositionTrack *videoTrack = nil;
-
+
        int currentSegment = 0;
        CMTime currentTime =
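/* Segments are appended end-to-end: currentTime is refreshed from the
 * composition's running duration after each segment, and maxBounds clamps
 * every audio track so it never outruns the segment's video. */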
composition.duration; for (SCRecordSessionSegment *recordSegment in _segments) { AVAsset *asset = recordSegment.asset; - + NSArray *audioAssetTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; NSArray *videoAssetTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; - + CMTime maxBounds = kCMTimeInvalid; - + CMTime videoTime = currentTime; for (AVAssetTrack *videoAssetTrack in videoAssetTracks) { if (videoTrack == nil) { NSArray *videoTracks = [composition tracksWithMediaType:AVMediaTypeVideo]; - + if (videoTracks.count > 0) { videoTrack = [videoTracks firstObject]; } else { @@ -794,28 +845,28 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix videoTrack.preferredTransform = videoAssetTrack.preferredTransform; } } - + videoTime = [self _appendTrack:videoAssetTrack toCompositionTrack:videoTrack atTime:videoTime withBounds:maxBounds]; maxBounds = videoTime; } - + CMTime audioTime = currentTime; for (AVAssetTrack *audioAssetTrack in audioAssetTracks) { if (audioTrack == nil) { NSArray *audioTracks = [composition tracksWithMediaType:AVMediaTypeAudio]; - + if (audioTracks.count > 0) { audioTrack = [audioTracks firstObject]; } else { audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; } } - + audioTime = [self _appendTrack:audioAssetTrack toCompositionTrack:audioTrack atTime:audioTime withBounds:maxBounds]; } - + currentTime = composition.duration; - + currentSegment++; } }]; @@ -847,7 +898,7 @@ - (AVAsset *)assetRepresentingSegments { } else { AVMutableComposition *composition = [AVMutableComposition composition]; [self appendSegmentsToComposition:composition]; - + asset = composition; } }]; @@ -889,45 +940,45 @@ - (CMTime)duration { - (NSDictionary *)dictionaryRepresentation { NSMutableArray *recordSegments = [NSMutableArray array]; - + for (SCRecordSessionSegment *recordSegment in self.segments) { [recordSegments addObject:recordSegment.dictionaryRepresentation]; } - + return @{ SCRecordSessionSegmentsKey: recordSegments, SCRecordSessionDurationKey : [NSNumber numberWithDouble:CMTimeGetSeconds(_segmentsDuration)], SCRecordSessionIdentifierKey : _identifier, - SCRecordSessionDateKey : _date, + SCRecordSessionDateKey : _date, SCRecordSessionDirectoryKey : _segmentsDirectory }; } - (NSURL *)outputUrl { NSString *fileType = [self _suggestedFileType]; - + if (fileType == nil) { return nil; } - + NSString *fileExtension = [self _suggestedFileExtension]; if (fileExtension == nil) { return nil; } - + NSString *filename = [NSString stringWithFormat:@"%@SCVideo-Merged.%@", _identifier, fileExtension]; - + return [SCRecordSessionSegment segmentURLForFilename:filename andDirectory:_segmentsDirectory]; } - (void)setSegmentsDirectory:(NSString *)segmentsDirectory { _segmentsDirectory = [segmentsDirectory copy]; - + [self dispatchSyncOnSessionQueue:^{ NSFileManager *fileManager = [NSFileManager defaultManager]; for (SCRecordSessionSegment *recordSegment in self.segments) { NSURL *newUrl = [SCRecordSessionSegment segmentURLForFilename:recordSegment.url.lastPathComponent andDirectory:_segmentsDirectory]; - + if (![newUrl isEqual:recordSegment.url]) { NSError *error = nil; if ([fileManager moveItemAtURL:recordSegment.url toURL:newUrl error:&error]) { From ada3d74fc6b872371d27f671c7273e541c95bf76 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Sat, 11 Jun 2016 18:54:19 -0400 Subject: [PATCH 03/55] added a fix to prevent video from recording until first audio has been captured --- Library/Sources/SCRecorder.m | 
55 +++++++++++++++++++----------------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 93448fd3..73a5d6ab 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -30,6 +30,7 @@ @interface SCRecorder() { BOOL _shouldAutoresumeRecording; BOOL _needsSwitchBackToContinuousFocus; BOOL _adjustingFocus; + BOOL _didCaptureFirstAudioBuffer; int _beginSessionConfigurationCount; double _lastAppendedVideoTime; NSTimer *_movieOutputProgressTimer; @@ -80,8 +81,8 @@ - (id)init { _lastAudioBuffer = [SCSampleBufferHolder new]; _maxRecordDuration = kCMTimeInvalid; _resetZoomOnChangeDevice = YES; - _mirrorOnFrontCamera = NO; - _automaticallyConfiguresApplicationAudioSession = YES; + _mirrorOnFrontCamera = NO; + _automaticallyConfiguresApplicationAudioSession = YES; self.device = AVCaptureDevicePositionBack; _videoConfiguration = [SCVideoConfiguration new]; @@ -303,7 +304,7 @@ - (BOOL)prepare:(NSError **)error { } AVCaptureSession *session = [[AVCaptureSession alloc] init]; - session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; + session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; _beginSessionConfigurationCount = 0; _captureSession = session; @@ -384,26 +385,26 @@ - (void)capturePhoto:(void(^)(NSError*, UIImage*))completionHandler { AVCaptureConnection *connection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; if (connection != nil) { [_photoOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: - ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { - - if (imageDataSampleBuffer != nil && error == nil) { - NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; - if (jpegData) { - UIImage *image = [UIImage imageWithData:jpegData]; - if (completionHandler != nil) { - completionHandler(nil, image); - } - } else { - if (completionHandler != nil) { - completionHandler([SCRecorder createError:@"Failed to create jpeg data"], nil); - } - } - } else { - if (completionHandler != nil) { - completionHandler(error, nil); - } - } - }]; + ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { + + if (imageDataSampleBuffer != nil && error == nil) { + NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; + if (jpegData) { + UIImage *image = [UIImage imageWithData:jpegData]; + if (completionHandler != nil) { + completionHandler(nil, image); + } + } else { + if (completionHandler != nil) { + completionHandler([SCRecorder createError:@"Failed to create jpeg data"], nil); + } + } + } else { + if (completionHandler != nil) { + completionHandler(error, nil); + } + } + }]; } else { if (completionHandler != nil) { completionHandler([SCRecorder createError:@"Camera session not started or Photo disabled"], nil); @@ -455,6 +456,7 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer { } - (void)record { + _didCaptureFirstAudioBuffer = NO; void (^block)() = ^{ _isRecording = YES; if (_movieOutput != nil && _session != nil) { @@ -612,8 +614,8 @@ - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer toRecordSession: CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); SCFilter *filterGroup = _videoConfiguration.filter; SCFilter *transformFilter = [self _transformFilterUsingBufferWidth:bufferWidth bufferHeight:bufferHeight mirrored: - _device 
== AVCaptureDevicePositionFront
-                        ];
+                                            _device == AVCaptureDevicePositionFront
+                                            ];

    if (filterGroup == nil && transformFilter == nil) {
        [recordSession appendVideoPixelBuffer:sampleBufferImage atTime:time duration:duration completion:completion];
@@ -841,6 +843,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
        }
    } else if (captureOutput == _audioOutput) {
        _lastAudioBuffer.sampleBuffer = sampleBuffer;
+        _didCaptureFirstAudioBuffer = YES;
        //            NSLog(@"AUDIO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer)));

        if (_audioConfiguration.shouldIgnore) {
@@ -851,7 +854,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
    if (!_initializeSessionLazily || _isRecording) {
        SCRecordSession *recordSession = _session;
        if (recordSession != nil) {
-            if (captureOutput == _videoOutput) {
+            if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) {
                [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection];
            } else if (captureOutput == _audioOutput) {
                [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession];
            }

From 3b351976124770ce4014ef67e1ff14e1eccbe637 Mon Sep 17 00:00:00 2001
From: gaberoze
Date: Tue, 14 Jun 2016 18:30:28 -0400
Subject: [PATCH 04/55] changed video pixel format to kCVPixelFormatType_420YpCbCr8BiPlanarFullRange

---
 Library/Sources/SCRecordSession.m | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m
index 6c0c849b..427bdbf4 100644
--- a/Library/Sources/SCRecordSession.m
+++ b/Library/Sources/SCRecordSession.m
@@ -376,9 +376,9 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

    NSDictionary *pixelBufferAttributes = @{
-        (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
-        (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:dimensions.width],
-        (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:dimensions.height]
+        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+        (id)kCVPixelBufferWidthKey : @(dimensions.width),
+        (id)kCVPixelBufferHeightKey : @(dimensions.height)
    };

    _videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes];

From 610f9ce21c118bce06c96d5f6b5f31ddff610875 Mon Sep 17 00:00:00 2001
From: gaberoze
Date: Wed, 15 Jun 2016 14:39:10 -0400
Subject: [PATCH 05/55] fixed video/audio sync

---
 Library/Sources/SCRecordSession.m | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m
index 427bdbf4..33ae2754 100644
--- a/Library/Sources/SCRecordSession.m
+++ b/Library/Sources/SCRecordSession.m
@@ -376,10 +376,10 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

    NSDictionary *pixelBufferAttributes = @{
-        (id)kCVPixelBufferPixelFormatTypeKey :
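/* Editor's note: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the iOS
 * camera's native biplanar YUV layout, so pooling buffers in this format
 * avoids a per-frame BGRA conversion -- the likely intent of PATCH 04's
 * pixel format change. */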
From 610f9ce21c118bce06c96d5f6b5f31ddff610875 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 15 Jun 2016 14:39:10 -0400 Subject: [PATCH 05/55] fixed video/audio sync --- Library/Sources/SCRecordSession.m | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 427bdbf4..33ae2754 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -376,10 +376,10 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), - (id)kCVPixelBufferWidthKey : @(dimensions.width), - (id)kCVPixelBufferHeightKey : @(dimensions.height) - }; + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), + (id)kCVPixelBufferWidthKey : @(dimensions.width), + (id)kCVPixelBufferHeightKey : @(dimensions.height) + }; _videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes]; } @catch (NSException *exception) { @@ -713,7 +713,13 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion: [self _startSessionIfNeededAtTime:CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer)]; CMTime duration = CMSampleBufferGetDuration(audioSampleBuffer); - CMSampleBufferRef adjustedBuffer = [self adjustBuffer:audioSampleBuffer withTimeOffset:_timeOffset andDuration:duration]; + /* Removed the call to adjustBuffer: - it was throwing off video/audio sync. + * The root cause is still unclear, but appending a plain copy of the incoming + * buffer instead of the time-adjusted one fixes the drift. + * GabeRoze + * */ +// CMSampleBufferRef adjustedBuffer = [self adjustBuffer:audioSampleBuffer withTimeOffset:_timeOffset andDuration:duration]; + CMSampleBufferRef adjustedBuffer; + CMSampleBufferCreateCopy(kCFAllocatorDefault, audioSampleBuffer, &adjustedBuffer); CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(adjustedBuffer); CMTime lastTimeAudio = CMTimeAdd(presentationTime, duration); From ee4df5bb64b91b2ebff89e7a70288bccdaa763c6 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 15 Jun 2016 16:27:10 -0400 Subject: [PATCH 06/55] modified prevention of video capture until the first audio buffer is captured --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 73a5d6ab..454f0350 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -843,7 +843,6 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM } } else if (captureOutput == _audioOutput) { _lastAudioBuffer.sampleBuffer = sampleBuffer; - _didCaptureFirstAudioBuffer = YES; // NSLog(@"AUDIO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); if (_audioConfiguration.shouldIgnore) { @@ -857,6 +856,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) { [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection]; } else if (captureOutput == _audioOutput) { + _didCaptureFirstAudioBuffer = YES; [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession]; From 961a9f81126a6e52008916d9e947bd2c00378fee Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 7 Jun 2017 14:55:55 -0400 Subject: [PATCH 07/55] bug fix --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 454f0350..30a6054c 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -1006,7 +1006,7 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy } if (newInput != nil) { - if ([_captureSession canAddInput:newInput]) { + if ([_captureSession canAddInput:newInput] && _captureSession.inputs.count < 1) { [_captureSession addInput:newInput]; if ([newInput.device hasMediaType:AVMediaTypeVideo]) { _videoInputAdded = YES;
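The inputs.count < 1 guard in PATCH 07 keeps a second configureDevice: pass from adding a duplicate input, but it also rejects a legitimate second input of a different media type (audio alongside video), which is presumably why PATCH 09 below backs both changes out. A narrower guard would key on the media type instead; a sketch under that assumption (this is an alternative, not what the patch ships):

    // Skip the add only when an input of the same media type is already attached:
    BOOL alreadyHasInputOfType = NO;
    for (AVCaptureDeviceInput *existingInput in _captureSession.inputs) {
        if ([existingInput.device hasMediaType:mediaType]) {
            alreadyHasInputOfType = YES;
            break;
        }
    }
    if ([_captureSession canAddInput:newInput] && !alreadyHasInputOfType) {
        [_captureSession addInput:newInput];
    }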
From 77f1bf6501ad16163365adc7b2d7f9fd5974132d Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 7 Jun 2017 23:13:55 -0400 Subject: [PATCH 08/55] bug fix --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 30a6054c..bcdfed29 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -331,7 +331,7 @@ - (BOOL)startRunning { success = [self prepare:nil]; } - if (!_captureSession.isRunning) { + if (!_captureSession.isRunning && _beginSessionConfigurationCount == 0) { [_captureSession startRunning]; } From 53f02b6d18ab690305fa01c94575aa8aa8ae05b8 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 7 Jun 2017 23:41:27 -0400 Subject: [PATCH 09/55] undid fixes because they broke more than they fixed --- Library/Sources/SCRecorder.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index bcdfed29..454f0350 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -331,7 +331,7 @@ - (BOOL)startRunning { success = [self prepare:nil]; } - if (!_captureSession.isRunning && _beginSessionConfigurationCount == 0) { + if (!_captureSession.isRunning) { [_captureSession startRunning]; } @@ -1006,7 +1006,7 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy } if (newInput != nil) { - if ([_captureSession canAddInput:newInput] && _captureSession.inputs.count < 1) { + if ([_captureSession canAddInput:newInput]) { [_captureSession addInput:newInput]; if ([newInput.device hasMediaType:AVMediaTypeVideo]) { _videoInputAdded = YES; From 0842a6cb62d5d918843e3e91d3084c175f8edf12 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 7 Jun 2017 23:52:03 -0400 Subject: [PATCH 10/55] added check to see if configuring --- Library/Sources/SCRecorder.h | 13 +++++++++---- Library/Sources/SCRecorder.m | 4 ++++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index d5346c2d..e2623fde 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -53,6 +53,11 @@ */ @property (readonly, nonatomic) BOOL isRecording; +/* + * Greater than 0 while the capture session is being configured. + */ +@property (readonly, nonatomic) int beginSessionConfigurationCount; + /** Change the flash mode on the camera */ @@ -112,7 +117,7 @@ /** The value of this property defaults to YES, causing the capture session to automatically configure the app’s shared AVAudioSession instance for optimal recording. - + If you set this property’s value to NO, your app is responsible for selecting appropriate audio session settings. Recording may fail if the audio session’s settings are incompatible with the capture session. */ @property (assign, nonatomic) BOOL automaticallyConfiguresApplicationAudioSession; @@ -180,7 +185,7 @@ /** The maximum record duration. When the record session record duration reaches this bound, the recorder will automatically pause the recording, - end the current record segment and send recorder:didCompletesession: on the + end the current record segment and send recorder:didCompletesession: on the delegate. */ @property (assign, nonatomic) CMTime maxRecordDuration; @@ -188,7 +193,7 @@ /** Whether the fast recording method should be enabled. Enabling this will disallow pretty much every features provided - by SCVideoConfiguration and SCAudioConfiguration. It will internally + by SCVideoConfiguration and SCAudioConfiguration. It will internally uses a AVCaptureMovieFileOutput that provides no settings.
If you have some performance issue, you can try enabling this. Default is NO. @@ -356,7 +361,7 @@ - (void)focusCenter; /** - Refocus at the current position + Refocus at the current position */ - (void)refocus; diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 454f0350..70117b9c 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -1393,6 +1393,10 @@ - (BOOL)isAdjustingFocus { return _adjustingFocus; } +- (int)beginSessionConfigurationCount { + return _beginSessionConfigurationCount; +} + - (void)setAdjustingExposure:(BOOL)adjustingExposure { if (_isAdjustingExposure != adjustingExposure) { [self willChangeValueForKey:@"isAdjustingExposure"]; From 23c3e75fbff01e166ef4f603627c6c056a14dd76 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Thu, 8 Jun 2017 00:15:48 -0400 Subject: [PATCH 11/55] crash fixes --- Library/Sources/SCRecorder.h | 1 + Library/Sources/SCRecorder.m | 2 ++ 2 files changed, 3 insertions(+) diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index e2623fde..67478dab 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -57,6 +57,7 @@ * > 0 if configuring * */ @property (readonly, nonatomic) int beginSessionConfigurationCount; +@property (assign, nonatomic) BOOL finishedCommit; /** Change the flash mode on the camera diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 70117b9c..05de7ac7 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -172,6 +172,7 @@ - (void)beginConfiguration { if (_captureSession != nil) { _beginSessionConfigurationCount++; if (_beginSessionConfigurationCount == 1) { + self.finishedCommit = NO; [_captureSession beginConfiguration]; } } @@ -182,6 +183,7 @@ - (void)commitConfiguration { _beginSessionConfigurationCount--; if (_beginSessionConfigurationCount == 0) { [_captureSession commitConfiguration]; + self.finishedCommit = YES; } } } From 1740a5921d52c1f264dac08849ec335d511b5cca Mon Sep 17 00:00:00 2001 From: gaberoze Date: Fri, 16 Jun 2017 17:36:00 -0400 Subject: [PATCH 12/55] crash fixes --- Library/Sources/SCRecorder.m | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 05de7ac7..423b3207 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -31,6 +31,7 @@ @interface SCRecorder() { BOOL _needsSwitchBackToContinuousFocus; BOOL _adjustingFocus; BOOL _didCaptureFirstAudioBuffer; + BOOL _preparing; int _beginSessionConfigurationCount; double _lastAppendedVideoTime; NSTimer *_movieOutputProgressTimer; @@ -305,6 +306,12 @@ - (BOOL)prepare:(NSError **)error { [NSException raise:@"SCCameraException" format:@"The session is already opened"]; } + if (_preparing) { + return NO; + } + + _preparing = YES; + AVCaptureSession *session = [[AVCaptureSession alloc] init]; session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; _beginSessionConfigurationCount = 0; @@ -324,12 +331,14 @@ - (BOOL)prepare:(NSError **)error { [self commitConfiguration]; + _preparing = NO; + return success; } - (BOOL)startRunning { BOOL success = YES; - if (!self.isPrepared) { + if (!self.isPrepared && !_preparing) { success = [self prepare:nil]; } From a4ecb92d544cbff854fc72134febcafa3f169ebe Mon Sep 17 00:00:00 2001 From: gaberoze Date: Tue, 25 Jul 2017 14:46:17 -0400 Subject: [PATCH 13/55] fix for double reconfigure being called --- Library/Sources/SCRecorder.m | 8 
++++++++ 1 file changed, 8 insertions(+) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 423b3207..02d8a1b0 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -32,6 +32,7 @@ @interface SCRecorder() { BOOL _adjustingFocus; BOOL _didCaptureFirstAudioBuffer; BOOL _preparing; + BOOL _reconfiguring; int _beginSessionConfigurationCount; double _lastAppendedVideoTime; NSTimer *_movieOutputProgressTimer; @@ -1038,7 +1039,12 @@ - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaTy } - (void)reconfigureVideoInput:(BOOL)shouldConfigureVideo audioInput:(BOOL)shouldConfigureAudio { + if (_reconfiguring) { + return; + } + if (_captureSession != nil) { + _reconfiguring = YES; [self beginConfiguration]; NSError *videoError = nil; @@ -1069,6 +1075,8 @@ - (void)reconfigureVideoInput:(BOOL)shouldConfigureVideo audioInput:(BOOL)should [delegate recorder:self didReconfigureVideoInput:videoError]; } } + + _reconfiguring = NO; } } From 77ec374562cf798acb29568409a59a31752383cf Mon Sep 17 00:00:00 2001 From: gaberoze Date: Tue, 25 Jul 2017 16:13:35 -0400 Subject: [PATCH 14/55] fix for double reconfigure being called (crash) --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 02d8a1b0..fd4816c1 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -339,7 +339,7 @@ - (BOOL)prepare:(NSError **)error { - (BOOL)startRunning { BOOL success = YES; - if (!self.isPrepared && !_preparing) { + if (!self.isPrepared && !_preparing && !_reconfiguring) { success = [self prepare:nil]; } From 736c59abec2c1d1779d6d8fb3c79dc5fef17a4a1 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Mon, 31 Jul 2017 19:33:43 -0400 Subject: [PATCH 15/55] bug fix --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index fd4816c1..8a32d02b 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -1320,7 +1320,7 @@ - (void)setDevice:(AVCaptureDevicePosition)device { if (_resetZoomOnChangeDevice) { self.videoZoomFactor = 1; } - if (_captureSession != nil) { + if (_captureSession != nil && !_reconfiguring) { [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:NO]; } From 8df62e2573a9330b5689c953ee7efbdf95dc0db7 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Tue, 5 Sep 2017 16:34:08 -0400 Subject: [PATCH 16/55] bug fix --- Library/Sources/SCRecordSession.h | 4 ++-- Library/Sources/SCRecordSession.m | 30 ++++++++++++++++++------------ 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/Library/Sources/SCRecordSession.h b/Library/Sources/SCRecordSession.h index d4b5295a..bb676b32 100644 --- a/Library/Sources/SCRecordSession.h +++ b/Library/Sources/SCRecordSession.h @@ -157,12 +157,12 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory; /** Remove all the record segments and their associated files. */ -- (void)removeAllSegments; +- (void)removeAllSegments:(void(^ __nullable)())completionHandler; /** Remove all the record segments and their associated files if deleteFiles is true. */ -- (void)removeAllSegments:(BOOL)deleteFiles; +- (void)removeAllSegments:(BOOL)deleteFiles withCompletion:(void(^ __nullable)())completionHandler; /** Remove the last segment safely. Does nothing if no segment were recorded. 
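Reviewer note on the signature change above: removeAllSegments now reports completion through a handler that runs on the session queue once every segment file has been deleted. A sketch of a call site migrated to the new API (the recorder variable and the UI refresh are placeholders, not part of this patch):

    [recorder.session removeAllSegments:^{
        // All segment files are gone by the time this runs.
        dispatch_async(dispatch_get_main_queue(), ^{
            // Placeholder: refresh whatever UI was listing the segments.
        });
    }];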
diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 33ae2754..e9072f2c 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -210,11 +210,13 @@ - (void)removeLastSegment { }]; } -- (void)removeAllSegments { - [self removeAllSegments:YES]; +- (void)removeAllSegments:(void(^ __nullable)())completionHandler { + [self removeAllSegments:YES + withCompletion:completionHandler]; } -- (void)removeAllSegments:(BOOL)removeFiles { +- (void)removeAllSegments:(BOOL)removeFiles + withCompletion:(void(^ __nullable)())completionHandler;{ [self dispatchSyncOnSessionQueue:^{ while (_segments.count > 0) { if (removeFiles) { @@ -225,6 +227,10 @@ - (void)removeAllSegments:(BOOL)removeFiles { } _segmentsDuration = kCMTimeZero; + + if (completionHandler) { + completionHandler(); + } }]; } @@ -669,7 +675,7 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres - (void)finishEndSession:(NSError*)mergeError completionHandler:(void (^)(NSError *))completionHandler { if (mergeError == nil) { - [self removeAllSegments]; + [self removeAllSegments:nil]; if (completionHandler != nil) { completionHandler(nil); } @@ -683,13 +689,13 @@ - (void)finishEndSession:(NSError*)mergeError completionHandler:(void (^)(NSErro - (void)cancelSession:(void (^)())completionHandler { [self dispatchSyncOnSessionQueue:^{ if (_assetWriter == nil) { - [self removeAllSegments]; + [self removeAllSegments:nil]; if (completionHandler != nil) { completionHandler(); } } else { [self endSegmentWithInfo:nil completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - [self removeAllSegments]; + [self removeAllSegments:nil]; if (completionHandler != nil) { completionHandler(); } @@ -952,12 +958,12 @@ - (NSDictionary *)dictionaryRepresentation { } return @{ - SCRecordSessionSegmentsKey: recordSegments, - SCRecordSessionDurationKey : [NSNumber numberWithDouble:CMTimeGetSeconds(_segmentsDuration)], - SCRecordSessionIdentifierKey : _identifier, - SCRecordSessionDateKey : _date, - SCRecordSessionDirectoryKey : _segmentsDirectory - }; + SCRecordSessionSegmentsKey: recordSegments, + SCRecordSessionDurationKey : [NSNumber numberWithDouble:CMTimeGetSeconds(_segmentsDuration)], + SCRecordSessionIdentifierKey : _identifier, + SCRecordSessionDateKey : _date, + SCRecordSessionDirectoryKey : _segmentsDirectory + }; } - (NSURL *)outputUrl { From 09661c3c0f6f75256c097e2d54e5101d8df1d045 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Wed, 6 Sep 2017 14:34:42 -0400 Subject: [PATCH 17/55] updated pixel format for all the things so video looks better --- Library/Sources/SCAssetExportSession.m | 86 +++++++++++++------------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 87676801..a8bd74ce 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -48,7 +48,7 @@ @implementation SCAssetExportSession -(instancetype)init { self = [super init]; - + if (self) { _audioQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.AudioQueue", nil); _videoQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.VideoQueue", nil); @@ -66,11 +66,11 @@ -(instancetype)init { - (instancetype)initWithAsset:(AVAsset *)inputAsset { self = [self init]; - + if (self) { self.inputAsset = inputAsset; } - + return self; } @@ -82,11 +82,11 @@ - (void)dealloc { - (AVAssetWriterInput *)addWriter:(NSString *)mediaType 
withSettings:(NSDictionary *)outputSettings { AVAssetWriterInput *writer = [AVAssetWriterInput assetWriterInputWithMediaType:mediaType outputSettings:outputSettings]; - + if ([_writer canAddInput:writer]) { [_writer addInput:writer]; } - + return writer; } @@ -97,20 +97,20 @@ - (BOOL)encodePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime - (SCIOPixelBuffers *)createIOPixelBuffers:(CMSampleBufferRef)sampleBuffer { CVPixelBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - + if (_outputBufferDiffersFromInput) { CVPixelBufferRef outputPixelBuffer = nil; - + CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer); - + if (ret != kCVReturnSuccess) { NSLog(@"Unable to allocate pixelBuffer: %d", ret); return nil; } - + SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time]; CVPixelBufferRelease(outputPixelBuffer); - + return pixelBuffers; } else { return [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:inputPixelBuffer time:time]; @@ -147,7 +147,7 @@ - (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffers *)pixelBuffer outputPixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:pixelBuffers.outputPixelBuffer outputPixelBuffer:pixelBuffers.outputPixelBuffer time:pixelBuffers.time]; } - + return outputPixelBuffers; } @@ -168,7 +168,7 @@ static CGContextRef SCCreateContextFromPixelBuffer(CVPixelBufferRef pixelBuffer) - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutputPixelBuffer:(CVPixelBufferRef)outputPixelBuffer atTimeInterval:(NSTimeInterval)timeSeconds { UIView *overlay = self.videoConfiguration.overlay; - + if (overlay != nil) { CGSize videoSize = CGSizeMake(CVPixelBufferGetWidth(outputPixelBuffer), CVPixelBufferGetHeight(outputPixelBuffer)); @@ -196,7 +196,7 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput } [overlay.layer renderInContext:ctx]; - + CGContextRelease(ctx); }; } @@ -227,28 +227,28 @@ - (void)beginReadWriteOnVideo { SCProcessingQueue *videoReadingQueue = [SCProcessingQueue new]; __weak typeof(self) wSelf = self; - + videoReadingQueue.maxQueueSize = 2; [videoReadingQueue startProcessingWithBlock:^id{ CMSampleBufferRef sampleBuffer = [wSelf.videoOutput copyNextSampleBuffer]; SCSampleBufferHolder *holder = nil; - + if (sampleBuffer != nil) { holder = [SCSampleBufferHolder sampleBufferHolderWithSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); } - + return holder; }]; - + if (_videoPixelAdaptor != nil) { filterRenderingQueue = [SCProcessingQueue new]; filterRenderingQueue.maxQueueSize = 2; [filterRenderingQueue startProcessingWithBlock:^id{ SCIOPixelBuffers *pixelBuffers = nil; SCSampleBufferHolder *bufferHolder = [videoReadingQueue dequeue]; - + if (bufferHolder != nil) { __strong typeof(self) strongSelf = wSelf; @@ -264,20 +264,20 @@ - (void)beginReadWriteOnVideo { return pixelBuffers; }]; - + videoProcessingQueue = [SCProcessingQueue new]; videoProcessingQueue.maxQueueSize = 2; [videoProcessingQueue startProcessingWithBlock:^id{ SCIOPixelBuffers *videoBuffers = [filterRenderingQueue dequeue]; - + if (videoBuffers != nil) { [wSelf CGRenderWithInputPixelBuffer:videoBuffers.inputPixelBuffer toOutputPixelBuffer:videoBuffers.outputPixelBuffer atTimeInterval:CMTimeGetSeconds(videoBuffers.time)]; } - + 
return videoBuffers; }]; } - + dispatch_group_enter(_dispatchGroup); _needsLeaveVideo = YES; @@ -321,7 +321,7 @@ - (void)beginReadWriteOnVideo { shouldReadNextBuffer = NO; } } - + if (!shouldReadNextBuffer) { [filterRenderingQueue stopProcessing]; [videoProcessingQueue stopProcessing]; @@ -347,20 +347,20 @@ - (void)beginReadWriteOnAudio { BOOL shouldReadNextBuffer = YES; while (strongSelf.audioInput.isReadyForMoreMediaData && shouldReadNextBuffer && !strongSelf.cancelled) { CMSampleBufferRef audioBuffer = [strongSelf.audioOutput copyNextSampleBuffer]; - + if (audioBuffer != nil) { shouldReadNextBuffer = [strongSelf.audioInput appendSampleBuffer:audioBuffer]; - + CMTime time = CMSampleBufferGetPresentationTimeStamp(audioBuffer); - + CFRelease(audioBuffer); - + [strongSelf _didAppendToInput:strongSelf.audioInput atTime:time]; } else { shouldReadNextBuffer = NO; } } - + if (!shouldReadNextBuffer) { [strongSelf markInputComplete:strongSelf.audioInput error:nil]; if (strongSelf.needsLeaveAudio) { @@ -374,11 +374,11 @@ - (void)beginReadWriteOnAudio { - (void)_setProgress:(float)progress { [self willChangeValueForKey:@"progress"]; - + _progress = progress; - + [self didChangeValueForKey:@"progress"]; - + id delegate = self.delegate; if ([delegate respondsToSelector:@selector(assetExportSessionDidProgress:)]) { [delegate assetExportSessionDidProgress:self]; @@ -453,11 +453,11 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { if (needsPixelBuffer && _videoInput != nil) { NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width], (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height] }; - + _videoPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes]; } } @@ -621,12 +621,12 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { NSDictionary *settings = nil; if (_filter != nil || self.videoConfiguration.overlay != nil) { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] }; } else { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] }; } @@ -659,15 +659,15 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler _cancelled = NO; _nextAllowedVideoFrame = kCMTimeZero; NSError *error = nil; - + [[NSFileManager defaultManager] removeItemAtURL:self.outputUrl error:nil]; - + _writer = [AVAssetWriter assetWriterWithURL:self.outputUrl fileType:self.outputFileType error:&error]; _writer.shouldOptimizeForNetworkUse = _shouldOptimizeForNetworkUse; _writer.metadata = [SCRecorderTools assetWriterMetadata]; EnsureSuccess(error, completionHandler); - + _reader = [AVAssetReader assetReaderWithAsset:self.inputAsset error:&error]; _reader.timeRange = 
_timeRange; EnsureSuccess(error, completionHandler); @@ -683,23 +683,23 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler if (![_reader startReading]) { EnsureSuccess(_reader.error, completionHandler); } - + if (![_writer startWriting]) { EnsureSuccess(_writer.error, completionHandler); } - + [_writer startSessionAtSourceTime:kCMTimeZero]; - + _totalDuration = CMTimeGetSeconds(_inputAsset.duration); [self beginReadWriteOnAudio]; [self beginReadWriteOnVideo]; - + dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{ if (_error == nil) { _error = _writer.error; } - + if (_error == nil && _writer.status != AVAssetWriterStatusCancelled) { [_writer finishWritingWithCompletionHandler:^{ _error = _writer.error; From c133c3499b12a19c5c71b2db32da82a334829679 Mon Sep 17 00:00:00 2001 From: gaberoze Date: Thu, 19 Oct 2017 15:42:49 -0400 Subject: [PATCH 18/55] video range now --- Library/Sources/SCAssetExportSession.m | 86 +++++++++++++------------- Library/Sources/SCPlayer.m | 8 +-- Library/Sources/SCRecordSession.m | 2 +- 3 files changed, 48 insertions(+), 48 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 87676801..66ade86d 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -48,7 +48,7 @@ @implementation SCAssetExportSession -(instancetype)init { self = [super init]; - + if (self) { _audioQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.AudioQueue", nil); _videoQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.VideoQueue", nil); @@ -66,11 +66,11 @@ -(instancetype)init { - (instancetype)initWithAsset:(AVAsset *)inputAsset { self = [self init]; - + if (self) { self.inputAsset = inputAsset; } - + return self; } @@ -82,11 +82,11 @@ - (void)dealloc { - (AVAssetWriterInput *)addWriter:(NSString *)mediaType withSettings:(NSDictionary *)outputSettings { AVAssetWriterInput *writer = [AVAssetWriterInput assetWriterInputWithMediaType:mediaType outputSettings:outputSettings]; - + if ([_writer canAddInput:writer]) { [_writer addInput:writer]; } - + return writer; } @@ -97,20 +97,20 @@ - (BOOL)encodePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime - (SCIOPixelBuffers *)createIOPixelBuffers:(CMSampleBufferRef)sampleBuffer { CVPixelBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - + if (_outputBufferDiffersFromInput) { CVPixelBufferRef outputPixelBuffer = nil; - + CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer); - + if (ret != kCVReturnSuccess) { NSLog(@"Unable to allocate pixelBuffer: %d", ret); return nil; } - + SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time]; CVPixelBufferRelease(outputPixelBuffer); - + return pixelBuffers; } else { return [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:inputPixelBuffer time:time]; @@ -147,7 +147,7 @@ - (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffers *)pixelBuffer outputPixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:pixelBuffers.outputPixelBuffer outputPixelBuffer:pixelBuffers.outputPixelBuffer time:pixelBuffers.time]; } - + return outputPixelBuffers; } @@ -168,7 +168,7 @@ static CGContextRef SCCreateContextFromPixelBuffer(CVPixelBufferRef 
pixelBuffer) - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutputPixelBuffer:(CVPixelBufferRef)outputPixelBuffer atTimeInterval:(NSTimeInterval)timeSeconds { UIView *overlay = self.videoConfiguration.overlay; - + if (overlay != nil) { CGSize videoSize = CGSizeMake(CVPixelBufferGetWidth(outputPixelBuffer), CVPixelBufferGetHeight(outputPixelBuffer)); @@ -196,7 +196,7 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput } [overlay.layer renderInContext:ctx]; - + CGContextRelease(ctx); }; } @@ -227,28 +227,28 @@ - (void)beginReadWriteOnVideo { SCProcessingQueue *videoReadingQueue = [SCProcessingQueue new]; __weak typeof(self) wSelf = self; - + videoReadingQueue.maxQueueSize = 2; [videoReadingQueue startProcessingWithBlock:^id{ CMSampleBufferRef sampleBuffer = [wSelf.videoOutput copyNextSampleBuffer]; SCSampleBufferHolder *holder = nil; - + if (sampleBuffer != nil) { holder = [SCSampleBufferHolder sampleBufferHolderWithSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); } - + return holder; }]; - + if (_videoPixelAdaptor != nil) { filterRenderingQueue = [SCProcessingQueue new]; filterRenderingQueue.maxQueueSize = 2; [filterRenderingQueue startProcessingWithBlock:^id{ SCIOPixelBuffers *pixelBuffers = nil; SCSampleBufferHolder *bufferHolder = [videoReadingQueue dequeue]; - + if (bufferHolder != nil) { __strong typeof(self) strongSelf = wSelf; @@ -264,20 +264,20 @@ - (void)beginReadWriteOnVideo { return pixelBuffers; }]; - + videoProcessingQueue = [SCProcessingQueue new]; videoProcessingQueue.maxQueueSize = 2; [videoProcessingQueue startProcessingWithBlock:^id{ SCIOPixelBuffers *videoBuffers = [filterRenderingQueue dequeue]; - + if (videoBuffers != nil) { [wSelf CGRenderWithInputPixelBuffer:videoBuffers.inputPixelBuffer toOutputPixelBuffer:videoBuffers.outputPixelBuffer atTimeInterval:CMTimeGetSeconds(videoBuffers.time)]; } - + return videoBuffers; }]; } - + dispatch_group_enter(_dispatchGroup); _needsLeaveVideo = YES; @@ -321,7 +321,7 @@ - (void)beginReadWriteOnVideo { shouldReadNextBuffer = NO; } } - + if (!shouldReadNextBuffer) { [filterRenderingQueue stopProcessing]; [videoProcessingQueue stopProcessing]; @@ -347,20 +347,20 @@ - (void)beginReadWriteOnAudio { BOOL shouldReadNextBuffer = YES; while (strongSelf.audioInput.isReadyForMoreMediaData && shouldReadNextBuffer && !strongSelf.cancelled) { CMSampleBufferRef audioBuffer = [strongSelf.audioOutput copyNextSampleBuffer]; - + if (audioBuffer != nil) { shouldReadNextBuffer = [strongSelf.audioInput appendSampleBuffer:audioBuffer]; - + CMTime time = CMSampleBufferGetPresentationTimeStamp(audioBuffer); - + CFRelease(audioBuffer); - + [strongSelf _didAppendToInput:strongSelf.audioInput atTime:time]; } else { shouldReadNextBuffer = NO; } } - + if (!shouldReadNextBuffer) { [strongSelf markInputComplete:strongSelf.audioInput error:nil]; if (strongSelf.needsLeaveAudio) { @@ -374,11 +374,11 @@ - (void)beginReadWriteOnAudio { - (void)_setProgress:(float)progress { [self willChangeValueForKey:@"progress"]; - + _progress = progress; - + [self didChangeValueForKey:@"progress"]; - + id delegate = self.delegate; if ([delegate respondsToSelector:@selector(assetExportSessionDidProgress:)]) { [delegate assetExportSessionDidProgress:self]; @@ -453,11 +453,11 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { if (needsPixelBuffer && _videoInput != nil) { NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], + 
(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width], (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height] }; - + _videoPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes]; } } @@ -621,12 +621,12 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { NSDictionary *settings = nil; if (_filter != nil || self.videoConfiguration.overlay != nil) { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), //old setting = kCVPixelFormatType_32BGRA (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] }; } else { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] }; } @@ -659,15 +659,15 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler _cancelled = NO; _nextAllowedVideoFrame = kCMTimeZero; NSError *error = nil; - + [[NSFileManager defaultManager] removeItemAtURL:self.outputUrl error:nil]; - + _writer = [AVAssetWriter assetWriterWithURL:self.outputUrl fileType:self.outputFileType error:&error]; _writer.shouldOptimizeForNetworkUse = _shouldOptimizeForNetworkUse; _writer.metadata = [SCRecorderTools assetWriterMetadata]; EnsureSuccess(error, completionHandler); - + _reader = [AVAssetReader assetReaderWithAsset:self.inputAsset error:&error]; _reader.timeRange = _timeRange; EnsureSuccess(error, completionHandler); @@ -683,23 +683,23 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler if (![_reader startReading]) { EnsureSuccess(_reader.error, completionHandler); } - + if (![_writer startWriting]) { EnsureSuccess(_writer.error, completionHandler); } - + [_writer startSessionAtSourceTime:kCMTimeZero]; - + _totalDuration = CMTimeGetSeconds(_inputAsset.duration); [self beginReadWriteOnAudio]; [self beginReadWriteOnVideo]; - + dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{ if (_error == nil) { _error = _writer.error; } - + if (_error == nil && _writer.status != AVAssetWriterStatusCancelled) { [_writer finishWritingWithCompletionHandler:^{ _error = _writer.error; diff --git a/Library/Sources/SCPlayer.m b/Library/Sources/SCPlayer.m index cad02b85..776f6a56 100644 --- a/Library/Sources/SCPlayer.m +++ b/Library/Sources/SCPlayer.m @@ -126,7 +126,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N } else if (context == LoadedTimeRanges) { void (^block)() = ^{ id delegate = self.delegate; - + if ([delegate respondsToSelector:@selector(player:didUpdateLoadedTimeRanges:)]) { NSArray * array= self.currentItem.loadedTimeRanges; CMTimeRange range=[array.firstObject CMTimeRangeValue]; @@ -141,7 +141,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N } else if (context == PlaybackBufferEmpty) { void (^block)() = ^{ id delegate = self.delegate; - + if ([delegate respondsToSelector:@selector(player:itemPlaybackBufferIsEmpty:)]) { [delegate player:self 
itemPlaybackBufferIsEmpty:self.currentItem]; } @@ -160,7 +160,7 @@ - (void)removeOldObservers { [_oldItem removeObserver:self forKeyPath:@"status"]; [_oldItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; [_oldItem removeObserver:self forKeyPath:@"loadedTimeRanges"]; - + [self unsetupVideoOutputToItem:_oldItem]; _oldItem = nil; @@ -252,7 +252,7 @@ - (void)unsetupDisplayLink { - (void)setupVideoOutputToItem:(AVPlayerItem *)item { if (_displayLink != nil && item != nil && _videoOutput == nil && item.status == AVPlayerItemStatusReadyToPlay) { - NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}; + NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)}; _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes]; [_videoOutput setDelegate:self queue:dispatch_get_main_queue()]; _videoOutput.suppressesPlayerRendering = self.shouldSuppressPlayerRendering; diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 33ae2754..21f6a7dc 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -376,7 +376,7 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), (id)kCVPixelBufferWidthKey : @(dimensions.width), (id)kCVPixelBufferHeightKey : @(dimensions.height) }; From c1593e0d6f7545e704a7f5f62dec1d894fc33397 Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Mon, 12 Mar 2018 17:26:00 -0700 Subject: [PATCH 19/55] videoConfiguration.overlay kCVPixelBufferPixelFormatTypeKey --- Library/Sources/SCAssetExportSession.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 66ade86d..f4eff6da 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -453,7 +453,7 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { if (needsPixelBuffer && _videoInput != nil) { NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], + (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], // old setting = kCVPixelFormatType_32BGRA (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width], (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height] }; @@ -621,7 +621,7 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { NSDictionary *settings = nil; if (_filter != nil || self.videoConfiguration.overlay != nil) { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), //old setting = kCVPixelFormatType_32BGRA + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] }; } else { From 1e4ea86cdb6498696add27d501cb5407031a1a5b Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Tue, 13 Mar 2018 10:41:17 -0700 Subject: [PATCH 20/55] retention oddness --- 
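The oddness, as far as the diff shows: -[SCRecorder init] grabbed .CIContext off a temporary SCContext and kept only the derived object, so under ARC nothing retained the SCContext wrapper once init returned. The working theory behind this patch is that the CIContext depends on state its wrapper owns, so the wrapper must outlive it. The pattern, with names matching the diff below:

    @property (nonatomic, strong) SCContext *scContext;   // retain the owner...

    self.scContext = [SCContext contextWithType:SCContextTypeAuto options:nil];
    _context = self.scContext.CIContext;                  // ...so the derived context stays valid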
Library/Sources/SCRecorder.m | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 8a32d02b..8cac653e 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -42,7 +42,7 @@ @interface SCRecorder() { size_t _transformFilterBufferWidth; size_t _transformFilterBufferHeight; } - +@property (nonatomic, strong) SCContext* scContext; @end @implementation SCRecorder @@ -95,10 +95,9 @@ - (id)init { [_audioConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderAudioEnabledContext]; [_photoConfiguration addObserver:self forKeyPath:@"options" options:NSKeyValueObservingOptionNew context:SCRecorderPhotoOptionsContext]; - SCContext *context = [SCContext - contextWithType:SCContextTypeAuto - options:nil]; - _context = context.CIContext; + self.scContext = [SCContext contextWithType:SCContextTypeAuto + options:nil]; + _context = self.scContext.CIContext; } return self; From 67a1b3a625e64e40dce0fcc93478b3867604b363 Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Tue, 13 Mar 2018 14:08:38 -0700 Subject: [PATCH 21/55] memory/arc fixes --- Library/Sources/SCAssetExportSession.h | 2 +- Library/Sources/SCAssetExportSession.m | 45 ++-- Library/Sources/SCPlayer.m | 6 +- Library/Sources/SCProcessingQueue.h | 2 +- Library/Sources/SCProcessingQueue.m | 4 +- Library/Sources/SCRecordSession.h | 14 +- Library/Sources/SCRecordSession.m | 228 ++++++++++++--------- Library/Sources/SCRecordSession_Internal.h | 1 + Library/Sources/SCRecorder.h | 2 +- Library/Sources/SCRecorder.m | 164 ++++++++------- Library/Sources/SCRecorderTools.m | 2 +- 11 files changed, 263 insertions(+), 207 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.h b/Library/Sources/SCAssetExportSession.h index 28e48bbd..432b82c8 100644 --- a/Library/Sources/SCAssetExportSession.h +++ b/Library/Sources/SCAssetExportSession.h @@ -106,6 +106,6 @@ /** Starts the asynchronous execution of the export session */ -- (void)exportAsynchronouslyWithCompletionHandler:(void(^__nullable)())completionHandler; +- (void)exportAsynchronouslyWithCompletionHandler:(void(^__nullable)(void))completionHandler; @end diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index f4eff6da..67b897a9 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -179,7 +179,7 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput CGContextRef ctx = SCCreateContextFromPixelBuffer(outputPixelBuffer); - void (^layoutBlock)() = ^{ + void (^layoutBlock)(void) = ^{ overlay.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); if ([overlay respondsToSelector:@selector(updateWithVideoTime:)]) { @@ -288,7 +288,7 @@ - (void)beginReadWriteOnVideo { SCIOPixelBuffers *videoBuffer = nil; SCSampleBufferHolder *bufferHolder = nil; - CMTime time; + CMTime time = kCMTimeZero; if (videoProcessingQueue != nil) { videoBuffer = [videoProcessingQueue dequeue]; time = videoBuffer.time; @@ -385,7 +385,7 @@ - (void)_setProgress:(float)progress { } } -- (void)callCompletionHandler:(void (^)())completionHandler { +- (void)callCompletionHandler:(void (^)(void))completionHandler { if (!_cancelled) { [self _setProgress:1]; } @@ -465,22 +465,23 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { - (void)cancelExport { _cancelled = YES; - + __weak typeof(self) wSelf = self; dispatch_sync(_videoQueue, ^{ - if (_needsLeaveVideo) 
{ - _needsLeaveVideo = NO; - dispatch_group_leave(_dispatchGroup); + typeof(self) iSelf = wSelf; + if (iSelf->_needsLeaveVideo) { + iSelf->_needsLeaveVideo = NO; + dispatch_group_leave(iSelf->_dispatchGroup); } - dispatch_sync(_audioQueue, ^{ - if (_needsLeaveAudio) { - _needsLeaveAudio = NO; - dispatch_group_leave(_dispatchGroup); + dispatch_sync(iSelf->_audioQueue, ^{ + if (iSelf->_needsLeaveAudio) { + iSelf->_needsLeaveAudio = NO; + dispatch_group_leave(iSelf->_dispatchGroup); } }); - [_reader cancelReading]; - [_writer cancelWriting]; + [iSelf->_reader cancelReading]; + [iSelf->_writer cancelWriting]; }); } @@ -655,7 +656,7 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { } } -- (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler { +- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))completionHandler { _cancelled = NO; _nextAllowedVideoFrame = kCMTimeZero; NSError *error = nil; @@ -695,18 +696,20 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler [self beginReadWriteOnAudio]; [self beginReadWriteOnVideo]; + __weak typeof(self) wSelf = self; dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{ - if (_error == nil) { - _error = _writer.error; + typeof(self) iSelf = wSelf; + if (iSelf->_error == nil) { + iSelf->_error = iSelf->_writer.error; } - if (_error == nil && _writer.status != AVAssetWriterStatusCancelled) { - [_writer finishWritingWithCompletionHandler:^{ - _error = _writer.error; - [self callCompletionHandler:completionHandler]; + if (iSelf->_error == nil && iSelf->_writer.status != AVAssetWriterStatusCancelled) { + [iSelf->_writer finishWritingWithCompletionHandler:^{ + iSelf->_error = iSelf->_writer.error; + [wSelf callCompletionHandler:completionHandler]; }]; } else { - [self callCompletionHandler:completionHandler]; + [wSelf callCompletionHandler:completionHandler]; } }); } diff --git a/Library/Sources/SCPlayer.m b/Library/Sources/SCPlayer.m index 776f6a56..4f3a6dbd 100644 --- a/Library/Sources/SCPlayer.m +++ b/Library/Sources/SCPlayer.m @@ -109,7 +109,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N if (context == ItemChanged) { [self initObserver]; } else if (context == StatusChanged) { - void (^block)() = ^{ + void (^block)(void) = ^{ [self setupVideoOutputToItem:self.currentItem]; id delegate = self.delegate; @@ -124,7 +124,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N dispatch_async(dispatch_get_main_queue(), block); } } else if (context == LoadedTimeRanges) { - void (^block)() = ^{ + void (^block)(void) = ^{ id delegate = self.delegate; if ([delegate respondsToSelector:@selector(player:didUpdateLoadedTimeRanges:)]) { @@ -139,7 +139,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N dispatch_async(dispatch_get_main_queue(), block); } } else if (context == PlaybackBufferEmpty) { - void (^block)() = ^{ + void (^block)(void) = ^{ id delegate = self.delegate; if ([delegate respondsToSelector:@selector(player:itemPlaybackBufferIsEmpty:)]) { diff --git a/Library/Sources/SCProcessingQueue.h b/Library/Sources/SCProcessingQueue.h index f437053c..536aeb72 100644 --- a/Library/Sources/SCProcessingQueue.h +++ b/Library/Sources/SCProcessingQueue.h @@ -12,7 +12,7 @@ @property (assign, nonatomic) NSUInteger maxQueueSize; -- (void)startProcessingWithBlock:(id(^)())processingBlock; +- (void)startProcessingWithBlock:(id(^)(void))processingBlock; - (void)stopProcessing; diff --git 
a/Library/Sources/SCProcessingQueue.m b/Library/Sources/SCProcessingQueue.m index 8fd58808..f9de9552 100644 --- a/Library/Sources/SCProcessingQueue.m +++ b/Library/Sources/SCProcessingQueue.m @@ -51,7 +51,7 @@ - (void)setMaxQueueSize:(NSUInteger)maxQueueSize { _maxQueueSize = maxQueueSize; } -- (void)_process:(id (^)())processingBlock { +- (void)_process:(id (^)(void))processingBlock { @autoreleasepool { while (!_completed) { BOOL shouldProcess = NO; @@ -85,7 +85,7 @@ - (void)_process:(id (^)())processingBlock { } } -- (void)startProcessingWithBlock:(id (^)())processingBlock { +- (void)startProcessingWithBlock:(id (^)(void))processingBlock { [NSThread detachNewThreadSelector:@selector(_process:) toTarget:self withObject:processingBlock]; } diff --git a/Library/Sources/SCRecordSession.h b/Library/Sources/SCRecordSession.h index bb676b32..16a509a2 100644 --- a/Library/Sources/SCRecordSession.h +++ b/Library/Sources/SCRecordSession.h @@ -128,7 +128,7 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory; within this block will ensure that you are the only one who has access to any modification on this SCRecordSession. */ -- (void)dispatchSyncOnSessionQueue:(void(^__nonnull)())block; +- (void)dispatchSyncOnSessionQueue:(void(^__nonnull)(void))block; ////////////////////// /////// SEGMENTS @@ -157,12 +157,12 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory; /** Remove all the record segments and their associated files. */ -- (void)removeAllSegments:(void(^ __nullable)())completionHandler; +- (void)removeAllSegments:(void(^ __nullable)(void))completionHandler; /** Remove all the record segments and their associated files if deleteFiles is true. */ -- (void)removeAllSegments:(BOOL)deleteFiles withCompletion:(void(^ __nullable)())completionHandler; +- (void)removeAllSegments:(BOOL)deleteFiles withCompletion:(void(^ __nullable)(void))completionHandler; /** Remove the last segment safely. Does nothing if no segment were recorded. @@ -175,7 +175,7 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory; If you don't want a segment to be automatically added when calling this method, you should remove the SCRecordSession from the SCRecorder */ -- (void)cancelSession:(void(^ __nullable)())completionHandler; +- (void)cancelSession:(void(^ __nullable)(void))completionHandler; /** Merge the recorded record segments using the given AVAssetExportSessionPreset. 
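Two themes run through this patch: every block type gains an explicit (void) parameter list, since void(^)() declares a block with unspecified arguments and newer clang rejects it under strict prototypes, and every block that used to touch ivars directly (which captures self implicitly and strongly) now goes through a weak reference that is re-strengthened inside the block. A sketch of that second pattern as used inside SCRecordSession's own methods, with a nil check added as a defensive assumption (the diff dereferences iSelf without one):

    __weak typeof(self) wSelf = self;
    [self dispatchSyncOnSessionQueue:^{
        typeof(self) iSelf = wSelf;    // re-strengthen for the scope of the block
        if (iSelf == nil) {            // assumption: bail out if already deallocated
            return;
        }
        [iSelf->_segments removeAllObjects];   // ivar access through iSelf, not self
    }];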
@@ -184,9 +184,9 @@ extern NSString *__nonnull const SCRecordSessionDocumentDirectory; */ - (AVAssetExportSession *__nullable)mergeSegmentsUsingPreset:(NSString *__nonnull)exportSessionPreset completionHandler:(void(^__nonnull)(NSURL *__nullable outputUrl, NSError *__nullable error))completionHandler; -- (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPreset - atURL:(NSURL *)url - completionHandler:(void(^)(NSURL *outputUrl, NSError *error))completionHandler; +- (AVAssetExportSession *__nullable)mergeSegmentsUsingPreset:(NSString *__nonnull)exportSessionPreset + atURL:(NSURL *__nonnull)url + completionHandler:(void(^__nullable)(NSURL * __nullable outputUrl, NSError * __nullable error))completionHandler; /** Returns an asset representing all the record segments diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 96f56940..b22c8f53 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -152,7 +152,7 @@ + (NSError*)createError:(NSString*)errorDescription { return [NSError errorWithDomain:@"SCRecordSession" code:200 userInfo:@{NSLocalizedDescriptionKey : errorDescription}]; } -- (void)dispatchSyncOnSessionQueue:(void(^)())block { +- (void)dispatchSyncOnSessionQueue:(void(^)(void))block { SCRecorder *recorder = self.recorder; if (recorder == nil || [SCRecorder isSessionQueue]) { @@ -168,32 +168,35 @@ - (void)removeFile:(NSURL *)fileUrl { } - (void)removeSegment:(SCRecordSessionSegment *)segment { + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - NSUInteger index = [_segments indexOfObject:segment]; + NSUInteger index = [wSelf.segments indexOfObject:segment]; if (index != NSNotFound) { - [self removeSegmentAtIndex:index deleteFile:NO]; + [wSelf removeSegmentAtIndex:index deleteFile:NO]; } }]; } - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile { + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - SCRecordSessionSegment *segment = [_segments objectAtIndex:segmentIndex]; - [_segments removeObjectAtIndex:segmentIndex]; + typeof(self) iSelf = wSelf; + SCRecordSessionSegment *segment = [iSelf->_segments objectAtIndex:segmentIndex]; + [iSelf->_segments removeObjectAtIndex:segmentIndex]; CMTime segmentDuration = segment.duration; if (CMTIME_IS_VALID(segmentDuration)) { // NSLog(@"Removed duration of %fs", CMTimeGetSeconds(segmentDuration)); - _segmentsDuration = CMTimeSubtract(_segmentsDuration, segmentDuration); + wSelf.segmentsDuration = CMTimeSubtract(iSelf->_segmentsDuration, segmentDuration); } else { CMTime newDuration = kCMTimeZero; - for (SCRecordSessionSegment *segment in _segments) { + for (SCRecordSessionSegment *segment in wSelf.segments) { if (CMTIME_IS_VALID(segment.duration)) { newDuration = CMTimeAdd(newDuration, segment.duration); } } - _segmentsDuration = newDuration; + iSelf->_segmentsDuration = newDuration; } if (deleteFile) { @@ -203,35 +206,39 @@ - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile } - (void)removeLastSegment { + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - if (_segments.count > 0) { - [self removeSegmentAtIndex:_segments.count - 1 deleteFile:YES]; + typeof(self) iSelf = wSelf; + if (iSelf->_segments.count > 0) { + [self removeSegmentAtIndex:iSelf->_segments.count - 1 deleteFile:YES]; } }]; } -- (void)removeAllSegments:(void(^ __nullable)())completionHandler { +- (void)removeAllSegments:(void(^ __nullable)(void))completionHandler { [self 
removeAllSegments:YES withCompletion:completionHandler]; } - (void)removeAllSegments:(BOOL)removeFiles - withCompletion:(void(^ __nullable)())completionHandler;{ - [self dispatchSyncOnSessionQueue:^{ - while (_segments.count > 0) { - if (removeFiles) { - SCRecordSessionSegment *segment = [_segments objectAtIndex:0]; - [segment deleteFile]; - } - [_segments removeObjectAtIndex:0]; - } - - _segmentsDuration = kCMTimeZero; - - if (completionHandler) { - completionHandler(); - } - }]; + withCompletion:(void(^ __nullable)(void))completionHandler;{ + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + while (iSelf->_segments.count > 0) { + if (removeFiles) { + SCRecordSessionSegment *segment = [iSelf->_segments objectAtIndex:0]; + [segment deleteFile]; + } + [iSelf->_segments removeObjectAtIndex:0]; + } + + iSelf->_segmentsDuration = kCMTimeZero; + + if (completionHandler) { + completionHandler(); + } + }]; } - (NSString*)_suggestedFileType { @@ -358,16 +365,18 @@ - (AVAssetWriter *)createWriter:(NSError **)error { } - (void)deinitialize { + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; [self endSegmentWithInfo:nil completionHandler:nil]; - _audioConfiguration = nil; - _videoConfiguration = nil; - _audioInitializationFailed = NO; - _videoInitializationFailed = NO; - _videoInput = nil; - _audioInput = nil; - _videoPixelBufferAdaptor = nil; + iSelf->_audioConfiguration = nil; + iSelf->_videoConfiguration = nil; + iSelf->_audioInitializationFailed = NO; + iSelf->_videoInitializationFailed = NO; + iSelf->_videoInput = nil; + iSelf->_audioInput = nil; + iSelf->_videoPixelBufferAdaptor = nil; }]; } @@ -417,16 +426,20 @@ - (void)initializeAudio:(NSDictionary *)audioSettings formatDescription:(CMForma } - (void)addSegment:(SCRecordSessionSegment *)segment { - [self dispatchSyncOnSessionQueue:^{ - [_segments addObject:segment]; - _segmentsDuration = CMTimeAdd(_segmentsDuration, segment.duration); + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + [iSelf->_segments addObject:segment]; + iSelf->_segmentsDuration = CMTimeAdd(iSelf->_segmentsDuration, segment.duration); }]; } - (void)insertSegment:(SCRecordSessionSegment *)segment atIndex:(NSInteger)segmentIndex { - [self dispatchSyncOnSessionQueue:^{ - [_segments insertObject:segment atIndex:segmentIndex]; - _segmentsDuration = CMTimeAdd(_segmentsDuration, segment.duration); + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + [iSelf->_segments insertObject:segment atIndex:segmentIndex]; + iSelf->_segmentsDuration = CMTimeAdd(iSelf->_segmentsDuration, segment.duration); }]; } @@ -452,18 +465,20 @@ - (CMSampleBufferRef)adjustBuffer:(CMSampleBufferRef)sample withTimeOffset:(CMTi } - (void)beginSegment:(NSError**)error { - [self dispatchSyncOnSessionQueue:^{ - if (_assetWriter == nil) { - _assetWriter = [self createWriter:error]; - _currentSegmentDuration = kCMTimeZero; - _currentSegmentHasAudio = NO; - _currentSegmentHasVideo = NO; + __block NSError* localError; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + if (iSelf->_assetWriter == nil) { + iSelf->_assetWriter = [iSelf createWriter:&localError]; + iSelf->_currentSegmentDuration = kCMTimeZero; + iSelf->_currentSegmentHasAudio = NO; + iSelf->_currentSegmentHasVideo = NO; } else { - if (error != nil) { - *error = [SCRecordSession 
createError:@"A record segment has already began."]; - } + localError = [SCRecordSession createError:@"A record segment has already began."]; } }]; + if (error) *error = localError; } - (void)_destroyAssetWriter { @@ -478,15 +493,17 @@ - (void)_destroyAssetWriter { } - (void)appendRecordSegmentUrl:(NSURL *)url info:(NSDictionary *)info error:(NSError *)error completionHandler:(void (^)(SCRecordSessionSegment *, NSError *))completionHandler { - [self dispatchSyncOnSessionQueue:^{ + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; SCRecordSessionSegment *segment = nil; if (error == nil) { segment = [SCRecordSessionSegment segmentWithURL:url info:info]; - [self addSegment:segment]; + [iSelf addSegment:segment]; } - [self _destroyAssetWriter]; + [iSelf _destroyAssetWriter]; dispatch_async(dispatch_get_main_queue(), ^{ if (completionHandler != nil) { @@ -499,22 +516,24 @@ - (void)appendRecordSegmentUrl:(NSURL *)url info:(NSDictionary *)info error:(NSE - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRecordSessionSegment *segment, NSError* error))completionHandler { __block BOOL success = NO; - [self dispatchSyncOnSessionQueue:^{ - dispatch_sync(_audioQueue, ^{ - if (_recordSegmentReady) { - _recordSegmentReady = NO; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + dispatch_sync(iSelf->_audioQueue, ^{ + if (iSelf->_recordSegmentReady) { + iSelf->_recordSegmentReady = NO; success = YES; - AVAssetWriter *writer = _assetWriter; + AVAssetWriter *writer = iSelf->_assetWriter; if (writer != nil) { - BOOL currentSegmentEmpty = (!_currentSegmentHasVideo && !_currentSegmentHasAudio); + BOOL currentSegmentEmpty = (!iSelf->_currentSegmentHasVideo && !iSelf->_currentSegmentHasAudio); if (currentSegmentEmpty) { [writer cancelWriting]; - [self _destroyAssetWriter]; + [iSelf _destroyAssetWriter]; - [self removeFile:writer.outputURL]; + [iSelf removeFile:writer.outputURL]; if (completionHandler != nil) { dispatch_async(dispatch_get_main_queue(), ^{ @@ -523,14 +542,14 @@ - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRec } } else { // NSLog(@"Ending session at %fs", CMTimeGetSeconds(_currentSegmentDuration)); - [writer endSessionAtSourceTime:CMTimeAdd(_currentSegmentDuration, _sessionStartTime)]; + [writer endSessionAtSourceTime:CMTimeAdd(iSelf->_currentSegmentDuration, iSelf->_sessionStartTime)]; [writer finishWritingWithCompletionHandler: ^{ - [self appendRecordSegmentUrl:writer.outputURL info:info error:writer.error completionHandler:completionHandler]; + [iSelf appendRecordSegmentUrl:writer.outputURL info:info error:writer.error completionHandler:completionHandler]; }]; } } else { - [_movieFileOutput stopRecording]; + [iSelf->_movieFileOutput stopRecording]; } } else { dispatch_async(dispatch_get_main_queue(), ^{ @@ -571,7 +590,9 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres __block NSString *fileType = nil; __block NSURL *outputUrl = nil; - [self dispatchSyncOnSessionQueue:^{ + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; fileType = [self _suggestedFileType]; if (fileType == nil) { @@ -585,11 +606,11 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres return; } - NSString *filename = [NSString stringWithFormat:@"%@SCVideo-Merged.%@", _identifier, fileExtension]; - outputUrl = [SCRecordSessionSegment 
segmentURLForFilename:filename andDirectory:_segmentsDirectory]; + NSString *filename = [NSString stringWithFormat:@"%@SCVideo-Merged.%@", iSelf->_identifier, fileExtension]; + outputUrl = [SCRecordSessionSegment segmentURLForFilename:filename andDirectory:iSelf->_segmentsDirectory]; [self removeFile:outputUrl]; - if (_segments.count == 0) { + if (iSelf->_segments.count == 0) { error = [SCRecordSession createError:@"The session does not contains any record segment"]; } else { asset = [self assetRepresentingSegments]; @@ -630,20 +651,22 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres __block NSString *fileType = nil; __block NSURL *outputUrl = url; - [self dispatchSyncOnSessionQueue:^{ - fileType = [self _suggestedFileType]; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + fileType = [iSelf _suggestedFileType]; if (fileType == nil) { error = [SCRecordSession createError:@"No output fileType was set"]; return; } - [self removeFile:outputUrl]; + [iSelf removeFile:outputUrl]; - if (_segments.count == 0) { + if (iSelf->_segments.count == 0) { error = [SCRecordSession createError:@"The session does not contains any record segment"]; } else { - asset = [self assetRepresentingSegments]; + asset = [iSelf assetRepresentingSegments]; } }]; @@ -686,16 +709,18 @@ - (void)finishEndSession:(NSError*)mergeError completionHandler:(void (^)(NSErro } } -- (void)cancelSession:(void (^)())completionHandler { - [self dispatchSyncOnSessionQueue:^{ - if (_assetWriter == nil) { - [self removeAllSegments:nil]; +- (void)cancelSession:(void (^)(void))completionHandler { + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + if (iSelf->_assetWriter == nil) { + [iSelf removeAllSegments:nil]; if (completionHandler != nil) { completionHandler(); } } else { - [self endSegmentWithInfo:nil completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - [self removeAllSegments:nil]; + [iSelf endSegmentWithInfo:nil completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { + [iSelf removeAllSegments:nil]; if (completionHandler != nil) { completionHandler(); } @@ -729,17 +754,18 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion: CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(adjustedBuffer); CMTime lastTimeAudio = CMTimeAdd(presentationTime, duration); - + __weak typeof(self) wSelf = self; dispatch_async(_audioQueue, ^{ - if ([_audioInput isReadyForMoreMediaData] && [_audioInput appendSampleBuffer:adjustedBuffer]) { - _lastTimeAudio = lastTimeAudio; + typeof(self) iSelf = wSelf; + if ([iSelf->_audioInput isReadyForMoreMediaData] && [iSelf->_audioInput appendSampleBuffer:adjustedBuffer]) { + iSelf->_lastTimeAudio = lastTimeAudio; - if (!_currentSegmentHasVideo) { - _currentSegmentDuration = CMTimeSubtract(lastTimeAudio, _sessionStartTime); + if (!iSelf->_currentSegmentHasVideo) { + iSelf->_currentSegmentDuration = CMTimeSubtract(lastTimeAudio, iSelf->_sessionStartTime); } // NSLog(@"Appending audio at %fs (buffer: %fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(adjustedBuffer)), CMTimeGetSeconds(actualBufferTime)); - _currentSegmentHasAudio = YES; + iSelf->_currentSegmentHasAudio = YES; completion(YES); } else { @@ -831,13 +857,15 @@ - (void)appendSegmentsToComposition:(AVMutableComposition * __nonnull)compositio } - (void)appendSegmentsToComposition:(AVMutableComposition *)composition 
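For context, a hypothetical call site for the merge API shown above, assuming the (NSURL *outputUrl, NSError *error) completion signature the library uses elsewhere:

    [recordSession mergeSegmentsUsingPreset:AVAssetExportPresetHighestQuality
                          completionHandler:^(NSURL *outputUrl, NSError *error) {
        if (error == nil) {
            NSLog(@"Merged %lu segments into %@",
                  (unsigned long)recordSession.segments.count, outputUrl);
        } else {
            NSLog(@"Merge failed: %@", error);
        }
    }];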
audioMix:(AVMutableAudioMix *)audioMix { + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; AVMutableCompositionTrack *audioTrack = nil; AVMutableCompositionTrack *videoTrack = nil; int currentSegment = 0; CMTime currentTime = composition.duration; - for (SCRecordSessionSegment *recordSegment in _segments) { + for (SCRecordSessionSegment *recordSegment in iSelf->_segments) { AVAsset *asset = recordSegment.asset; NSArray *audioAssetTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; @@ -858,7 +886,7 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix } } - videoTime = [self _appendTrack:videoAssetTrack toCompositionTrack:videoTrack atTime:videoTime withBounds:maxBounds]; + videoTime = [iSelf _appendTrack:videoAssetTrack toCompositionTrack:videoTrack atTime:videoTime withBounds:maxBounds]; maxBounds = videoTime; } @@ -874,7 +902,7 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix } } - audioTime = [self _appendTrack:audioAssetTrack toCompositionTrack:audioTrack atTime:audioTime withBounds:maxBounds]; + audioTime = [iSelf _appendTrack:audioAssetTrack toCompositionTrack:audioTrack atTime:audioTime withBounds:maxBounds]; } currentTime = composition.duration; @@ -886,13 +914,15 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix - (AVPlayerItem *)playerItemRepresentingSegments { __block AVPlayerItem *playerItem = nil; - [self dispatchSyncOnSessionQueue:^{ - if (_segments.count == 1) { - SCRecordSessionSegment *segment = _segments.firstObject; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + if (iSelf->_segments.count == 1) { + SCRecordSessionSegment *segment = iSelf->_segments.firstObject; playerItem = [AVPlayerItem playerItemWithAsset:segment.asset]; } else { AVMutableComposition *composition = [AVMutableComposition composition]; - [self appendSegmentsToComposition:composition]; + [iSelf appendSegmentsToComposition:composition]; playerItem = [AVPlayerItem playerItemWithAsset:composition]; } @@ -903,9 +933,11 @@ - (AVPlayerItem *)playerItemRepresentingSegments { - (AVAsset *)assetRepresentingSegments { __block AVAsset *asset = nil; - [self dispatchSyncOnSessionQueue:^{ - if (_segments.count == 1) { - SCRecordSessionSegment *segment = _segments.firstObject; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + if (iSelf->_segments.count == 1) { + SCRecordSessionSegment *segment = iSelf->_segments.firstObject; asset = segment.asset; } else { AVMutableComposition *composition = [AVMutableComposition composition]; @@ -986,10 +1018,12 @@ - (NSURL *)outputUrl { - (void)setSegmentsDirectory:(NSString *)segmentsDirectory { _segmentsDirectory = [segmentsDirectory copy]; - [self dispatchSyncOnSessionQueue:^{ + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; NSFileManager *fileManager = [NSFileManager defaultManager]; - for (SCRecordSessionSegment *recordSegment in self.segments) { - NSURL *newUrl = [SCRecordSessionSegment segmentURLForFilename:recordSegment.url.lastPathComponent andDirectory:_segmentsDirectory]; + for (SCRecordSessionSegment *recordSegment in iSelf.segments) { + NSURL *newUrl = [SCRecordSessionSegment segmentURLForFilename:recordSegment.url.lastPathComponent andDirectory:iSelf->_segmentsDirectory]; if (![newUrl isEqual:recordSegment.url]) { NSError *error = nil; diff --git 
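A hypothetical preview usage of the two representations above: a single segment plays directly, multiple segments play through the composition, and neither path requires an export first.

    AVPlayerItem *item = [recordSession playerItemRepresentingSegments];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
    [player play];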
a/Library/Sources/SCRecordSession_Internal.h b/Library/Sources/SCRecordSession_Internal.h index 556ba5f4..60a5da74 100644 --- a/Library/Sources/SCRecordSession_Internal.h +++ b/Library/Sources/SCRecordSession_Internal.h @@ -39,6 +39,7 @@ } @property (weak, nonatomic) SCRecorder *recorder; +@property (assign, atomic) CMTime segmentsDuration; @property (readonly, nonatomic) BOOL videoInitialized; @property (readonly, nonatomic) BOOL audioInitialized; diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index 67478dab..6fd3386c 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -403,7 +403,7 @@ if it is empty or not. @param completionHandler called on the main queue when the recorder is ready to record again. */ -- (void)pause:( void(^ __nullable)()) completionHandler; +- (void)pause:( void(^ __nullable)(void)) completionHandler; /** Capture a photo from the camera diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 8cac653e..171387d7 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -37,7 +37,7 @@ @interface SCRecorder() { double _lastAppendedVideoTime; NSTimer *_movieOutputProgressTimer; CMTime _lastMovieFileOutputTime; - void(^_pauseCompletionHandler)(); + void(^_pauseCompletionHandler)(void); SCFilter *_transformFilter; size_t _transformFilterBufferWidth; size_t _transformFilterBufferHeight; @@ -450,14 +450,15 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer { id delegate = self.delegate; if (recordSession != nil) { + __weak typeof(self) wSelf = self; if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) { dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didAppendVideoSampleBufferInSession:recordSession]; + [delegate recorder:wSelf didAppendVideoSampleBufferInSession:recordSession]; }); } if ([delegate respondsToSelector:@selector(recorder:didAppendAudioSampleBufferInSession:)]) { dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didAppendAudioSampleBufferInSession:_session]; + [delegate recorder:wSelf didAppendAudioSampleBufferInSession:wSelf.session]; }); } } @@ -468,13 +469,15 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer { - (void)record { _didCaptureFirstAudioBuffer = NO; - void (^block)() = ^{ - _isRecording = YES; - if (_movieOutput != nil && _session != nil) { - _movieOutput.maxRecordedDuration = self.maxRecordDuration; - [self beginRecordSegmentIfNeeded:_session]; - if (_movieOutputProgressTimer == nil) { - _movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; + __weak typeof(self) wSelf = self; + void (^block)(void) = ^{ + typeof(self) internal = wSelf; + internal->_isRecording = YES; + if (internal->_movieOutput != nil && internal.session != nil) { + internal->_movieOutput.maxRecordedDuration = internal.maxRecordDuration; + [self beginRecordSegmentIfNeeded:internal.session]; + if (internal->_movieOutputProgressTimer == nil) { + internal->_movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; } } }; @@ -490,26 +493,28 @@ - (void)pause { [self pause:nil]; } -- (void)pause:(void(^)())completionHandler { +- (void)pause:(void(^)(void))completionHandler { _isRecording = NO; - void (^block)() = ^{ - SCRecordSession *recordSession = _session; + __weak typeof(self) wSelf = self; + 
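The () to (void) changes above are more than style: in C, and therefore in Objective-C block types, an empty parameter list means "unspecified arguments", not "no arguments", and newer compilers warn about it under -Wstrict-prototypes. A two-line illustration:

    void (^legacy)()     = ^{ };   // unspecified arguments; warns on newer compilers
    void (^modern)(void) = ^{ };   // declares exactly zero arguments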
void (^block)(void) = ^{ + typeof(self) iSelf = wSelf; + SCRecordSession *recordSession = iSelf->_session; if (recordSession != nil) { if (recordSession.recordSegmentReady) { - NSDictionary *info = [self _createSegmentInfo]; + NSDictionary *info = [iSelf _createSegmentInfo]; if (recordSession.isUsingMovieFileOutput) { - [_movieOutputProgressTimer invalidate]; - _movieOutputProgressTimer = nil; + [iSelf->_movieOutputProgressTimer invalidate]; + iSelf->_movieOutputProgressTimer = nil; if ([recordSession endSegmentWithInfo:info completionHandler:nil]) { - _pauseCompletionHandler = completionHandler; + iSelf->_pauseCompletionHandler = completionHandler; } else { dispatch_handler(completionHandler); } } else { [recordSession endSegmentWithInfo:info completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - id delegate = self.delegate; + id delegate = iSelf.delegate; if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; } @@ -665,19 +670,23 @@ - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer toRecordSession: } - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections { - dispatch_async(_sessionQueue, ^{ - [_session notifyMovieFileOutputIsReady]; + __weak typeof(self) wSelf = self; + dispatch_async(_sessionQueue, ^{ + typeof(self) iSelf = wSelf; + [iSelf->_session notifyMovieFileOutputIsReady]; - if (!_isRecording) { - [self pause:_pauseCompletionHandler]; - } - }); + if (!iSelf->_isRecording) { + [self pause:iSelf->_pauseCompletionHandler]; + } + }); } - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { _isRecording = NO; + __weak typeof(self) wSelf = self; dispatch_async(_sessionQueue, ^{ + typeof(self) iSelf = wSelf; BOOL hasComplete = NO; NSError *actualError = error; if ([actualError.localizedDescription isEqualToString:@"Recording Stopped"]) { @@ -685,32 +694,32 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToO hasComplete = YES; } - [_session appendRecordSegmentUrl:outputFileURL info:[self _createSegmentInfo] error:actualError completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - void (^pauseCompletionHandler)() = _pauseCompletionHandler; - _pauseCompletionHandler = nil; + [iSelf->_session appendRecordSegmentUrl:outputFileURL info:[iSelf _createSegmentInfo] error:actualError completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { + void (^pauseCompletionHandler)(void) = iSelf->_pauseCompletionHandler; + iSelf->_pauseCompletionHandler = nil; - SCRecordSession *recordSession = _session; + SCRecordSession *recordSession = iSelf->_session; - if (recordSession != nil) { - id delegate = self.delegate; - if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { - [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; - } + if (recordSession != nil) { + id delegate = iSelf.delegate; + if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { + [delegate recorder:iSelf didCompleteSegment:segment inSession:recordSession error:error]; + } - if (hasComplete || (CMTIME_IS_VALID(_maxRecordDuration) && CMTIME_COMPARE_INLINE(recordSession.duration, >=, 
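A hypothetical call site for the reworked pause: above. Because the completion handler is now deferred until the movie file output actually stops, code in the handler always observes the finished segment:

    [recorder pause:^{
        NSLog(@"Paused; session now holds %lu segments",
              (unsigned long)recorder.session.segments.count);
    }];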
_maxRecordDuration))) { - if ([delegate respondsToSelector:@selector(recorder:didCompleteSession:)]) { - [delegate recorder:self didCompleteSession:recordSession]; - } - } - } + if (hasComplete || (CMTIME_IS_VALID(iSelf->_maxRecordDuration) && CMTIME_COMPARE_INLINE(recordSession.duration, >=, iSelf->_maxRecordDuration))) { + if ([delegate respondsToSelector:@selector(recorder:didCompleteSession:)]) { + [delegate recorder:iSelf didCompleteSession:recordSession]; + } + } + } - if (pauseCompletionHandler != nil) { - pauseCompletionHandler(); - } - }]; + if (pauseCompletionHandler != nil) { + pauseCompletionHandler(); + } + }]; - if (_isRecording) { - [self record]; + if (iSelf->_isRecording) { + [iSelf record]; } }); } @@ -749,24 +758,29 @@ - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC } BOOL isFirstVideoBuffer = !recordSession.currentSegmentHasVideo; // NSLog(@"APPENDING"); - [self appendVideoSampleBuffer:sampleBuffer toRecordSession:recordSession duration:duration connection:connection completion:^(BOOL success) { - _lastAppendedVideoTime = CACurrentMediaTime(); - if (success) { - if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didAppendVideoSampleBufferInSession:recordSession]; - }); - } - - [self checkRecordSessionDuration:recordSession]; - } else { - if ([delegate respondsToSelector:@selector(recorder:didSkipVideoSampleBufferInSession:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didSkipVideoSampleBufferInSession:recordSession]; - }); - } - } - }]; + __weak typeof(self) wSelf = self; + [self appendVideoSampleBuffer:sampleBuffer toRecordSession:recordSession + duration:duration + connection:connection + completion:^(BOOL success) { + typeof(self) iSelf = wSelf; + iSelf->_lastAppendedVideoTime = CACurrentMediaTime(); + if (success) { + if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:iSelf didAppendVideoSampleBufferInSession:recordSession]; + }); + } + + [iSelf checkRecordSessionDuration:recordSession]; + } else { + if ([delegate respondsToSelector:@selector(recorder:didSkipVideoSampleBufferInSession:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:iSelf didSkipVideoSampleBufferInSession:recordSession]; + }); + } + } + }]; if (isFirstVideoBuffer && !recordSession.currentSegmentHasAudio) { CMSampleBufferRef audioBuffer = _lastAudioBuffer.sampleBuffer; @@ -1392,15 +1406,17 @@ - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { } - (void)setSession:(SCRecordSession *)recordSession { - if (_session != recordSession) { - dispatch_sync(_sessionQueue, ^{ - _session.recorder = nil; + if (_session != recordSession) { + __weak typeof(self) wSelf = self; + dispatch_sync(_sessionQueue, ^{ + typeof(self) iSelf = wSelf; + iSelf->_session.recorder = nil; - _session = recordSession; + iSelf->_session = recordSession; - recordSession.recorder = self; - }); - } + recordSession.recorder = self; + }); + } } - (AVCaptureFocusMode)focusMode { @@ -1605,9 +1621,11 @@ - (BOOL)videoEnabledAndReady { } - (void)setKeepMirroringOnWrite:(BOOL)keepMirroringOnWrite { - dispatch_sync(_sessionQueue, ^{ - _keepMirroringOnWrite = keepMirroringOnWrite; - _transformFilter = nil; + __weak typeof(self) wSelf = self; + dispatch_sync(_sessionQueue, ^{ + typeof(self) iSelf = wSelf; + 
iSelf->_keepMirroringOnWrite = keepMirroringOnWrite; + iSelf->_transformFilter = nil; }); } diff --git a/Library/Sources/SCRecorderTools.m b/Library/Sources/SCRecorderTools.m index 8ac48d95..c4338300 100644 --- a/Library/Sources/SCRecorderTools.m +++ b/Library/Sources/SCRecorderTools.m @@ -101,7 +101,7 @@ + (NSString *)bestCaptureSessionPresetForDevice:(AVCaptureDevice *)device withMa + (NSString *)bestCaptureSessionPresetCompatibleWithAllDevices { NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - CMVideoDimensions highestCompatibleDimension; + CMVideoDimensions highestCompatibleDimension = {0,0}; BOOL lowestSet = NO; for (AVCaptureDevice *device in videoDevices) { From b0b9351258e60a434a52512a7db8bc84ec26b84e Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Tue, 8 May 2018 21:16:32 -0700 Subject: [PATCH 22/55] detaching audio for haptic goodness --- Library/Sources/SCRecorder.h | 2 ++ Library/Sources/SCRecorder.m | 70 +++++++++++++++++++++++------------- 2 files changed, 47 insertions(+), 25 deletions(-) diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index 6fd3386c..eb3b6cbd 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -417,6 +417,8 @@ */ - (void)previewViewFrameChanged; +- (NSError*)attachAudio; +- (void)detachAudio; /** Get an image representing the last output video buffer. */ diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 171387d7..0f21dbb8 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -189,7 +189,7 @@ - (void)commitConfiguration { } } -- (BOOL)_reconfigureSession { +- (BOOL)_reconfigureSession:(BOOL)attachAudio { NSError *newError = nil; AVCaptureSession *session = _captureSession; @@ -254,6 +254,37 @@ - (BOOL)_reconfigureSession { _videoOutputAdded = YES; } } + if (attachAudio) + newError = [self attachAudio]; + } + + if (self.photoConfiguration.enabled) { + if (_photoOutput == nil) { + _photoOutput = [[AVCaptureStillImageOutput alloc] init]; + _photoOutput.outputSettings = [self.photoConfiguration createOutputSettings]; + } + + if (![session.outputs containsObject:_photoOutput]) { + if ([session canAddOutput:_photoOutput]) { + [session addOutput:_photoOutput]; + } else { + if (newError == nil) { + newError = [SCRecorder createError:@"Cannot add photoOutput inside the session"]; + } + } + } + } + + [self commitConfiguration]; + } + _error = newError; + + return newError == nil; +} + +- (NSError*)attachAudio { + AVCaptureSession *session = _captureSession; + NSError* newError = nil; _audioOutputAdded = NO; if (self.audioConfiguration.enabled) { @@ -275,33 +306,22 @@ - (BOOL)_reconfigureSession { _audioOutputAdded = YES; } } + return newError; } - if (self.photoConfiguration.enabled) { - if (_photoOutput == nil) { - _photoOutput = [[AVCaptureStillImageOutput alloc] init]; - _photoOutput.outputSettings = [self.photoConfiguration createOutputSettings]; - } - - if (![session.outputs containsObject:_photoOutput]) { - if ([session canAddOutput:_photoOutput]) { - [session addOutput:_photoOutput]; - } else { - if (newError == nil) { - newError = [SCRecorder createError:@"Cannot add photoOutput inside the session"]; - } - } - } +- (void)detachAudio { + AVCaptureSession *session = _captureSession; + if (_audioOutput) { + if ([session.outputs containsObject:_audioOutput]) { + [session removeOutput:_audioOutput]; } - - [self commitConfiguration]; + _audioOutputAdded = NO; + _audioOutput = nil; } - _error = newError; - - return newError == 
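The {0,0} initializer added to bestCaptureSessionPresetCompatibleWithAllDevices above fixes a real hazard: C locals are not zero-initialized, so if no device ever assigned the struct, the old code read indeterminate memory. Either spelling below zeroes the whole struct:

    CMVideoDimensions dimensions = {0, 0}; // explicit width and height
    CMVideoDimensions alsoZeroed = {0};    // equivalent: remaining fields zeroed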
nil; } - (BOOL)prepare:(NSError **)error { + if (_captureSession != nil) { [NSException raise:@"SCCameraException" format:@"The session is already opened"]; } @@ -319,7 +339,7 @@ - (BOOL)prepare:(NSError **)error { [self beginConfiguration]; - BOOL success = [self _reconfigureSession]; + BOOL success = [self _reconfigureSession:_isRecording]; if (!success && error != nil) { *error = _error; @@ -439,7 +459,7 @@ - (void)unprepare { _previewLayer.session = nil; _captureSession = nil; } - [self _reconfigureSession]; + [self _reconfigureSession:_isRecording]; } - (void)_progressTimerFired:(NSTimer *)progressTimer { @@ -1395,7 +1415,7 @@ - (void)setCaptureSessionPreset:(NSString *)sessionPreset { _captureSessionPreset = sessionPreset; if (_captureSession != nil) { - [self _reconfigureSession]; + [self _reconfigureSession:_isRecording]; _captureSessionPreset = _captureSession.sessionPreset; } } @@ -1671,7 +1691,7 @@ - (void)setFastRecordMethodEnabled:(BOOL)fastRecordMethodEnabled { if (_fastRecordMethodEnabled != fastRecordMethodEnabled) { _fastRecordMethodEnabled = fastRecordMethodEnabled; - [self _reconfigureSession]; + [self _reconfigureSession:_isRecording]; } } From 591edba915e1438c7398c999fb37001ee5743c7c Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Wed, 16 May 2018 20:00:56 -0400 Subject: [PATCH 23/55] haptic fix --- Library/Sources/SCRecorder.m | 47 ++++++++++++++++++------------------ 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 0f21dbb8..d1219f47 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -254,8 +254,7 @@ - (BOOL)_reconfigureSession:(BOOL)attachAudio { _videoOutputAdded = YES; } } - if (attachAudio) - newError = [self attachAudio]; + newError = [self attachAudio]; } if (self.photoConfiguration.enabled) { @@ -285,29 +284,29 @@ - (BOOL)_reconfigureSession:(BOOL)attachAudio { - (NSError*)attachAudio { AVCaptureSession *session = _captureSession; NSError* newError = nil; - - _audioOutputAdded = NO; - if (self.audioConfiguration.enabled) { - if (_audioOutput == nil) { - _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; - [_audioOutput setSampleBufferDelegate:self queue:_sessionQueue]; - } - - if (![session.outputs containsObject:_audioOutput]) { - if ([session canAddOutput:_audioOutput]) { - [session addOutput:_audioOutput]; - _audioOutputAdded = YES; - } else { - if (newError == nil) { - newError = [SCRecorder createError:@"Cannot add audioOutput inside the sesssion"]; - } - } - } else { - _audioOutputAdded = YES; - } - } + + _audioOutputAdded = NO; + if (self.audioConfiguration.enabled) { + if (_audioOutput == nil) { + _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; + [_audioOutput setSampleBufferDelegate:self queue:_sessionQueue]; + } + + if (![session.outputs containsObject:_audioOutput]) { + if ([session canAddOutput:_audioOutput]) { + [session addOutput:_audioOutput]; + _audioOutputAdded = YES; + } else { + if (newError == nil) { + newError = [SCRecorder createError:@"Cannot add audioOutput inside the sesssion"]; + } + } + } else { + _audioOutputAdded = YES; + } + } return newError; - } +} - (void)detachAudio { AVCaptureSession *session = _captureSession; From ba963e3f964a424d60aee557841c5754ba4e09bd Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Thu, 17 May 2018 13:17:04 -0400 Subject: [PATCH 24/55] on the fly audio leveling --- Library/Sources/SCRecorder.m | 37 ++++++++++++++++++++++++++++ Library/Sources/SCRecorderDelegate.h | 5 ++++ 
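A hypothetical usage of the attach/detach API introduced above, in the spirit of the commit message (an active microphone input suppresses the Taptic Engine, so the audio output is dropped while haptics play). attachAudio returns nil on success:

    [recorder detachAudio];
    // ... trigger haptic feedback while no audio output is attached ...
    NSError *audioError = [recorder attachAudio];
    if (audioError != nil) {
        NSLog(@"Could not re-attach audio: %@", audioError);
    }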
SCRecorder.podspec | 2 +- 3 files changed, 43 insertions(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index d1219f47..96396c40 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -865,6 +865,43 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC // NSLog(@"SKIPPING"); } } + if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBufferList:)]) { + id delegate = self.delegate; +// CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); + size_t audioBufferListSize = 0; + AudioBufferList* audioBufferList = nil; + CMBlockBufferRef outAudioBuffer = nil; + OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, + &audioBufferListSize, + nil, + 0, + nil, nil, + 0, + nil); + + if (status == kCMBlockBufferNoErr) { + audioBufferList = malloc(audioBufferListSize); + status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, + nil, + audioBufferList, + audioBufferListSize, + nil, nil, + kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, + &outAudioBuffer); + + if (status == kCMBlockBufferNoErr) { + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ + [delegate recorder:self didAcquireAudioBufferList:audioBufferList]; + CFRelease(outAudioBuffer); + free(audioBufferList); + }); + } else { + NSLog(@"OSStatus = %i", status); + CFRelease(outAudioBuffer); + free(audioBufferList); + } + } + } } } diff --git a/Library/Sources/SCRecorderDelegate.h b/Library/Sources/SCRecorderDelegate.h index 03f537ea..8fc0ba9d 100644 --- a/Library/Sources/SCRecorderDelegate.h +++ b/Library/Sources/SCRecorderDelegate.h @@ -124,4 +124,9 @@ typedef NS_ENUM(NSInteger, SCFlashMode) { */ - (NSDictionary *__nullable)createSegmentInfoForRecorder:(SCRecorder *__nonnull)recorder; +/** + Gives an opportunity to the delegate to do some low-level audio munging. 
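A hypothetical delegate-side consumer of the new callback above, assuming 16-bit linear PCM samples (the layout later patches in this series also assume). The recorder frees the list right after the call returns, so the data must be consumed synchronously:

    - (void)recorder:(SCRecorder *)recorder didAcquireAudioBufferList:(AudioBufferList *)audioBufferList {
        SInt16 peak = 0;
        for (UInt32 i = 0; i < audioBufferList->mNumberBuffers; i++) {
            AudioBuffer buffer = audioBufferList->mBuffers[i];
            SInt16 *samples = (SInt16 *)buffer.mData;
            NSUInteger count = buffer.mDataByteSize / sizeof(SInt16);
            for (NSUInteger j = 0; j < count; j++) {
                // Avoid abs(INT16_MIN) overflow before comparing magnitudes.
                SInt16 magnitude = (samples[j] == INT16_MIN) ? INT16_MAX
                                                             : (SInt16)abs(samples[j]);
                if (magnitude > peak) {
                    peak = magnitude;
                }
            }
        }
        NSLog(@"Peak amplitude: %d / 32767", peak);
    }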
+ */ +- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBufferList:(AudioBufferList*)audioBufferList; + @end diff --git a/SCRecorder.podspec b/SCRecorder.podspec index 0b5bb389..bcb41d89 100644 --- a/SCRecorder.podspec +++ b/SCRecorder.podspec @@ -11,7 +11,7 @@ Pod::Spec.new do |s| s.homepage = "https://github.com/rFlex/SCRecorder" s.license = 'Apache License, Version 2.0' s.author = { "Simon CORSIN" => "simon@corsin.me" } - s.platform = :ios, '7.0' + s.platform = :ios, '8.0' s.source = { :git => "https://github.com/rFlex/SCRecorder.git", :tag => "v2.7.0" } s.source_files = 'Library/Sources/*.{h,m}' s.public_header_files = 'Library/Sources/*.h' From 62b10d672333755d6aa4e45840f0a49401f66c9b Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Fri, 18 May 2018 11:17:55 -0400 Subject: [PATCH 25/55] more touchup --- Library/Sources/SCRecorder.m | 38 ++++++++++++------------------------ 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 96396c40..9f16c134 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -868,39 +868,27 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBufferList:)]) { id delegate = self.delegate; // CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); - size_t audioBufferListSize = 0; - AudioBufferList* audioBufferList = nil; + size_t size = sizeof(AudioBufferList); + AudioBufferList* audioBufferList = malloc(size); CMBlockBufferRef outAudioBuffer = nil; OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, - &audioBufferListSize, - nil, - 0, + &size, + audioBufferList, + size, nil, nil, - 0, - nil); + kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, + &outAudioBuffer); - if (status == kCMBlockBufferNoErr) { - audioBufferList = malloc(audioBufferListSize); - status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, - nil, - audioBufferList, - audioBufferListSize, - nil, nil, - kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, - &outAudioBuffer); - + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ if (status == kCMBlockBufferNoErr) { - dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ - [delegate recorder:self didAcquireAudioBufferList:audioBufferList]; - CFRelease(outAudioBuffer); - free(audioBufferList); - }); + [delegate recorder:self didAcquireAudioBufferList:audioBufferList]; } else { NSLog(@"OSStatus = %i", status); - CFRelease(outAudioBuffer); - free(audioBufferList); } - } + if (outAudioBuffer) CFRelease(outAudioBuffer); + free(audioBufferList); + }); + } } } From 260f95efabe3021ed554498c4f78475f86ade9a7 Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Fri, 18 May 2018 18:24:44 -0400 Subject: [PATCH 26/55] final audio goodness --- Library/Sources/SCRecorder.m | 25 +++++++++---------------- Library/Sources/SCRecorderDelegate.h | 2 +- 2 files changed, 10 insertions(+), 17 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 9f16c134..4095823f 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -865,30 +865,23 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC // NSLog(@"SKIPPING"); } } - if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBufferList:)]) { + if ([_delegate 
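One caveat on the fixed-size shortcut above: sizeof(AudioBufferList) declares room for exactly one AudioBuffer, so it only holds for mono or interleaved audio. When the channel layout is not known up front, the original two-call sizing pattern is the defensive choice:

    size_t sizeNeeded = 0;
    OSStatus sizingStatus = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer, &sizeNeeded, NULL, 0, NULL, NULL, 0, NULL);
    // A second call with a malloc(sizeNeeded) buffer then fills the list,
    // exactly as the earlier revision of this method did.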
respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:)]) { id delegate = self.delegate; -// CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); - size_t size = sizeof(AudioBufferList); - AudioBufferList* audioBufferList = malloc(size); - CMBlockBufferRef outAudioBuffer = nil; - OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, - &size, - audioBufferList, - size, - nil, nil, - kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, - &outAudioBuffer); + CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); + CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); + size_t sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, 0); + size_t dataLength = sampleCount * sampleSize; + SInt16* data = malloc(dataLength); + OSStatus status = CMBlockBufferCopyDataBytes(blockBuffer, 0, dataLength, data); dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ if (status == kCMBlockBufferNoErr) { - [delegate recorder:self didAcquireAudioBufferList:audioBufferList]; + [delegate recorder:self didAcquireAudioBuffer:data length:sampleCount]; } else { NSLog(@"OSStatus = %i", status); } - if (outAudioBuffer) CFRelease(outAudioBuffer); - free(audioBufferList); + free(data); }); - } } } diff --git a/Library/Sources/SCRecorderDelegate.h b/Library/Sources/SCRecorderDelegate.h index 8fc0ba9d..3f806497 100644 --- a/Library/Sources/SCRecorderDelegate.h +++ b/Library/Sources/SCRecorderDelegate.h @@ -127,6 +127,6 @@ typedef NS_ENUM(NSInteger, SCFlashMode) { /** Gives an opportunity to the delegate to do some low-level audio munging. */ -- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBufferList:(AudioBufferList*)audioBufferList; +- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBuffer:(SInt16*)audioBuffer length:(CMItemCount)length; @end From c775aa972c94727eccc6abbf93dfcb759d3ee90d Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Tue, 5 Jun 2018 09:55:28 -0700 Subject: [PATCH 27/55] exporting larger video tracks in composition --- Library/Sources/SCAssetExportSession.m | 216 +++++++++++++++++++++++-- Library/Sources/SCVideoConfiguration.h | 2 + Library/Sources/SCVideoConfiguration.m | 20 ++- 3 files changed, 219 insertions(+), 19 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 67b897a9..0bdd6517 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -180,13 +180,13 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput CGContextRef ctx = SCCreateContextFromPixelBuffer(outputPixelBuffer); void (^layoutBlock)(void) = ^{ - overlay.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); +// overlay.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); if ([overlay respondsToSelector:@selector(updateWithVideoTime:)]) { [overlay updateWithVideoTime:timeSeconds]; } - [overlay layoutIfNeeded]; +// [overlay layoutIfNeeded]; }; if (onMainThread) { @@ -583,26 +583,31 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { _inputBufferSize = CGSizeZero; if (videoTracks.count > 0 && self.videoConfiguration.enabled && !self.videoConfiguration.shouldIgnore) { AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0]; + CGAffineTransform trackTransform = videoTrack.preferredTransform; + + // Output + AVVideoComposition *videoComposition = self.videoConfiguration.composition; + + if (videoComposition == nil) { + _inputBufferSize 
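A hypothetical implementation of the simplified callback above, computing an RMS level in dBFS: the "on the fly audio leveling" an earlier commit message refers to. audioBuffer is freed right after this returns, so the samples must be consumed synchronously:

    - (void)recorder:(SCRecorder *)recorder didAcquireAudioBuffer:(SInt16 *)audioBuffer length:(CMItemCount)length {
        double sumOfSquares = 0;
        for (CMItemCount i = 0; i < length; i++) {
            double sample = audioBuffer[i] / 32768.0; // normalize to [-1, 1)
            sumOfSquares += sample * sample;
        }
        double rms = (length > 0) ? sqrt(sumOfSquares / (double)length) : 0;
        double decibels = 20.0 * log10(rms + 1e-9);   // epsilon avoids log10(0)
        NSLog(@"Audio level: %.1f dBFS", decibels);
    }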
= videoTrack.naturalSize; + } else { + _inputBufferSize = videoComposition.renderSize; + } // Input - NSDictionary *videoSettings = [_videoConfiguration createAssetWriterOptionsWithVideoSize:videoTrack.naturalSize]; + NSDictionary *videoSettings = [_videoConfiguration createAssetWriterOptionsWithVideoSize:_inputBufferSize + sizeIsSuggestion:videoComposition == nil]; _videoInput = [self addWriter:AVMediaTypeVideo withSettings:videoSettings]; if (_videoConfiguration.keepInputAffineTransform) { _videoInput.transform = videoTrack.preferredTransform; - } else { - _videoInput.transform = _videoConfiguration.affineTransform; - } + } else if (videoComposition) { + _videoInput.transform = trackTransform; + } else + _videoInput.transform = _videoConfiguration.affineTransform; - // Output - AVVideoComposition *videoComposition = self.videoConfiguration.composition; - if (videoComposition == nil) { - _inputBufferSize = videoTrack.naturalSize; - } else { - _inputBufferSize = videoComposition.renderSize; - } - CGSize outputBufferSize = _inputBufferSize; + CGSize outputBufferSize = videoComposition.renderSize; if (!CGSizeEqualToSize(self.videoConfiguration.bufferSize, CGSizeZero)) { outputBufferSize = self.videoConfiguration.bufferSize; } @@ -734,4 +739,187 @@ - (AVAssetReader *)reader { return _reader; } ++ (UIImageOrientation)orientationForVideoTransform:(CGAffineTransform)videoTransform { + + UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; //leave this - it may be used in the future + + if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { + videoAssetOrientation_= UIImageOrientationRight; + } + if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { + videoAssetOrientation_ = UIImageOrientationLeft; + } + if(videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { + videoAssetOrientation_ = UIImageOrientationUp; + } + if(videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) + { + videoAssetOrientation_ = UIImageOrientationDown; + } + return videoAssetOrientation_; +} + +- (CGAffineTransform)transformForVideoTransform:(CGAffineTransform)videoTransform + naturalSize:(CGSize)naturalSize + withRequiredResolution:(CGSize)requiredResolution { + //FIXING ORIENTATION// + UIImageOrientation videoAssetOrientation_ = [SCAssetExportSession orientationForVideoTransform:videoTransform]; //leave this - it may be used in the future + BOOL isVideoAssetPortrait_ = NO; + if (videoAssetOrientation_ == UIImageOrientationRight || + videoAssetOrientation_ == UIImageOrientationLeft) { + isVideoAssetPortrait_ = YES; + } + CGFloat trackWidth = naturalSize.width; + CGFloat trackHeight = naturalSize.height; + CGFloat widthRatio = 0; + CGFloat heightRatio = 0; + + double aspectRatio = (MAX(trackWidth, trackHeight) / MIN(trackWidth, trackHeight)); + double delta = ABS(aspectRatio - (16.0/9.0)); + BOOL closeEnoughTo16x9 = delta < 0.1; // 1.6777 .. 1.8777 tag:gabe - if this is encompassing too much - maybe 0.08? 
+ + // DLog(@"image size %f,%f", trackWidth,trackHeight); + // DLog(@"required size %f,%f", self.requiredResolution.width,self.requiredResolution.height); + // DLog(@"Original transform a(%f) b(%f) c(%f) d(%f) tx(%f) ty(%f)", + // videoTransform.a,videoTransform.b,videoTransform.c,videoTransform.d,videoTransform.tx,videoTransform.ty) + /* + * 2 Main Cases + * a- portrait + * happens when taking video from the rear camera or selecting from the library + * + * b- landscape + * clips from the library should be in landscape + * as well as camera footage from the front camera + * + * */ + if(isVideoAssetPortrait_) { + // DLog(@"IS PORTRAIT - ORIGINAL TRANSFORM"); + trackWidth = naturalSize.height; + trackHeight = naturalSize.width; + + if (trackWidth == requiredResolution.width && + trackHeight == requiredResolution.height) { + return videoTransform; + } else { + widthRatio = requiredResolution.width / trackWidth; + heightRatio = requiredResolution.height / trackHeight; + + if (closeEnoughTo16x9) { + // aspect fill time + if (widthRatio < heightRatio) + widthRatio = heightRatio; + else + heightRatio = widthRatio; + } else { + /* + * Since this is portrait, that means the height should be taller than the width + * therefore, adjust to fit height and center via width + * */ + + // aspect fit (old code) + heightRatio = requiredResolution.height / trackHeight; + widthRatio = heightRatio; + } + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + CGFloat translationDistanceX = 0; + CGFloat translationDistanceY = 0; + + /* + * If width < required width, center by width + * height will always fill the screen + * center it by height Just in case + * */ + CGFloat newWidth = widthRatio * trackWidth; + if (newWidth != requiredResolution.width) { + translationDistanceX = (requiredResolution.width - newWidth)/2; + } + CGFloat newHeight = heightRatio * trackHeight; + if (newHeight != requiredResolution.height) { + translationDistanceY = (requiredResolution.height - newHeight)/2; + } + + // DLog(@"translate %f,%f", translationDistanceX, translationDistanceY); + return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY)); + } + } else { + trackWidth = naturalSize.width; + trackHeight = naturalSize.height; + /* + * Fix for shit saved locally + * */ + BOOL isOrientedUpWithSwitchedWidthHeight = NO; + if ((videoAssetOrientation_ == UIImageOrientationUp || videoAssetOrientation_ == UIImageOrientationDown) + && trackWidth > trackHeight) { + isOrientedUpWithSwitchedWidthHeight = YES; + } + /* + * Special case for photos that haven been recorded with swapped settings + * */ + if (isOrientedUpWithSwitchedWidthHeight) { + trackWidth = naturalSize.height; + trackHeight = naturalSize.width; + widthRatio = requiredResolution.width/trackWidth; + heightRatio = requiredResolution.height/trackHeight; + CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(widthRatio,heightRatio); + CGFloat translationDistance = (CGFloat) (heightRatio*fabs(videoTransform.ty)); + return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, translationDistance));//840 is the number + } else if (trackWidth == requiredResolution.width && + trackHeight == requiredResolution.height) { + /*If the resolution is the same, just rotate and scale*/ + widthRatio = requiredResolution.width/trackWidth; + heightRatio = 
requiredResolution.height/trackHeight; + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + return CGAffineTransformConcat(videoTransform, scaleFactor); + } else { + if (closeEnoughTo16x9) { + widthRatio = requiredResolution.width / trackWidth; + heightRatio = requiredResolution.height / trackHeight; + + // aspect fill time + if (widthRatio < heightRatio) + widthRatio = heightRatio; + else + heightRatio = widthRatio; + } else { + /*If the resolutions are different and the video's height > width + * scale by height + * if the video is 16x9 it will fit + * */ + // aspect fit (old code) + if (trackHeight > trackWidth) { + heightRatio = requiredResolution.height/trackHeight; + widthRatio = heightRatio; + } else { + /* Occurs for square videos + * otherwise will only happen for landscape videos which are not supported + * */ + widthRatio = requiredResolution.width/trackWidth; + heightRatio = widthRatio; + } + } + // DLog(@"NOT PORTRAIT") + // DLog(@"LANDSCAPE width ratio %f", widthRatio); + // DLog(@"LANDSCAPE height ratio %f", heightRatio); + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + CGFloat translationDistanceX = 0; + CGFloat translationDistanceY = 0; + /* + * If width < required width, center by width + * height will always fill the screen + * */ + CGFloat newWidth = widthRatio * trackWidth; + if (newWidth != requiredResolution.width) { + translationDistanceX = (requiredResolution.width - newWidth)/2; + } + CGFloat newHeight = heightRatio * trackHeight; + if (newHeight != requiredResolution.height) { + translationDistanceY = (requiredResolution.height - newHeight)/2; + } + // DLog(@"translation x,y %f,%f", translationDistanceX, translationDistanceY) + //CGFloat translationDistance = (CGFloat) (heightRatio*fabs(videoTransform.ty)); + return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY)); + } + } +} + @end diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index c5e6a453..1ae29f3f 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -181,5 +181,7 @@ typedef enum : NSUInteger { - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize; +- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize + sizeIsSuggestion:(BOOL)suggestion; @end diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m index f1bb8608..d89a27d4 100644 --- a/Library/Sources/SCVideoConfiguration.m +++ b/Library/Sources/SCVideoConfiguration.m @@ -36,7 +36,12 @@ static CGSize MakeVideoSize(CGSize videoSize, float requestedWidth) { return CGSizeMake(videoSize.width / ratio, videoSize.height / ratio); } -- (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize { +- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize { + return [self createAssetWriterOptionsWithVideoSize:videoSize sizeIsSuggestion:YES]; +} + +- (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize + sizeIsSuggestion:(BOOL)suggestion { NSDictionary *options = self.options; if (options != nil) { return options; @@ -48,18 +53,23 @@ - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize { if (self.preset != nil) { if ([self.preset isEqualToString:SCPresetLowQuality]) { bitrate = 500000; - outputSize = MakeVideoSize(videoSize, 
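The geometry both branches above share, condensed into one helper. This is a restatement, not a drop-in replacement (the real method also folds in the track's preferredTransform for rotation), but it shows the rule: aspect-fill takes the larger scale ratio, aspect-fit the smaller, and leftover space is split evenly to center the track.

    static CGAffineTransform SCScaleAndCenter(CGSize track, CGSize required, BOOL fill) {
        CGFloat sx = required.width / track.width;
        CGFloat sy = required.height / track.height;
        CGFloat scale = fill ? MAX(sx, sy) : MIN(sx, sy);
        CGFloat tx = (required.width - track.width * scale) / 2.0;
        CGFloat ty = (required.height - track.height * scale) / 2.0;
        return CGAffineTransformConcat(CGAffineTransformMakeScale(scale, scale),
                                       CGAffineTransformMakeTranslation(tx, ty));
    }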
640); + if (suggestion) + outputSize = MakeVideoSize(videoSize, 640); } else if ([self.preset isEqualToString:SCPresetMediumQuality]) { bitrate = 1000000; - outputSize = MakeVideoSize(videoSize, 1280); + if (suggestion) + outputSize = MakeVideoSize(videoSize, 1280); } else if ([self.preset isEqualToString:SCPresetHighestQuality]) { bitrate = 6000000; - outputSize = MakeVideoSize(videoSize, 1920); + if (suggestion) + outputSize = MakeVideoSize(videoSize, 1920); } else { NSLog(@"Unrecognized video preset %@", self.preset); } } - + if (suggestion == NO) + outputSize = videoSize; + if (CGSizeEqualToSize(outputSize, CGSizeZero)) { outputSize = videoSize; } From 3a9117379f74f6f9742fd9c179553de0349f4236 Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Tue, 5 Jun 2018 17:04:29 -0700 Subject: [PATCH 28/55] overlay muting support --- Library/Sources/SCRecorder.h | 3 + Library/Sources/SCRecorder.m | 149 +++++++++++++++++++-------- Library/Sources/SCRecorderDelegate.h | 3 +- 3 files changed, 110 insertions(+), 45 deletions(-) diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index eb3b6cbd..50fa51b3 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -280,6 +280,8 @@ */ @property (readonly, nonatomic) dispatch_queue_t __nonnull sessionQueue; +@property (assign, nonatomic) BOOL audioMute; + /** Create a recorder @return the newly created recorder @@ -419,6 +421,7 @@ - (NSError*)attachAudio; - (void)detachAudio; + /** Get an image representing the last output video buffer. */ diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 4095823f..abf8bbb7 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -33,16 +33,23 @@ @interface SCRecorder() { BOOL _didCaptureFirstAudioBuffer; BOOL _preparing; BOOL _reconfiguring; + BOOL _audioMuting; int _beginSessionConfigurationCount; double _lastAppendedVideoTime; NSTimer *_movieOutputProgressTimer; - CMTime _lastMovieFileOutputTime; + CMTime _lastMovieFileOutputTime; + CMTime _firstAudioTime; void(^_pauseCompletionHandler)(void); SCFilter *_transformFilter; size_t _transformFilterBufferWidth; size_t _transformFilterBufferHeight; + + CMBlockBufferRef quietBlockBuffer; + CMSampleBufferRef quietSampleBuffer; + } @property (nonatomic, strong) SCContext* scContext; + @end @implementation SCRecorder @@ -57,7 +64,9 @@ - (id)init { self = [super init]; if (self) { - _sessionQueue = dispatch_queue_create("me.corsin.SCRecorder.RecordSession", nil); + quietBlockBuffer = nil; + quietSampleBuffer = nil; + _sessionQueue = dispatch_queue_create("me.corsin.SCRecorder.RecordSession", nil); dispatch_queue_set_specific(_sessionQueue, kSCRecorderRecordSessionQueueKey, "true", nil); dispatch_set_target_queue(_sessionQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); @@ -85,6 +94,7 @@ - (id)init { _resetZoomOnChangeDevice = YES; _mirrorOnFrontCamera = NO; _automaticallyConfiguresApplicationAudioSession = YES; + _audioMuting = NO; self.device = AVCaptureDevicePositionBack; _videoConfiguration = [SCVideoConfiguration new]; @@ -104,11 +114,19 @@ - (id)init { } - (void)dealloc { - [_videoConfiguration removeObserver:self forKeyPath:@"enabled"]; - [_audioConfiguration removeObserver:self forKeyPath:@"enabled"]; - [_photoConfiguration removeObserver:self forKeyPath:@"options"]; + [_videoConfiguration removeObserver:self forKeyPath:@"enabled"]; + [_audioConfiguration removeObserver:self forKeyPath:@"enabled"]; + [_photoConfiguration removeObserver:self forKeyPath:@"options"]; + 
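A hypothetical usage of the new mute switch declared above: substitute silence while an overlay plays its own sound, without detaching the audio output or ending the current segment.

    recorder.audioMute = YES;
    // ... overlay audio plays; recording continues with silent samples ...
    recorder.audioMute = NO;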
+ [[NSNotificationCenter defaultCenter] removeObserver:self]; + + if (quietBlockBuffer) + CFRelease(quietBlockBuffer); + quietBlockBuffer = nil; + if (quietSampleBuffer) + CFRelease(quietSampleBuffer); + quietSampleBuffer = nil; - [[NSNotificationCenter defaultCenter] removeObserver:self]; [self unprepare]; } @@ -778,7 +796,8 @@ - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC BOOL isFirstVideoBuffer = !recordSession.currentSegmentHasVideo; // NSLog(@"APPENDING"); __weak typeof(self) wSelf = self; - [self appendVideoSampleBuffer:sampleBuffer toRecordSession:recordSession + [self appendVideoSampleBuffer:sampleBuffer + toRecordSession:recordSession duration:duration connection:connection completion:^(BOOL success) { @@ -805,11 +824,12 @@ - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC CMSampleBufferRef audioBuffer = _lastAudioBuffer.sampleBuffer; if (audioBuffer != nil) { CMTime lastAudioEndTime = CMTimeAdd(CMSampleBufferGetPresentationTimeStamp(audioBuffer), CMSampleBufferGetDuration(audioBuffer)); - CMTime videoStartTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + CMTime videoStartTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + // If the end time of the last audio buffer is after this video buffer, we need to re-use it, // since it was skipped on the last cycle to wait until the video becomes ready. if (CMTIME_COMPARE_INLINE(lastAudioEndTime, >, videoStartTime)) { - [self _handleAudioSampleBuffer:audioBuffer withSession:recordSession]; + [self _handleAudioSampleBuffer:audioBuffer withSession:recordSession]; } } } @@ -865,7 +885,7 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC // NSLog(@"SKIPPING"); } } - if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:)]) { + if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:timestamp:)]) { id delegate = self.delegate; CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); @@ -873,10 +893,13 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC size_t dataLength = sampleCount * sampleSize; SInt16* data = malloc(dataLength); OSStatus status = CMBlockBufferCopyDataBytes(blockBuffer, 0, dataLength, data); - +// CMTime audioStartTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + CMTime audioStartTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer); + CMTime bufferTimestamp = CMTimeSubtract(audioStartTime, _firstAudioTime); + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ if (status == kCMBlockBufferNoErr) { - [delegate recorder:self didAcquireAudioBuffer:data length:sampleCount]; + [delegate recorder:self didAcquireAudioBuffer:data length:sampleCount timestamp:bufferTimestamp]; } else { NSLog(@"OSStatus = %i", status); } @@ -887,42 +910,73 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC } - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { - if (captureOutput == _videoOutput) { - _lastVideoBuffer.sampleBuffer = sampleBuffer; -// NSLog(@"VIDEO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); - if (_videoConfiguration.shouldIgnore) { - return; - } + if 
(captureOutput == _videoOutput) { + _lastVideoBuffer.sampleBuffer = sampleBuffer; + // NSLog(@"VIDEO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); - SCImageView *imageView = _SCImageView; - if (imageView != nil) { - CFRetain(sampleBuffer); - dispatch_async(dispatch_get_main_queue(), ^{ - [imageView setImageBySampleBuffer:sampleBuffer]; - CFRelease(sampleBuffer); - }); - } - } else if (captureOutput == _audioOutput) { - _lastAudioBuffer.sampleBuffer = sampleBuffer; -// NSLog(@"AUDIO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); + if (_videoConfiguration.shouldIgnore) { + return; + } - if (_audioConfiguration.shouldIgnore) { - return; - } - } + SCImageView *imageView = _SCImageView; + if (imageView != nil) { + CFRetain(sampleBuffer); + dispatch_async(dispatch_get_main_queue(), ^{ + [imageView setImageBySampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + }); + } + } else if (captureOutput == _audioOutput) { + _lastAudioBuffer.sampleBuffer = sampleBuffer; + // NSLog(@"AUDIO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); - if (!_initializeSessionLazily || _isRecording) { - SCRecordSession *recordSession = _session; - if (recordSession != nil) { - if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) { - [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection]; - } else if (captureOutput == _audioOutput) { - _didCaptureFirstAudioBuffer = YES; - [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession]; - } - } - } + if (_audioConfiguration.shouldIgnore) { + return; + } + if (_audioMuting) { + if (quietSampleBuffer == nil) { + CMItemCount numSamples = CMSampleBufferGetNumSamples(sampleBuffer); + size_t sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, 0); + CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); + size_t dataLength = numSamples * sampleSize; + OSStatus status = noErr; + static SInt16* blockOfZeros = nil; + + if (blockOfZeros == nil) + blockOfZeros = malloc(dataLength); + + memset(blockOfZeros, 0x0000, dataLength); + + status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, blockOfZeros, dataLength, kCFAllocatorDefault, nil, 0, dataLength, kCMBlockBufferAssureMemoryNowFlag, &quietBlockBuffer); + if (status) NSLog(@"CMBlockBuffer OSStatus = %i", status); + if (status == noErr) { + status = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault, quietBlockBuffer, YES, nil, nil, format, numSamples, kCMTimeZero, nil, &quietSampleBuffer); + if (status) NSLog(@"CMSampleBuffer OSStatus = %i", status); + } + } + if (quietSampleBuffer) { + CMSampleBufferSetOutputPresentationTimeStamp(quietSampleBuffer, CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + sampleBuffer = quietSampleBuffer; + _lastAudioBuffer.sampleBuffer = sampleBuffer; + } + } + } + + if (!_initializeSessionLazily || _isRecording) { + SCRecordSession *recordSession = _session; + if (recordSession != nil) { + if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) { + [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection]; + } else if (captureOutput == _audioOutput) { + if (_didCaptureFirstAudioBuffer == NO) + _firstAudioTime = 
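A condensed sketch of the silence-substitution technique above, assuming signed linear PCM (where zero bytes are digital silence). Unlike the patch, it allocates fresh memory per buffer: passing kCFAllocatorDefault as the block allocator transfers ownership of the bytes to the block buffer, which will free them, so a reused static pointer is risky in that position.

    #import <CoreMedia/CoreMedia.h>

    static CMSampleBufferRef SCCreateSilentBufferLike(CMSampleBufferRef src) {
        CMItemCount samples = CMSampleBufferGetNumSamples(src);
        size_t length = (size_t)samples * CMSampleBufferGetSampleSize(src, 0);
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(src);
        void *zeros = calloc(1, length); // zeroed bytes == silence in signed PCM
        CMBlockBufferRef block = NULL;
        CMSampleBufferRef out = NULL;
        OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
            zeros, length, kCFAllocatorDefault /* takes ownership of zeros */,
            NULL, 0, length, 0, &block);
        if (status == kCMBlockBufferNoErr) {
            CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault,
                block, YES, NULL, NULL, format, samples,
                CMSampleBufferGetPresentationTimeStamp(src), NULL, &out);
            CFRelease(block);
        } else {
            free(zeros); // ownership did not transfer on failure
        }
        return out; // caller must CFRelease when done
    }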
CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + _didCaptureFirstAudioBuffer = YES; + + [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession]; + } + } + } } - (NSDictionary *)_createSegmentInfo { @@ -1733,4 +1787,11 @@ + (BOOL)isSessionQueue { return dispatch_get_specific(kSCRecorderRecordSessionQueueKey) != nil; } +- (void)setAudioMute:(BOOL)muting { + _audioMuting = muting; +} + +- (BOOL)audioMute { + return _audioMuting; +} @end diff --git a/Library/Sources/SCRecorderDelegate.h b/Library/Sources/SCRecorderDelegate.h index 3f806497..ca60724c 100644 --- a/Library/Sources/SCRecorderDelegate.h +++ b/Library/Sources/SCRecorderDelegate.h @@ -127,6 +127,7 @@ typedef NS_ENUM(NSInteger, SCFlashMode) { /** Gives an opportunity to the delegate to do some low-level audio munging. */ -- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBuffer:(SInt16*)audioBuffer length:(CMItemCount)length; +- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBuffer:(SInt16*)audioBuffer + length:(CMItemCount)length timestamp:(CMTime)time; @end From 748e00a07473c64ad4eb9a921afed74f770f4dae Mon Sep 17 00:00:00 2001 From: Roy Lovejoy Date: Thu, 7 Jun 2018 22:21:27 -0700 Subject: [PATCH 29/55] warnings --- Library/Sources/SCRecorder.h | 4 ++-- Library/Sources/SCRecorderDelegate.h | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index 50fa51b3..e167545d 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -419,13 +419,13 @@ */ - (void)previewViewFrameChanged; -- (NSError*)attachAudio; +- (NSError* __nullable)attachAudio; - (void)detachAudio; /** Get an image representing the last output video buffer. */ -- (UIImage *__nullable)snapshotOfLastVideoBuffer; +- (UIImage * __nullable)snapshotOfLastVideoBuffer; /** Returns a shared recorder if you want to use the same instance throughout your app. diff --git a/Library/Sources/SCRecorderDelegate.h b/Library/Sources/SCRecorderDelegate.h index ca60724c..caa82713 100644 --- a/Library/Sources/SCRecorderDelegate.h +++ b/Library/Sources/SCRecorderDelegate.h @@ -127,7 +127,7 @@ typedef NS_ENUM(NSInteger, SCFlashMode) { /** Gives an opportunity to the delegate to do some low-level audio munging. 
From d321424205385ad96407a03f6c69772f3875b8f7 Mon Sep 17 00:00:00 2001
From: Roy Lovejoy
Date: Mon, 11 Jun 2018 13:30:25 -0700
Subject: [PATCH 30/55] warning removal

---
 Library/Sources/SCRecorder.h | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h
index e167545d..4984ba44 100644
--- a/Library/Sources/SCRecorder.h
+++ b/Library/Sources/SCRecorder.h
@@ -272,7 +272,10 @@
 /**
  The underlying AVCaptureStillImageOutput
  */
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 @property (readonly, nonatomic) AVCaptureStillImageOutput *__nullable photoOutput;
+#pragma clang diagnostic pop

 /**
  The dispatch queue that the SCRecorder uses for sending messages to the attached

From d2342fa7e62d071d49a5d66aacabc5d416b6dd87 Mon Sep 17 00:00:00 2001
From: Roy Lovejoy
Date: Tue, 26 Jun 2018 20:06:09 -0700
Subject: [PATCH 31/55] #500 code work

---
 Library/Sources/SCRecorder.h |  6 +++++
 Library/Sources/SCRecorder.m | 46 ++++++++++++++++++++++++++----------
 2 files changed, 40 insertions(+), 12 deletions(-)

diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h
index 4984ba44..01a625ac 100644
--- a/Library/Sources/SCRecorder.h
+++ b/Library/Sources/SCRecorder.h
@@ -390,6 +390,12 @@
  */
 - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width andHeight:(int)height error:(NSError*__nullable*__nullable)error;

+/**
+ Necessary because, for reasons unknown, a camera flip makes BitSmash call pause and then
+ record instead of just flipping; one "master" prerecord marks where the real start is.
+ */
+- (void)prerecord;
+
 /**
  Allow the recorder to append the sample buffers inside the current setted session
  */
diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m
index abf8bbb7..d0e15733 100644
--- a/Library/Sources/SCRecorder.m
+++ b/Library/Sources/SCRecorder.m
@@ -30,15 +30,17 @@ @interface SCRecorder() {
     BOOL _shouldAutoresumeRecording;
     BOOL _needsSwitchBackToContinuousFocus;
     BOOL _adjustingFocus;
-    BOOL _didCaptureFirstAudioBuffer;
+    BOOL _didCaptureFirstAudioBuffer;
+    BOOL _didCaptureFirstSessionBuffer;
     BOOL _preparing;
    BOOL _reconfiguring;
     BOOL _audioMuting;
+    BOOL hasDidAcquireAudioBuffer;
     int _beginSessionConfigurationCount;
     double _lastAppendedVideoTime;
     NSTimer *_movieOutputProgressTimer;
     CMTime _lastMovieFileOutputTime;
-    CMTime _firstAudioTime;
+    CMTime _firstBufferTime, _firstSessionTime, _runningTime, _lastBufferTime;
     void(^_pauseCompletionHandler)(void);
     SCFilter *_transformFilter;
     size_t _transformFilterBufferWidth;
@@ -504,8 +506,14 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer {
     _lastMovieFileOutputTime = recordedDuration;
 }

+- (void)prerecord {
+    _didCaptureFirstSessionBuffer = NO;
+    _runningTime = kCMTimeZero;
+    hasDidAcquireAudioBuffer = [_delegate respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:timestamp:)];
+}
+
 - (void)record {
-    _didCaptureFirstAudioBuffer = NO;
+    _didCaptureFirstAudioBuffer = NO;
     __weak typeof(self) wSelf = self;
     void (^block)(void) = ^{
         typeof(self) internal = wSelf;
@@ -885,7 +893,25 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC
 //            NSLog(@"SKIPPING");
         }
     }
-    if ([_delegate respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:timestamp:)]) {
+
+    if (hasDidAcquireAudioBuffer) {
+        CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
+
+        if (_didCaptureFirstSessionBuffer == NO) {
+//            NSLog(@"**** FIRST SESSION ***");
+            _firstSessionTime = sampleTime;
+            _lastBufferTime = sampleTime;
+            _didCaptureFirstSessionBuffer = YES;
+        }
+        if (_didCaptureFirstAudioBuffer == NO) {
+//            NSLog(@"**** FIRST BUFFER ***");
+            _runningTime = CMTimeSubtract(_lastBufferTime, _firstSessionTime);
+            _firstBufferTime = sampleTime;
+            _didCaptureFirstAudioBuffer = YES;
+        }
+        _lastBufferTime = CMTimeAdd(sampleTime, sampleDuration);
+
         id delegate = self.delegate;
         CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
         CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer);
@@ -893,9 +919,10 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC
         size_t dataLength = sampleCount * sampleSize;
         SInt16* data = malloc(dataLength);
         OSStatus status = CMBlockBufferCopyDataBytes(blockBuffer, 0, dataLength, data);
-//        CMTime audioStartTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-        CMTime audioStartTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
-        CMTime bufferTimestamp = CMTimeSubtract(audioStartTime, _firstAudioTime);
+        CMTime bufferTimestamp = CMTimeSubtract(sampleTime, _firstBufferTime);
+
+        if (CMTimeCompare(_runningTime, kCMTimeZero))
+            bufferTimestamp = CMTimeAdd(bufferTimestamp, _runningTime);

         dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{
             if (status == kCMBlockBufferNoErr) {
@@ -969,10 +996,6 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
                 if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) {
                     [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection];
                 } else if (captureOutput == _audioOutput) {
-                    if (_didCaptureFirstAudioBuffer == NO)
-                        _firstAudioTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-                    _didCaptureFirstAudioBuffer = YES;
-
                     [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession];
                 }
             }
@@ -1427,7 +1450,6 @@ - (void)setDevice:(AVCaptureDevicePosition)device {
     if (_captureSession != nil && !_reconfiguring) {
         [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:NO];
     }
-
     [self didChangeValueForKey:@"device"];
 }
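The timestamp bookkeeping above is small timeline algebra: _firstSessionTime anchors the whole session, _runningTime accumulates the media time recorded before the most recent pause, and each buffer timestamp handed to the delegate is rebased so the stream reads as gapless across pause/record cycles. Callers are expected to invoke prerecord once before the first record of a session so the accumulator starts from zero. A standalone sketch of the same arithmetic (the function and its parameter names are illustrative, not library API):

    // Sketch of the rebasing arithmetic used in _handleAudioSampleBuffer.
    static CMTime SCRebasedTimestamp(CMTime sampleTime, CMTime firstBufferTime, CMTime runningTime) {
        // Offset of this buffer inside the current record segment...
        CMTime ts = CMTimeSubtract(sampleTime, firstBufferTime);
        // ...shifted by the media time already recorded before the last pause,
        // so delegate timestamps stay monotonic across pause/record cycles.
        if (CMTimeCompare(runningTime, kCMTimeZero)) {
            ts = CMTimeAdd(ts, runningTime);
        }
        return ts;
    }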
From 37b18411d0e6e8719872985bddf62c0552c04dc6 Mon Sep 17 00:00:00 2001
From: Roy Lovejoy
Date: Sat, 18 Aug 2018 21:11:38 -0700
Subject: [PATCH 32/55] crashlytics hang - multiple dispatch_sync oddness fix

---
 Library/Sources/SCRecordSession.m | 31 ++++++++++++++++++-------------
 1 file changed, 18 insertions(+), 13 deletions(-)

diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m
index b22c8f53..d8dd88eb 100644
--- a/Library/Sources/SCRecordSession.m
+++ b/Library/Sources/SCRecordSession.m
@@ -155,6 +155,7 @@ + (NSError*)createError:(NSString*)errorDescription {

 - (void)dispatchSyncOnSessionQueue:(void(^)(void))block {
     SCRecorder *recorder = self.recorder;
+    block = [block copy];
     if (recorder == nil || [SCRecorder isSessionQueue]) {
         block();
     } else {
@@ -519,7 +520,7 @@ - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRec
     __weak typeof(self) wSelf = self;
     [self dispatchSyncOnSessionQueue:^{
         typeof(self) iSelf = wSelf;
-        dispatch_sync(iSelf->_audioQueue, ^{
+//        dispatch_sync(iSelf->_audioQueue, ^{
             if (iSelf->_recordSegmentReady) {
                 iSelf->_recordSegmentReady = NO;
                 success = YES;
@@ -551,14 +552,14 @@ - (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRec
                 } else {
                     [iSelf->_movieFileOutput stopRecording];
                 }
-            } else {
-                dispatch_async(dispatch_get_main_queue(), ^{
-                    if (completionHandler != nil) {
-                        completionHandler(nil, [SCRecordSession createError:@"The current record segment is not ready for this operation"]);
-                    }
-                });
-            }
-        });
+            } else {
+                if (completionHandler != nil) {
+                    dispatch_async(dispatch_get_main_queue(), ^{
+                        completionHandler(nil, [SCRecordSession createError:@"The current record segment is not ready for this operation"]);
+                    });
+                }
+            }
+//        });
     }];

     return success;
@@ -715,14 +716,18 @@ - (void)cancelSession:(void (^)(void))completionHandler {
         typeof(self) iSelf = wSelf;
         if (iSelf->_assetWriter == nil) {
             [iSelf removeAllSegments:nil];
-            if (completionHandler != nil) {
-                completionHandler();
-            }
+            if (completionHandler != nil) {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    completionHandler();
+                });
+            }
         } else {
             [iSelf endSegmentWithInfo:nil completionHandler:^(SCRecordSessionSegment *segment, NSError *error) {
                 [iSelf removeAllSegments:nil];
                 if (completionHandler != nil) {
-                    completionHandler();
+                    dispatch_async(dispatch_get_main_queue(), ^{
+                        completionHandler();
+                    });
                 }
             }];
         }
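Two things changed in this patch: the block handed to dispatchSyncOnSessionQueue is now defensively heap-copied before it can escape onto another queue, and completion handlers are delivered on the main queue instead of the session queue, the usual convention when handlers touch UI state. A generic sketch of that delivery pattern (the helper name is ours, and running inline when already on the main thread is one common variant rather than exactly what the patch does):

    // Sketch: deliver a completion handler on the main queue.
    static void SCCallOnMainQueue(void (^handler)(void)) {
        if (handler == nil) {
            return;
        }
        if ([NSThread isMainThread]) {
            handler(); // already on the main thread; run inline
        } else {
            dispatch_async(dispatch_get_main_queue(), handler);
        }
    }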
From 6b0e5afb823fbf1ad856cbc9fe6c35256f2c8351 Mon Sep 17 00:00:00 2001
From: Gabriel Rozenberg
Date: Sun, 11 Nov 2018 12:30:29 -0500
Subject: [PATCH 33/55] now can use HEVC

---
 .../project.pbxproj                          |   10 +-
 Examples/ObjC/Sources/Main.storyboard        |  413 ++--
 .../Sources/SCAudioRecordViewController.m    |    2 +-
 .../Sources/SCRecorderExamples-Info.plist    |    6 +
 .../ObjC/Sources/SCRecorderViewController.h  |    2 +
 .../ObjC/Sources/SCRecorderViewController.m  |  573 +++--
 .../Sources/SCVideoPlayerViewController.m    |  473 ++--
 Library/SCRecorder.xcodeproj/project.pbxproj |    4 +-
 Library/Sources/SCAssetExportSession.m       |  346 +--
 Library/Sources/SCRecordSession.m            |  230 +-
 Library/Sources/SCRecorder.h                 |    3 +
 Library/Sources/SCRecorder.m                 | 2168 +++++++++--------
 Library/Sources/SCRecorderTools.m            |   51 +-
 Library/Sources/SCVideoConfiguration.h       |   23 +-
 Library/Sources/SCVideoConfiguration.m       |  170 +-
 15 files changed, 2280 insertions(+), 2194 deletions(-)

diff --git a/Examples/ObjC/SCRecorderExamples.xcodeproj/project.pbxproj b/Examples/ObjC/SCRecorderExamples.xcodeproj/project.pbxproj
index dd72632a..a172ec3e 100644
--- a/Examples/ObjC/SCRecorderExamples.xcodeproj/project.pbxproj
+++ b/Examples/ObjC/SCRecorderExamples.xcodeproj/project.pbxproj
@@ -7,7 +7,7 @@
 	objects = {

 /* Begin PBXBuildFile section */
-		90A0D7DE1BC7367A008F1BB0 /* Launch Screen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 90A0D7DD1BC7367A008F1BB0 /* Launch Screen.storyboard */; settings = {ASSET_TAGS = (); }; };
+		90A0D7DE1BC7367A008F1BB0 /* Launch Screen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 90A0D7DD1BC7367A008F1BB0 /* Launch Screen.storyboard */; };
 		90D4FBED1BC6DDEC0017748D /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 90D4FBEC1BC6DDEC0017748D /* Metal.framework */; settings = {ATTRIBUTES = (Weak, ); }; };
 		DC10CF251ACFD082009880C4 /* libSCRecorder.a in Frameworks */ = {isa = PBXBuildFile; fileRef = DC10CF1E1ACFCF6E009880C4 /* libSCRecorder.a */; };
 		DC1485DD1AFB78F2004896E9 /* a_filter.cisf in Resources */ = {isa = PBXBuildFile; fileRef = DC1485DC1AFB78F2004896E9 /* a_filter.cisf */; };
@@ -324,7 +324,7 @@
 				LastUpgradeCheck = 0510;
TargetAttributes = { DC7F370718FC2A0D00D54901 = { - DevelopmentTeam = S9T665AXM4; + DevelopmentTeam = D82RJZ5422; }; }; }; @@ -515,6 +515,7 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; DEFINES_MODULE = NO; + DEVELOPMENT_TEAM = D82RJZ5422; ENABLE_BITCODE = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = "Sources/SCRecorderExamples-Prefix.pch"; @@ -524,7 +525,7 @@ /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, ); INFOPLIST_FILE = "Sources/SCRecorderExamples-Info.plist"; - IPHONEOS_DEPLOYMENT_TARGET = 7.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; OTHER_LDFLAGS = ( "-all_load", "-ObjC", @@ -544,6 +545,7 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; DEFINES_MODULE = NO; + DEVELOPMENT_TEAM = D82RJZ5422; ENABLE_BITCODE = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = "Sources/SCRecorderExamples-Prefix.pch"; @@ -553,7 +555,7 @@ /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, ); INFOPLIST_FILE = "Sources/SCRecorderExamples-Info.plist"; - IPHONEOS_DEPLOYMENT_TARGET = 7.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; OTHER_LDFLAGS = ( "-all_load", "-ObjC", diff --git a/Examples/ObjC/Sources/Main.storyboard b/Examples/ObjC/Sources/Main.storyboard index f9b24a93..b640b782 100644 --- a/Examples/ObjC/Sources/Main.storyboard +++ b/Examples/ObjC/Sources/Main.storyboard @@ -1,8 +1,13 @@ - - + + + + + - + + + @@ -10,13 +15,11 @@ + - - - @@ -24,279 +27,259 @@ - + - + - - - - + + - + - + - - + + - - + + - - + + - - - + - - + - - + + - - - + + - - - + - - - - + - - - - + @@ -321,104 +304,94 @@ - + - + - - + + - - - - + + @@ -431,41 +404,38 @@ - + - + - - + + - - - + + - - - - + + @@ -475,107 +445,98 @@ - + - + - - - - + + @@ -591,25 +552,24 @@ - + - + - - + + - - + - - + + @@ -618,61 +578,54 @@ - + - + - - + + - - + - + - + - - + + - - + - - - @@ -687,8 +640,8 @@ - - + + @@ -698,7 +651,7 @@ - + diff --git a/Examples/ObjC/Sources/SCAudioRecordViewController.m b/Examples/ObjC/Sources/SCAudioRecordViewController.m index 2208d0cd..2a71c578 100644 --- a/Examples/ObjC/Sources/SCAudioRecordViewController.m +++ b/Examples/ObjC/Sources/SCAudioRecordViewController.m @@ -107,7 +107,7 @@ - (void)recorder:(SCRecorder *)recorder didAppendAudioSampleBuffer:(SCRecordSess - (void)deleteRecordSession { [self.player setItemByAsset:nil]; - [_recordSession removeAllSegments]; +// [_recordSession removeAllSegments]; _recordSession = nil; } diff --git a/Examples/ObjC/Sources/SCRecorderExamples-Info.plist b/Examples/ObjC/Sources/SCRecorderExamples-Info.plist index 0dc207c7..e18edab8 100644 --- a/Examples/ObjC/Sources/SCRecorderExamples-Info.plist +++ b/Examples/ObjC/Sources/SCRecorderExamples-Info.plist @@ -24,6 +24,12 @@ 1.0 LSRequiresIPhoneOS + NSCameraUsageDescription + tor record + NSMicrophoneUsageDescription + to mic + NSPhotoLibraryUsageDescription + gimee fotos acces UILaunchStoryboardName Launch Screen UIMainStoryboardFile diff --git a/Examples/ObjC/Sources/SCRecorderViewController.h b/Examples/ObjC/Sources/SCRecorderViewController.h index 918b864f..0fa0adbf 100644 --- a/Examples/ObjC/Sources/SCRecorderViewController.h +++ b/Examples/ObjC/Sources/SCRecorderViewController.h @@ -26,6 +26,8 @@ @property (weak, nonatomic) IBOutlet UIView *toolsContainerView; @property (weak, nonatomic) IBOutlet UIButton *openToolsButton; +@property (strong, nonatomic) UILongPressGestureRecognizer *longPressGestureRecognizer; + - 
(IBAction)switchCameraMode:(id)sender; - (IBAction)switchFlash:(id)sender; - (IBAction)capturePhoto:(id)sender; diff --git a/Examples/ObjC/Sources/SCRecorderViewController.m b/Examples/ObjC/Sources/SCRecorderViewController.m index 5f99eed9..0cb45c3d 100644 --- a/Examples/ObjC/Sources/SCRecorderViewController.m +++ b/Examples/ObjC/Sources/SCRecorderViewController.m @@ -6,16 +6,14 @@ // Copyright (c) 2013 SCorsin. All rights reserved. // -#import #import #import "SCTouchDetector.h" #import "SCRecorderViewController.h" #import "SCVideoPlayerViewController.h" #import "SCImageDisplayerViewController.h" -#import #import "SCSessionListViewController.h" #import "SCRecordSessionManager.h" -#import +#import "VideoToolbox/VTCompressionProperties.h" #define kVideoPreset AVCaptureSessionPresetHigh @@ -24,10 +22,10 @@ ///////////////////// @interface SCRecorderViewController () { - SCRecorder *_recorder; - UIImage *_photo; - SCRecordSession *_recordSession; - UIImageView *_ghostImageView; + SCRecorder *_recorder; + UIImage *_photo; + SCRecordSession *_recordSession; + UIImageView *_ghostImageView; } @property (strong, nonatomic) SCRecorderToolsView *focusView; @@ -40,7 +38,7 @@ @interface SCRecorderViewController () { @implementation SCRecorderViewController -#pragma mark - UIViewController +#pragma mark - UIViewController #if __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0 @@ -53,128 +51,177 @@ - (UIStatusBarStyle) preferredStatusBarStyle { #pragma mark - Left cycle - (void)dealloc { - _recorder.previewView = nil; + _recorder.previewView = nil; } - (void)viewDidLoad { - [super viewDidLoad]; - self.capturePhotoButton.alpha = 0.0; - - _ghostImageView = [[UIImageView alloc] initWithFrame:self.view.bounds]; - _ghostImageView.contentMode = UIViewContentModeScaleAspectFill; - _ghostImageView.alpha = 0.2; - _ghostImageView.userInteractionEnabled = NO; - _ghostImageView.hidden = YES; - - [self.view insertSubview:_ghostImageView aboveSubview:self.previewView]; - - _recorder = [SCRecorder recorder]; - _recorder.captureSessionPreset = [SCRecorderTools bestCaptureSessionPresetCompatibleWithAllDevices]; + [super viewDidLoad]; + self.capturePhotoButton.alpha = 0.0; + + _ghostImageView = [[UIImageView alloc] initWithFrame:self.view.bounds]; + _ghostImageView.contentMode = UIViewContentModeScaleAspectFill; + _ghostImageView.alpha = 0.2; + _ghostImageView.userInteractionEnabled = NO; + _ghostImageView.hidden = YES; + + [self.view insertSubview:_ghostImageView aboveSubview:self.previewView]; + + _recorder = [SCRecorder recorder]; + [_recorder beginConfiguration]; + _recorder.captureSessionPreset = AVCaptureSessionPresetInputPriority; +// _recorder.captureSessionPreset = [SCRecorderTools bestCaptureSessionPresetCompatibleWithAllDevices]; +// _recorder.captureSessionPreset = [SCRecorderTools bestCaptureSessionPresetForDevicePosition:AVCaptureDevicePositionBack withMaxSize:CGSizeMake(3840, 3840)]; + NSError *formatError; + [_recorder setActiveFormatWithFrameRate:60 width:1080 andHeight:1920 error:&formatError]; + _recorder.autoSetVideoOrientation = NO; //YES causes bad orientation for video from camera roll +// _recorder.automaticallyConfiguresApplicationAudioSession = YES; // _recorder.maxRecordDuration = CMTimeMake(10, 1); // _recorder.fastRecordMethodEnabled = YES; - - _recorder.delegate = self; - _recorder.autoSetVideoOrientation = NO; //YES causes bad orientation for video from camera roll - - UIView *previewView = self.previewView; - _recorder.previewView = previewView; - - [self.retakeButton addTarget:self 
action:@selector(handleRetakeButtonTapped:) forControlEvents:UIControlEventTouchUpInside];
-    [self.stopButton addTarget:self action:@selector(handleStopButtonTapped:) forControlEvents:UIControlEventTouchUpInside];
+    SCVideoConfiguration *videoConfiguration = _recorder.videoConfiguration;
+//    videoConfiguration.preset = SCPresetHighestQuality;
+//    videoConfiguration.bitrate = 10000000; //(10 mbps)
+//    videoConfiguration.bitrate = 85000000; //(85 mbps)
+//    videoConfiguration.bitrate = 100000000; //(100 mbps)
+//    videoConfiguration.bitrate = 150000000; //(150 mbps)
+//    videoConfiguration.size = CGSizeMake(1080, 1920);
+    videoConfiguration.size = CGSizeMake(2160, 3840);
+    videoConfiguration.maxFrameRate = 0;
+//    videoConfiguration.profileLevel = AVVideoProfileLevelH264HighAutoLevel;
+//    videoConfiguration.profileLevel = (__bridge NSString *)kVTProfileLevel_H264_High_5_2;
+    videoConfiguration.profileLevel = (__bridge NSString *)kVTProfileLevel_HEVC_Main_AutoLevel;
+//    videoConfiguration.profileLevel = (__bridge NSString *)kVTProfileLevel_HEVC_Main10_AutoLevel;
+//    videoConfiguration.timeScale = 1;
+    videoConfiguration.sizeAsSquare = NO;
+    videoConfiguration.codec = AVVideoCodecTypeHEVC;
+    videoConfiguration.enabled = YES;
+    _recorder.videoOrientation = AVCaptureVideoOrientationPortrait;
+    _recorder.photoConfiguration.enabled = NO;
+
+    SCAudioConfiguration *audio = _recorder.audioConfiguration;
+    audio.preset = SCPresetHighestQuality;
+    // Whether the audio should be enabled or not
+    // should already be enabled; setting it to YES again here causes a crash
+    // audio.enabled = YES;
+    // the bitrate of the audio output
+//    audio.bitrate = kBITLocalAudioBitrate;
+    // Number of audio output channels
+    audio.channelsCount = 2; // Stereo output
+    // The sample rate of the audio output
+//    audio.sampleRate = kBITLocalAudioSampleRate;
+    // The format of the audio output
+    audio.format = kAudioFormatMPEG4AAC; // AAC
+    audio.enabled = YES;
+
+    _recorder.delegate = self;
+
+    [_recorder setMirrorOnFrontCamera:YES];
+
+    UIView *previewView = self.previewView;
+    _recorder.previewView = previewView;
+
+    [self.retakeButton addTarget:self action:@selector(handleRetakeButtonTapped:) forControlEvents:UIControlEventTouchUpInside];
+    [self.stopButton addTarget:self action:@selector(handleStopButtonTapped:) forControlEvents:UIControlEventTouchUpInside];
     [self.reverseCamera addTarget:self action:@selector(handleReverseCameraTapped:) forControlEvents:UIControlEventTouchUpInside];
-
-    [self.recordView addGestureRecognizer:[[SCTouchDetector alloc] initWithTarget:self action:@selector(handleTouchDetected:)]];
-    self.loadingView.hidden = YES;
-
-    self.focusView = [[SCRecorderToolsView alloc] initWithFrame:previewView.bounds];
-    self.focusView.autoresizingMask = UIViewAutoresizingFlexibleBottomMargin | UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleWidth;
-    self.focusView.recorder = _recorder;
-    [previewView addSubview:self.focusView];
-
-    self.focusView.outsideFocusTargetImage = [UIImage imageNamed:@"capture_flip"];
-    self.focusView.insideFocusTargetImage = [UIImage imageNamed:@"capture_flip"];
-
-    _recorder.initializeSessionLazily = NO;
-
-    NSError *error;
-    if (![_recorder prepare:&error]) {
-        NSLog(@"Prepare error: %@", error.localizedDescription);
-    }
+
+//    [self.recordView addGestureRecognizer:[[SCTouchDetector alloc] initWithTarget:self action:@selector(handleTouchDetected:)]];
+    
self.longPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longPressed:)]; + [self.recordView addGestureRecognizer:self.longPressGestureRecognizer]; + self.loadingView.hidden = YES; + + self.focusView = [[SCRecorderToolsView alloc] initWithFrame:previewView.bounds]; + self.focusView.autoresizingMask = UIViewAutoresizingFlexibleBottomMargin | UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleWidth; + self.focusView.recorder = _recorder; + [previewView addSubview:self.focusView]; + + self.focusView.outsideFocusTargetImage = [UIImage imageNamed:@"capture_flip"]; + self.focusView.insideFocusTargetImage = [UIImage imageNamed:@"capture_flip"]; + + [_recorder commitConfiguration]; + + NSError *error; + if (![_recorder prepare:&error]) { + NSLog(@"Prepare error: %@", error.localizedDescription); + } + + if (!_recorder.isPrepared) { + [_recorder prepare:nil]; + } + _recorder.session = [SCRecordSession recordSession]; } - (void)recorder:(SCRecorder *)recorder didSkipVideoSampleBufferInSession:(SCRecordSession *)recordSession { - NSLog(@"Skipped video buffer"); + NSLog(@"Skipped video buffer"); } - (void)recorder:(SCRecorder *)recorder didReconfigureAudioInput:(NSError *)audioInputError { - NSLog(@"Reconfigured audio input: %@", audioInputError); + NSLog(@"Reconfigured audio input: %@", audioInputError); } - (void)recorder:(SCRecorder *)recorder didReconfigureVideoInput:(NSError *)videoInputError { - NSLog(@"Reconfigured video input: %@", videoInputError); + NSLog(@"Reconfigured video input: %@", videoInputError); } - (void)viewWillAppear:(BOOL)animated { - [super viewWillAppear:animated]; - - [self prepareSession]; - + [super viewWillAppear:animated]; + + [self prepareSession]; + self.navigationController.navigationBarHidden = YES; } - (void)viewDidLayoutSubviews { - [super viewDidLayoutSubviews]; - - [_recorder previewViewFrameChanged]; + [super viewDidLayoutSubviews]; + + [_recorder previewViewFrameChanged]; } - (void)viewDidAppear:(BOOL)animated { - [super viewDidAppear:animated]; - - [_recorder startRunning]; + [super viewDidAppear:animated]; + + [_recorder startRunning]; } - (void)viewWillDisappear:(BOOL)animated { - [super viewWillDisappear:animated]; + [super viewWillDisappear:animated]; - [_recorder stopRunning]; + [_recorder stopRunning]; } - (void)viewDidDisappear:(BOOL)animated { - [super viewDidDisappear:animated]; - - self.navigationController.navigationBarHidden = NO; + [super viewDidDisappear:animated]; + + self.navigationController.navigationBarHidden = NO; } #pragma mark - Handle - (void)showAlertViewWithTitle:(NSString*)title message:(NSString*) message { - UIAlertView * alertView = [[UIAlertView alloc] initWithTitle:title message:message delegate:nil cancelButtonTitle:nil otherButtonTitles:@"OK", nil]; - [alertView show]; + UIAlertView * alertView = [[UIAlertView alloc] initWithTitle:title message:message delegate:nil cancelButtonTitle:nil otherButtonTitles:@"OK", nil]; + [alertView show]; } - (void)showVideo { - [self performSegueWithIdentifier:@"Video" sender:self]; + [self performSegueWithIdentifier:@"Video" sender:self]; } - (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender { - if ([segue.destinationViewController isKindOfClass:[SCVideoPlayerViewController class]]) { - SCVideoPlayerViewController *videoPlayer = segue.destinationViewController; - videoPlayer.recordSession = 
_recordSession; - } else if ([segue.destinationViewController isKindOfClass:[SCImageDisplayerViewController class]]) { - SCImageDisplayerViewController *imageDisplayer = segue.destinationViewController; - imageDisplayer.photo = _photo; - _photo = nil; - } else if ([segue.destinationViewController isKindOfClass:[SCSessionListViewController class]]) { - SCSessionListViewController *sessionListVC = segue.destinationViewController; - - sessionListVC.recorder = _recorder; - } + if ([segue.destinationViewController isKindOfClass:[SCVideoPlayerViewController class]]) { + SCVideoPlayerViewController *videoPlayer = segue.destinationViewController; + videoPlayer.recordSession = _recordSession; + } else if ([segue.destinationViewController isKindOfClass:[SCImageDisplayerViewController class]]) { + SCImageDisplayerViewController *imageDisplayer = segue.destinationViewController; + imageDisplayer.photo = _photo; + _photo = nil; + } else if ([segue.destinationViewController isKindOfClass:[SCSessionListViewController class]]) { + SCSessionListViewController *sessionListVC = segue.destinationViewController; + + sessionListVC.recorder = _recorder; + } } - (void)showPhoto:(UIImage *)photo { - _photo = photo; - [self performSegueWithIdentifier:@"Photo" sender:self]; + _photo = photo; + [self performSegueWithIdentifier:@"Photo" sender:self]; } - (void) handleReverseCameraTapped:(id)sender { @@ -182,239 +229,253 @@ - (void) handleReverseCameraTapped:(id)sender { } - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info { - NSURL *url = info[UIImagePickerControllerMediaURL]; - [picker dismissViewControllerAnimated:YES completion:nil]; - - SCRecordSessionSegment *segment = [SCRecordSessionSegment segmentWithURL:url info:nil]; - - [_recorder.session addSegment:segment]; - _recordSession = [SCRecordSession recordSession]; - [_recordSession addSegment:segment]; - - [self showVideo]; + NSURL *url = info[UIImagePickerControllerMediaURL]; + [picker dismissViewControllerAnimated:YES completion:nil]; + + SCRecordSessionSegment *segment = [SCRecordSessionSegment segmentWithURL:url info:nil]; + + [_recorder.session addSegment:segment]; + _recordSession = [SCRecordSession recordSession]; + [_recordSession addSegment:segment]; + + [self showVideo]; } - (void) handleStopButtonTapped:(id)sender { - [_recorder pause:^{ - [self saveAndShowSession:_recorder.session]; - }]; + [_recorder pause:^{ + [self saveAndShowSession:_recorder.session]; + }]; } - (void)saveAndShowSession:(SCRecordSession *)recordSession { - [[SCRecordSessionManager sharedInstance] saveRecordSession:recordSession]; - - _recordSession = recordSession; - [self showVideo]; + [[SCRecordSessionManager sharedInstance] saveRecordSession:recordSession]; + + _recordSession = recordSession; + [self showVideo]; } - (void)handleRetakeButtonTapped:(id)sender { - SCRecordSession *recordSession = _recorder.session; - - if (recordSession != nil) { - _recorder.session = nil; - - // If the recordSession was saved, we don't want to completely destroy it - if ([[SCRecordSessionManager sharedInstance] isSaved:recordSession]) { - [recordSession endSegmentWithInfo:nil completionHandler:nil]; - } else { - [recordSession cancelSession:nil]; - } - } - + SCRecordSession *recordSession = _recorder.session; + + if (recordSession != nil) { + _recorder.session = nil; + + // If the recordSession was saved, we don't want to completely destroy it + if ([[SCRecordSessionManager sharedInstance] isSaved:recordSession]) { + [recordSession 
endSegmentWithInfo:nil completionHandler:nil]; + } else { + [recordSession cancelSession:nil]; + } + } + [self prepareSession]; } - (IBAction)switchCameraMode:(id)sender { - if ([_recorder.captureSessionPreset isEqualToString:AVCaptureSessionPresetPhoto]) { - [UIView animateWithDuration:0.3 delay:0 options:UIViewAnimationOptionCurveEaseInOut animations:^{ - self.capturePhotoButton.alpha = 0.0; - self.recordView.alpha = 1.0; - self.retakeButton.alpha = 1.0; - self.stopButton.alpha = 1.0; - } completion:^(BOOL finished) { + if ([_recorder.captureSessionPreset isEqualToString:AVCaptureSessionPresetPhoto]) { + [UIView animateWithDuration:0.3 delay:0 options:UIViewAnimationOptionCurveEaseInOut animations:^{ + self.capturePhotoButton.alpha = 0.0; + self.recordView.alpha = 1.0; + self.retakeButton.alpha = 1.0; + self.stopButton.alpha = 1.0; + } completion:^(BOOL finished) { _recorder.captureSessionPreset = kVideoPreset; - [self.switchCameraModeButton setTitle:@"Switch Photo" forState:UIControlStateNormal]; - [self.flashModeButton setTitle:@"Flash : Off" forState:UIControlStateNormal]; - _recorder.flashMode = SCFlashModeOff; - }]; - } else { - [UIView animateWithDuration:0.3 delay:0 options:UIViewAnimationOptionCurveEaseInOut animations:^{ - self.recordView.alpha = 0.0; - self.retakeButton.alpha = 0.0; - self.stopButton.alpha = 0.0; - self.capturePhotoButton.alpha = 1.0; - } completion:^(BOOL finished) { + [self.switchCameraModeButton setTitle:@"Switch Photo" forState:UIControlStateNormal]; + [self.flashModeButton setTitle:@"Flash : Off" forState:UIControlStateNormal]; + _recorder.flashMode = SCFlashModeOff; + }]; + } else { + [UIView animateWithDuration:0.3 delay:0 options:UIViewAnimationOptionCurveEaseInOut animations:^{ + self.recordView.alpha = 0.0; + self.retakeButton.alpha = 0.0; + self.stopButton.alpha = 0.0; + self.capturePhotoButton.alpha = 1.0; + } completion:^(BOOL finished) { _recorder.captureSessionPreset = AVCaptureSessionPresetPhoto; - [self.switchCameraModeButton setTitle:@"Switch Video" forState:UIControlStateNormal]; - [self.flashModeButton setTitle:@"Flash : Auto" forState:UIControlStateNormal]; - _recorder.flashMode = SCFlashModeAuto; - }]; - } + [self.switchCameraModeButton setTitle:@"Switch Video" forState:UIControlStateNormal]; + [self.flashModeButton setTitle:@"Flash : Auto" forState:UIControlStateNormal]; + _recorder.flashMode = SCFlashModeAuto; + }]; + } } - (IBAction)switchFlash:(id)sender { - NSString *flashModeString = nil; - if ([_recorder.captureSessionPreset isEqualToString:AVCaptureSessionPresetPhoto]) { - switch (_recorder.flashMode) { - case SCFlashModeAuto: - flashModeString = @"Flash : Off"; - _recorder.flashMode = SCFlashModeOff; - break; - case SCFlashModeOff: - flashModeString = @"Flash : On"; - _recorder.flashMode = SCFlashModeOn; - break; - case SCFlashModeOn: - flashModeString = @"Flash : Light"; - _recorder.flashMode = SCFlashModeLight; - break; - case SCFlashModeLight: - flashModeString = @"Flash : Auto"; - _recorder.flashMode = SCFlashModeAuto; - break; - default: - break; - } - } else { - switch (_recorder.flashMode) { - case SCFlashModeOff: - flashModeString = @"Flash : On"; - _recorder.flashMode = SCFlashModeLight; - break; - case SCFlashModeLight: - flashModeString = @"Flash : Off"; - _recorder.flashMode = SCFlashModeOff; - break; - default: - break; - } - } - - [self.flashModeButton setTitle:flashModeString forState:UIControlStateNormal]; + NSString *flashModeString = nil; + if ([_recorder.captureSessionPreset 
isEqualToString:AVCaptureSessionPresetPhoto]) { + switch (_recorder.flashMode) { + case SCFlashModeAuto: + flashModeString = @"Flash : Off"; + _recorder.flashMode = SCFlashModeOff; + break; + case SCFlashModeOff: + flashModeString = @"Flash : On"; + _recorder.flashMode = SCFlashModeOn; + break; + case SCFlashModeOn: + flashModeString = @"Flash : Light"; + _recorder.flashMode = SCFlashModeLight; + break; + case SCFlashModeLight: + flashModeString = @"Flash : Auto"; + _recorder.flashMode = SCFlashModeAuto; + break; + default: + break; + } + } else { + switch (_recorder.flashMode) { + case SCFlashModeOff: + flashModeString = @"Flash : On"; + _recorder.flashMode = SCFlashModeLight; + break; + case SCFlashModeLight: + flashModeString = @"Flash : Off"; + _recorder.flashMode = SCFlashModeOff; + break; + default: + break; + } + } + + [self.flashModeButton setTitle:flashModeString forState:UIControlStateNormal]; } - (void)prepareSession { - if (_recorder.session == nil) { - - SCRecordSession *session = [SCRecordSession recordSession]; - session.fileType = AVFileTypeQuickTimeMovie; - - _recorder.session = session; - } - - [self updateTimeRecordedLabel]; - [self updateGhostImage]; + if (_recorder.session == nil) { + + SCRecordSession *session = [SCRecordSession recordSession]; + session.fileType = AVFileTypeQuickTimeMovie; + _recorder.session = session; + } + + [self updateTimeRecordedLabel]; + [self updateGhostImage]; } - (void)recorder:(SCRecorder *)recorder didCompleteSession:(SCRecordSession *)recordSession { - NSLog(@"didCompleteSession:"); - [self saveAndShowSession:recordSession]; + NSLog(@"didCompleteSession:"); + [self saveAndShowSession:recordSession]; } - (void)recorder:(SCRecorder *)recorder didInitializeAudioInSession:(SCRecordSession *)recordSession error:(NSError *)error { - if (error == nil) { - NSLog(@"Initialized audio in record session"); - } else { - NSLog(@"Failed to initialize audio in record session: %@", error.localizedDescription); - } + if (error == nil) { + NSLog(@"Initialized audio in record session"); + } else { + NSLog(@"Failed to initialize audio in record session: %@", error.localizedDescription); + } } - (void)recorder:(SCRecorder *)recorder didInitializeVideoInSession:(SCRecordSession *)recordSession error:(NSError *)error { - if (error == nil) { - NSLog(@"Initialized video in record session"); - } else { - NSLog(@"Failed to initialize video in record session: %@", error.localizedDescription); - } + if (error == nil) { + NSLog(@"Initialized video in record session"); + } else { + NSLog(@"Failed to initialize video in record session: %@", error.localizedDescription); + } } - (void)recorder:(SCRecorder *)recorder didBeginSegmentInSession:(SCRecordSession *)recordSession error:(NSError *)error { - NSLog(@"Began record segment: %@", error); + NSLog(@"Began record segment: %@", error); } - (void)recorder:(SCRecorder *)recorder didCompleteSegment:(SCRecordSessionSegment *)segment inSession:(SCRecordSession *)recordSession error:(NSError *)error { - NSLog(@"Completed record segment at %@: %@ (frameRate: %f)", segment.url, error, segment.frameRate); - [self updateGhostImage]; + NSLog(@"Completed record segment at %@: %@ (frameRate: %f)", segment.url, error, segment.frameRate); + [self updateGhostImage]; } - (void)updateTimeRecordedLabel { - CMTime currentTime = kCMTimeZero; - - if (_recorder.session != nil) { - currentTime = _recorder.session.duration; - } - - self.timeRecordedLabel.text = [NSString stringWithFormat:@"%.2f sec", CMTimeGetSeconds(currentTime)]; + CMTime 
currentTime = kCMTimeZero; + + if (_recorder.session != nil) { + currentTime = _recorder.session.duration; + } + + self.timeRecordedLabel.text = [NSString stringWithFormat:@"%.2f sec", CMTimeGetSeconds(currentTime)]; } - (void)recorder:(SCRecorder *)recorder didAppendVideoSampleBufferInSession:(SCRecordSession *)recordSession { - [self updateTimeRecordedLabel]; + [self updateTimeRecordedLabel]; +} + +- (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBuffer:(SInt16*)audioBuffer + length:(CMItemCount)length timestamp:(CMTime)time { +} + +- (void)longPressed:(UILongPressGestureRecognizer *)gesture { +// NSLog(@"long press with state %i", gesture.state); + if (gesture.state == UIGestureRecognizerStateBegan) { + _ghostImageView.hidden = YES; + [_recorder prerecord]; + [_recorder record]; + } else if (gesture.state == UIGestureRecognizerStateEnded) { + [_recorder pause]; + } } - (void)handleTouchDetected:(SCTouchDetector*)touchDetector { - if (touchDetector.state == UIGestureRecognizerStateBegan) { - _ghostImageView.hidden = YES; - [_recorder record]; - } else if (touchDetector.state == UIGestureRecognizerStateEnded) { - [_recorder pause]; - } + if (touchDetector.state == UIGestureRecognizerStateBegan) { + _ghostImageView.hidden = YES; + [_recorder record]; + } else if (touchDetector.state == UIGestureRecognizerStateEnded) { + [_recorder pause]; + } } - (IBAction)capturePhoto:(id)sender { - [_recorder capturePhoto:^(NSError *error, UIImage *image) { - if (image != nil) { - [self showPhoto:image]; - } else { - [self showAlertViewWithTitle:@"Failed to capture photo" message:error.localizedDescription]; - } - }]; + [_recorder capturePhoto:^(NSError *error, UIImage *image) { + if (image != nil) { + [self showPhoto:image]; + } else { + [self showAlertViewWithTitle:@"Failed to capture photo" message:error.localizedDescription]; + } + }]; } - (void)updateGhostImage { - UIImage *image = nil; - - if (_ghostModeButton.selected) { - if (_recorder.session.segments.count > 0) { - SCRecordSessionSegment *segment = [_recorder.session.segments lastObject]; - image = segment.lastImage; - } - } - - - _ghostImageView.image = image; + UIImage *image = nil; + + if (_ghostModeButton.selected) { + if (_recorder.session.segments.count > 0) { + SCRecordSessionSegment *segment = [_recorder.session.segments lastObject]; + image = segment.lastImage; + } + } + + + _ghostImageView.image = image; // _ghostImageView.image = [_recorder snapshotOfLastAppendedVideoBuffer]; - _ghostImageView.hidden = !_ghostModeButton.selected; + _ghostImageView.hidden = !_ghostModeButton.selected; } - (BOOL)prefersStatusBarHidden { - return YES; + return YES; } - (IBAction)switchGhostMode:(id)sender { - _ghostModeButton.selected = !_ghostModeButton.selected; - _ghostImageView.hidden = !_ghostModeButton.selected; - - [self updateGhostImage]; + _ghostModeButton.selected = !_ghostModeButton.selected; + _ghostImageView.hidden = !_ghostModeButton.selected; + + [self updateGhostImage]; } - (IBAction)toolsButtonTapped:(UIButton *)sender { - CGRect toolsFrame = self.toolsContainerView.frame; - CGRect openToolsButtonFrame = self.openToolsButton.frame; - - if (toolsFrame.origin.y < 0) { - sender.selected = YES; - toolsFrame.origin.y = 0; - openToolsButtonFrame.origin.y = toolsFrame.size.height + 15; - } else { - sender.selected = NO; - toolsFrame.origin.y = -toolsFrame.size.height; - openToolsButtonFrame.origin.y = 15; - } - - [UIView animateWithDuration:0.15 animations:^{ - self.toolsContainerView.frame = toolsFrame; - 
self.openToolsButton.frame = openToolsButtonFrame; - }]; + CGRect toolsFrame = self.toolsContainerView.frame; + CGRect openToolsButtonFrame = self.openToolsButton.frame; + + if (toolsFrame.origin.y < 0) { + sender.selected = YES; + toolsFrame.origin.y = 0; + openToolsButtonFrame.origin.y = toolsFrame.size.height + 15; + } else { + sender.selected = NO; + toolsFrame.origin.y = -toolsFrame.size.height; + openToolsButtonFrame.origin.y = 15; + } + + [UIView animateWithDuration:0.15 animations:^{ + self.toolsContainerView.frame = toolsFrame; + self.openToolsButton.frame = openToolsButtonFrame; + }]; } - (IBAction)closeCameraTapped:(id)sender { - [self.navigationController popViewControllerAnimated:YES]; + [self.navigationController popViewControllerAnimated:YES]; } @end diff --git a/Examples/ObjC/Sources/SCVideoPlayerViewController.m b/Examples/ObjC/Sources/SCVideoPlayerViewController.m index ef83876c..39d4c52b 100644 --- a/Examples/ObjC/Sources/SCVideoPlayerViewController.m +++ b/Examples/ObjC/Sources/SCVideoPlayerViewController.m @@ -10,6 +10,7 @@ #import "SCEditVideoViewController.h" #import "SCWatermarkOverlayView.h" #import +#import @interface SCVideoPlayerViewController () @@ -22,273 +23,307 @@ @implementation SCVideoPlayerViewController - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil { - self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil]; - - if (self) { - // Custom initialization - } - - return self; + self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil]; + + if (self) { + // Custom initialization + } + + return self; } - (void)dealloc { - [self.filterSwitcherView removeObserver:self forKeyPath:@"selectedFilter"]; - self.filterSwitcherView = nil; - [_player pause]; - _player = nil; - [self cancelSaveToCameraRoll]; + [self.filterSwitcherView removeObserver:self forKeyPath:@"selectedFilter"]; + self.filterSwitcherView = nil; + [_player pause]; + _player = nil; + [self cancelSaveToCameraRoll]; } - (SCFilter *)createAnimatedFilter { - SCFilter *animatedFilter = [SCFilter emptyFilter]; - animatedFilter.name = @"Animated Filter"; - - SCFilter *gaussian = [SCFilter filterWithCIFilterName:@"CIGaussianBlur"]; - SCFilter *blackAndWhite = [SCFilter filterWithCIFilterName:@"CIColorControls"]; - - [animatedFilter addSubFilter:gaussian]; - [animatedFilter addSubFilter:blackAndWhite]; - - double duration = 0.5; - double currentTime = 0; - BOOL isAscending = YES; - - Float64 assetDuration = CMTimeGetSeconds(_recordSession.assetRepresentingSegments.duration); - - while (currentTime < assetDuration) { - if (isAscending) { - [blackAndWhite addAnimationForParameterKey:kCIInputSaturationKey startValue:@1 endValue:@0 startTime:currentTime duration:duration]; - [gaussian addAnimationForParameterKey:kCIInputRadiusKey startValue:@0 endValue:@10 startTime:currentTime duration:duration]; - } else { - [blackAndWhite addAnimationForParameterKey:kCIInputSaturationKey startValue:@0 endValue:@1 startTime:currentTime duration:duration]; - [gaussian addAnimationForParameterKey:kCIInputRadiusKey startValue:@10 endValue:@0 startTime:currentTime duration:duration]; - } - - currentTime += duration; - isAscending = !isAscending; - } - - return animatedFilter; + SCFilter *animatedFilter = [SCFilter emptyFilter]; + animatedFilter.name = @"Animated Filter"; + + SCFilter *gaussian = [SCFilter filterWithCIFilterName:@"CIGaussianBlur"]; + SCFilter *blackAndWhite = [SCFilter filterWithCIFilterName:@"CIColorControls"]; + + [animatedFilter addSubFilter:gaussian]; + 
[animatedFilter addSubFilter:blackAndWhite]; + + double duration = 0.5; + double currentTime = 0; + BOOL isAscending = YES; + + Float64 assetDuration = CMTimeGetSeconds(_recordSession.assetRepresentingSegments.duration); + + while (currentTime < assetDuration) { + if (isAscending) { + [blackAndWhite addAnimationForParameterKey:kCIInputSaturationKey startValue:@1 endValue:@0 startTime:currentTime duration:duration]; + [gaussian addAnimationForParameterKey:kCIInputRadiusKey startValue:@0 endValue:@10 startTime:currentTime duration:duration]; + } else { + [blackAndWhite addAnimationForParameterKey:kCIInputSaturationKey startValue:@0 endValue:@1 startTime:currentTime duration:duration]; + [gaussian addAnimationForParameterKey:kCIInputRadiusKey startValue:@10 endValue:@0 startTime:currentTime duration:duration]; + } + + currentTime += duration; + isAscending = !isAscending; + } + + return animatedFilter; } -- (void)viewDidLoad -{ - [super viewDidLoad]; - - self.exportView.clipsToBounds = YES; - self.exportView.layer.cornerRadius = 20; - UIBarButtonItem *saveButton = [[UIBarButtonItem alloc] initWithTitle:@"Save" style:UIBarButtonItemStyleBordered target:self action:@selector(saveToCameraRoll)]; - UIBarButtonItem *addButton = [[UIBarButtonItem alloc] initWithTitle:@"Add" style:UIBarButtonItemStyleBordered target:self action:@selector(startMediaBrowser)]; - self.navigationItem.rightBarButtonItems = @[saveButton, addButton]; - +- (void)viewDidLoad { + [super viewDidLoad]; + + self.exportView.clipsToBounds = YES; + self.exportView.layer.cornerRadius = 20; + UIBarButtonItem *saveButton = [[UIBarButtonItem alloc] initWithTitle:@"Save" style:UIBarButtonItemStyleBordered target:self action:@selector(saveToCameraRoll)]; + UIBarButtonItem *addButton = [[UIBarButtonItem alloc] initWithTitle:@"Add" style:UIBarButtonItemStyleBordered target:self action:@selector(startMediaBrowser)]; + self.navigationItem.rightBarButtonItems = @[saveButton, addButton]; + _player = [SCPlayer player]; - - if ([[NSProcessInfo processInfo] activeProcessorCount] > 1) { - self.filterSwitcherView.contentMode = UIViewContentModeScaleAspectFill; - - SCFilter *emptyFilter = [SCFilter emptyFilter]; - emptyFilter.name = @"#nofilter"; - - self.filterSwitcherView.filters = @[ - emptyFilter, - [SCFilter filterWithCIFilterName:@"CIPhotoEffectNoir"], - [SCFilter filterWithCIFilterName:@"CIPhotoEffectChrome"], - [SCFilter filterWithCIFilterName:@"CIPhotoEffectInstant"], - [SCFilter filterWithCIFilterName:@"CIPhotoEffectTonal"], - [SCFilter filterWithCIFilterName:@"CIPhotoEffectFade"], - // Adding a filter created using CoreImageShop - [SCFilter filterWithContentsOfURL:[[NSBundle mainBundle] URLForResource:@"a_filter" withExtension:@"cisf"]], - [self createAnimatedFilter] - ]; - _player.SCImageView = self.filterSwitcherView; - [self.filterSwitcherView addObserver:self forKeyPath:@"selectedFilter" options:NSKeyValueObservingOptionNew context:nil]; - } else { - SCVideoPlayerView *playerView = [[SCVideoPlayerView alloc] initWithPlayer:_player]; - playerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; - playerView.frame = self.filterSwitcherView.frame; - playerView.autoresizingMask = self.filterSwitcherView.autoresizingMask; - [self.filterSwitcherView.superview insertSubview:playerView aboveSubview:self.filterSwitcherView]; - [self.filterSwitcherView removeFromSuperview]; - } - + + if ([[NSProcessInfo processInfo] activeProcessorCount] > 1) { + self.filterSwitcherView.contentMode = UIViewContentModeScaleAspectFill; + + SCFilter 
*emptyFilter = [SCFilter emptyFilter]; + emptyFilter.name = @"#nofilter"; + + self.filterSwitcherView.filters = @[ + emptyFilter, + [SCFilter filterWithCIFilterName:@"CIPhotoEffectNoir"], + [SCFilter filterWithCIFilterName:@"CIPhotoEffectChrome"], + [SCFilter filterWithCIFilterName:@"CIPhotoEffectInstant"], + [SCFilter filterWithCIFilterName:@"CIPhotoEffectTonal"], + [SCFilter filterWithCIFilterName:@"CIPhotoEffectFade"], + // Adding a filter created using CoreImageShop + [SCFilter filterWithContentsOfURL:[[NSBundle mainBundle] URLForResource:@"a_filter" withExtension:@"cisf"]], + [self createAnimatedFilter] + ]; + _player.SCImageView = self.filterSwitcherView; + [self.filterSwitcherView addObserver:self forKeyPath:@"selectedFilter" options:NSKeyValueObservingOptionNew context:nil]; + } else { + SCVideoPlayerView *playerView = [[SCVideoPlayerView alloc] initWithPlayer:_player]; + playerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; + playerView.frame = self.filterSwitcherView.frame; + playerView.autoresizingMask = self.filterSwitcherView.autoresizingMask; + [self.filterSwitcherView.superview insertSubview:playerView aboveSubview:self.filterSwitcherView]; + [self.filterSwitcherView removeFromSuperview]; + } + _player.loopEnabled = YES; } - (void)viewWillAppear:(BOOL)animated { - [super viewWillAppear:animated]; - - [_player setItemByAsset:_recordSession.assetRepresentingSegments]; + [super viewWillAppear:animated]; + + [_player setItemByAsset:_recordSession.assetRepresentingSegments]; [_player play]; } - (void)viewWillDisappear:(BOOL)animated { - [super viewWillDisappear:animated]; - - [_player pause]; + [super viewWillDisappear:animated]; + + [_player pause]; } - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { - if (object == self.filterSwitcherView) { - self.filterNameLabel.hidden = NO; - self.filterNameLabel.text = self.filterSwitcherView.selectedFilter.name; - self.filterNameLabel.alpha = 0; - [UIView animateWithDuration:0.3 animations:^{ - self.filterNameLabel.alpha = 1; - } completion:^(BOOL finished) { - if (finished) { - [UIView animateWithDuration:0.3 delay:1 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ - self.filterNameLabel.alpha = 0; - } completion:^(BOOL finished) { - - }]; - } - }]; - } + if (object == self.filterSwitcherView) { + self.filterNameLabel.hidden = NO; + self.filterNameLabel.text = self.filterSwitcherView.selectedFilter.name; + self.filterNameLabel.alpha = 0; + [UIView animateWithDuration:0.3 animations:^{ + self.filterNameLabel.alpha = 1; + } completion:^(BOOL finished) { + if (finished) { + [UIView animateWithDuration:0.3 delay:1 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ + self.filterNameLabel.alpha = 0; + } completion:^(BOOL finished) { + + }]; + } + }]; + } } - (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender { - if ([segue.destinationViewController isKindOfClass:[SCEditVideoViewController class]]) { - SCEditVideoViewController *editVideo = segue.destinationViewController; - editVideo.recordSession = self.recordSession; - } + if ([segue.destinationViewController isKindOfClass:[SCEditVideoViewController class]]) { + SCEditVideoViewController *editVideo = segue.destinationViewController; + editVideo.recordSession = self.recordSession; + } } - (void)assetExportSessionDidProgress:(SCAssetExportSession *)assetExportSession { - dispatch_async(dispatch_get_main_queue(), ^{ - float progress = 
assetExportSession.progress; - - CGRect frame = self.progressView.frame; - frame.size.width = self.progressView.superview.frame.size.width * progress; - self.progressView.frame = frame; - }); + dispatch_async(dispatch_get_main_queue(), ^{ + float progress = assetExportSession.progress; + + CGRect frame = self.progressView.frame; + frame.size.width = self.progressView.superview.frame.size.width * progress; + self.progressView.frame = frame; + }); } - (void)cancelSaveToCameraRoll { - [_exportSession cancelExport]; + [_exportSession cancelExport]; } - (IBAction)cancelTapped:(id)sender { - [self cancelSaveToCameraRoll]; + [self cancelSaveToCameraRoll]; } - (void)_addActionToAlertController:(UIAlertController *)alertController forType:(SCContextType)contextType withName:(NSString *)name { - if ([SCContext supportsType:contextType]) { - UIAlertActionStyle style = (self.filterSwitcherView.contextType != contextType ? UIAlertActionStyleDefault : UIAlertActionStyleDestructive); - UIAlertAction *action = [UIAlertAction actionWithTitle:name style:style handler:^(UIAlertAction * _Nonnull action) { - self.filterSwitcherView.contextType = contextType; - }]; - [alertController addAction:action]; - } + if ([SCContext supportsType:contextType]) { + UIAlertActionStyle style = (self.filterSwitcherView.contextType != contextType ? UIAlertActionStyleDefault : UIAlertActionStyleDestructive); + UIAlertAction *action = [UIAlertAction actionWithTitle:name style:style handler:^(UIAlertAction * _Nonnull action) { + self.filterSwitcherView.contextType = contextType; + }]; + [alertController addAction:action]; + } } - (IBAction)changeRenderingModeTapped:(id)sender { - UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"Change video rendering mode" message:nil preferredStyle:UIAlertControllerStyleActionSheet]; - [self _addActionToAlertController:alertController forType:SCContextTypeAuto withName:@"Auto"]; - [self _addActionToAlertController:alertController forType:SCContextTypeMetal withName:@"Metal"]; - [self _addActionToAlertController:alertController forType:SCContextTypeEAGL withName:@"EAGL"]; - [self _addActionToAlertController:alertController forType:SCContextTypeCoreGraphics withName:@"Core Graphics"]; - [alertController addAction:[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil]]; - - [self presentViewController:alertController animated:YES completion:nil]; + UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"Change video rendering mode" message:nil preferredStyle:UIAlertControllerStyleActionSheet]; + [self _addActionToAlertController:alertController forType:SCContextTypeAuto withName:@"Auto"]; + [self _addActionToAlertController:alertController forType:SCContextTypeMetal withName:@"Metal"]; + [self _addActionToAlertController:alertController forType:SCContextTypeEAGL withName:@"EAGL"]; + [self _addActionToAlertController:alertController forType:SCContextTypeCoreGraphics withName:@"Core Graphics"]; + [alertController addAction:[UIAlertAction actionWithTitle:@"Cancel" style:UIAlertActionStyleCancel handler:nil]]; + + [self presentViewController:alertController animated:YES completion:nil]; } - (void)saveToCameraRoll { - self.navigationItem.rightBarButtonItem.enabled = NO; - SCFilter *currentFilter = [self.filterSwitcherView.selectedFilter copy]; - [_player pause]; - - SCAssetExportSession *exportSession = [[SCAssetExportSession alloc] initWithAsset:self.recordSession.assetRepresentingSegments]; - 
exportSession.videoConfiguration.filter = currentFilter; - exportSession.videoConfiguration.preset = SCPresetHighestQuality; - exportSession.audioConfiguration.preset = SCPresetHighestQuality; - exportSession.videoConfiguration.maxFrameRate = 35; - exportSession.outputUrl = self.recordSession.outputUrl; - exportSession.outputFileType = AVFileTypeMPEG4; - exportSession.delegate = self; - exportSession.contextType = SCContextTypeAuto; - self.exportSession = exportSession; - - self.exportView.hidden = NO; - self.exportView.alpha = 0; - CGRect frame = self.progressView.frame; - frame.size.width = 0; - self.progressView.frame = frame; - - [UIView animateWithDuration:0.3 animations:^{ - self.exportView.alpha = 1; - }]; - - SCWatermarkOverlayView *overlay = [SCWatermarkOverlayView new]; - overlay.date = self.recordSession.date; - exportSession.videoConfiguration.overlay = overlay; - NSLog(@"Starting exporting"); - - CFTimeInterval time = CACurrentMediaTime(); - __weak typeof(self) wSelf = self; - [exportSession exportAsynchronouslyWithCompletionHandler:^{ - __strong typeof(self) strongSelf = wSelf; - - if (!exportSession.cancelled) { - NSLog(@"Completed compression in %fs", CACurrentMediaTime() - time); - } - - if (strongSelf != nil) { - [strongSelf.player play]; - strongSelf.exportSession = nil; - strongSelf.navigationItem.rightBarButtonItem.enabled = YES; - - [UIView animateWithDuration:0.3 animations:^{ - strongSelf.exportView.alpha = 0; - }]; - } - - NSError *error = exportSession.error; - if (exportSession.cancelled) { - NSLog(@"Export was cancelled"); - } else if (error == nil) { - [[UIApplication sharedApplication] beginIgnoringInteractionEvents]; - [exportSession.outputUrl saveToCameraRollWithCompletion:^(NSString * _Nullable path, NSError * _Nullable error) { - [[UIApplication sharedApplication] endIgnoringInteractionEvents]; - - if (error == nil) { - [[[UIAlertView alloc] initWithTitle:@"Saved to camera roll" message:@"" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; - } else { - [[[UIAlertView alloc] initWithTitle:@"Failed to save" message:error.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; - } - }]; - } else { - if (!exportSession.cancelled) { - [[[UIAlertView alloc] initWithTitle:@"Failed to save" message:error.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; - } - } - }]; + AVAssetTrack * videoAssetTrack = [self.recordSession.assetRepresentingSegments tracksWithMediaType:AVMediaTypeVideo].firstObject; + NSLog(@"FPS is : %f ", videoAssetTrack.nominalFrameRate); + + [self.recordSession mergeSegmentsUsingPreset:AVAssetExportPresetPassthrough + atURL:self.recordSession.outputUrl + completionHandler:^(NSURL *url, NSError *error) { + [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) { + [PHPhotoLibrary.sharedPhotoLibrary + performChanges:^{ + [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url]; + } + completionHandler:^(BOOL success, NSError *error) { + NSLog(@"error %@", error); + dispatch_async(dispatch_get_main_queue(), ^{ + if (error == nil) { + [[[UIAlertView alloc] initWithTitle:@"Saved to camera roll" message:@"" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; + } else { + [[[UIAlertView alloc] initWithTitle:@"Failed to save" message:error.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; + } + }); + }]; + }]; + }]; +} + +- (void)saveToCameraRollForURL:(NSURL *)url { + 
self.navigationItem.rightBarButtonItem.enabled = NO; + SCFilter *currentFilter = [self.filterSwitcherView.selectedFilter copy]; + [_player pause]; + + SCAssetExportSession *exportSession = [[SCAssetExportSession alloc] initWithAsset:self.recordSession.assetRepresentingSegments]; + exportSession.videoConfiguration.filter = currentFilter; + exportSession.videoConfiguration.preset = SCPresetHighestQuality; + exportSession.videoConfiguration.size = CGSizeMake(3840, 2160); + exportSession.audioConfiguration.preset = SCPresetHighestQuality; + exportSession.videoConfiguration.maxFrameRate = 35; + + NSURL *documentsDirUrl = [NSURL fileURLWithPath:NSTemporaryDirectory() isDirectory:YES]; + NSString *fileName = [NSString stringWithFormat:@"this-export.mp4"]; + __block NSURL *out = [NSURL URLWithString:fileName relativeToURL:documentsDirUrl]; + + NSFileManager *fileManager = [NSFileManager defaultManager]; + [fileManager removeItemAtURL:out error:nil]; + + exportSession.outputUrl = out; + exportSession.outputFileType = AVFileTypeMPEG4; + exportSession.delegate = self; + exportSession.contextType = SCContextTypeAuto; + self.exportSession = exportSession; + + self.exportView.hidden = NO; + self.exportView.alpha = 0; + CGRect frame = self.progressView.frame; + frame.size.width = 0; + self.progressView.frame = frame; + + [UIView animateWithDuration:0.3 animations:^{ + self.exportView.alpha = 1; + }]; + + SCWatermarkOverlayView *overlay = [SCWatermarkOverlayView new]; + overlay.date = self.recordSession.date; + exportSession.videoConfiguration.overlay = overlay; + NSLog(@"Starting exporting"); + + CFTimeInterval time = CACurrentMediaTime(); + __weak typeof(self) wSelf = self; + [exportSession exportAsynchronouslyWithCompletionHandler:^{ + __strong typeof(self) strongSelf = wSelf; + + if (!exportSession.cancelled) { + NSLog(@"Completed compression in %fs", CACurrentMediaTime() - time); + } + + if (strongSelf != nil) { + [strongSelf.player play]; + strongSelf.exportSession = nil; + strongSelf.navigationItem.rightBarButtonItem.enabled = YES; + + [UIView animateWithDuration:0.3 animations:^{ + strongSelf.exportView.alpha = 0; + }]; + } + + NSError *error = exportSession.error; + if (exportSession.cancelled) { + NSLog(@"Export was cancelled"); + } else if (error == nil) { + [[UIApplication sharedApplication] beginIgnoringInteractionEvents]; + [out saveToCameraRollWithCompletion:^(NSString * _Nullable path, NSError * _Nullable error) { + [[UIApplication sharedApplication] endIgnoringInteractionEvents]; + + if (error == nil) { + [[[UIAlertView alloc] initWithTitle:@"Saved to camera roll" message:@"" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; + } else { + [[[UIAlertView alloc] initWithTitle:@"Failed to save" message:error.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; + } + }]; + } else { + if (!exportSession.cancelled) { + [[[UIAlertView alloc] initWithTitle:@"Failed to save" message:error.localizedDescription delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show]; + } + } + }]; } - (BOOL)startMediaBrowser { - //Validations - if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeSavedPhotosAlbum] == NO) { - return NO; - } - - UIImagePickerController *mediaUI = [[UIImagePickerController alloc] init]; - mediaUI.sourceType = UIImagePickerControllerSourceTypeSavedPhotosAlbum; - mediaUI.mediaTypes = [[NSArray alloc] initWithObjects: (NSString *) kUTTypeMovie, nil]; - - mediaUI.allowsEditing = YES; - 
mediaUI.delegate = self; - - [self presentViewController:mediaUI animated:YES completion:nil]; - return YES; + //Validations + if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeSavedPhotosAlbum] == NO) { + return NO; + } + + UIImagePickerController *mediaUI = [[UIImagePickerController alloc] init]; + mediaUI.sourceType = UIImagePickerControllerSourceTypeSavedPhotosAlbum; + mediaUI.mediaTypes = [[NSArray alloc] initWithObjects: (NSString *) kUTTypeMovie, nil]; + + mediaUI.allowsEditing = YES; + mediaUI.delegate = self; + + [self presentViewController:mediaUI animated:YES completion:nil]; + return YES; } - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info { - NSURL *url = info[UIImagePickerControllerMediaURL]; - [picker dismissViewControllerAnimated:YES completion:nil]; - - SCRecordSessionSegment *segment = [SCRecordSessionSegment segmentWithURL:url info:nil]; - - [_recordSession addSegment:segment]; + NSURL *url = info[UIImagePickerControllerMediaURL]; + [picker dismissViewControllerAnimated:YES completion:nil]; + + SCRecordSessionSegment *segment = [SCRecordSessionSegment segmentWithURL:url info:nil]; + + [_recordSession addSegment:segment]; } diff --git a/Library/SCRecorder.xcodeproj/project.pbxproj b/Library/SCRecorder.xcodeproj/project.pbxproj index 062b229e..3466d668 100644 --- a/Library/SCRecorder.xcodeproj/project.pbxproj +++ b/Library/SCRecorder.xcodeproj/project.pbxproj @@ -831,7 +831,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNKNOWN_PRAGMAS = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 6.1; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; }; @@ -861,7 +861,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNKNOWN_PRAGMAS = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 6.1; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; OTHER_CFLAGS = "-DNS_BLOCK_ASSERTIONS=1"; SDKROOT = iphoneos; VALIDATE_PRODUCT = YES; diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 0bdd6517..07a9eebc 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -421,8 +421,8 @@ - (BOOL)_setupContextIfNeeded { cgContext = SCCreateContextFromPixelBuffer(pixelBuffer); options = @{ - SCContextOptionsCGContextKey: (__bridge id)cgContext - }; + SCContextOptionsCGContextKey: (__bridge id)cgContext + }; } _context = [SCContext contextWithType:_contextType options:options]; @@ -453,10 +453,11 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { if (needsPixelBuffer && _videoInput != nil) { NSDictionary *pixelBufferAttributes = @{ - (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], // old setting = kCVPixelFormatType_32BGRA - (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width], - (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height] - }; +// (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), // old setting = kCVPixelFormatType_32BGRA + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), // old setting = kCVPixelFormatType_32BGRA + (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width], + (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height] + }; _videoPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor 
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes]; } @@ -465,9 +466,9 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed { - (void)cancelExport { _cancelled = YES; - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; dispatch_sync(_videoQueue, ^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; if (iSelf->_needsLeaveVideo) { iSelf->_needsLeaveVideo = NO; dispatch_group_leave(iSelf->_dispatchGroup); @@ -583,28 +584,28 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { _inputBufferSize = CGSizeZero; if (videoTracks.count > 0 && self.videoConfiguration.enabled && !self.videoConfiguration.shouldIgnore) { AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0]; - CGAffineTransform trackTransform = videoTrack.preferredTransform; + CGAffineTransform trackTransform = videoTrack.preferredTransform; - // Output - AVVideoComposition *videoComposition = self.videoConfiguration.composition; + // Output + AVVideoComposition *videoComposition = self.videoConfiguration.composition; - if (videoComposition == nil) { - _inputBufferSize = videoTrack.naturalSize; - } else { - _inputBufferSize = videoComposition.renderSize; - } + if (videoComposition == nil) { + _inputBufferSize = videoTrack.naturalSize; + } else { + _inputBufferSize = videoComposition.renderSize; + } // Input NSDictionary *videoSettings = [_videoConfiguration createAssetWriterOptionsWithVideoSize:_inputBufferSize - sizeIsSuggestion:videoComposition == nil]; + sizeIsSuggestion:videoComposition == nil]; _videoInput = [self addWriter:AVMediaTypeVideo withSettings:videoSettings]; if (_videoConfiguration.keepInputAffineTransform) { _videoInput.transform = videoTrack.preferredTransform; } else if (videoComposition) { - _videoInput.transform = trackTransform; + _videoInput.transform = trackTransform; } else - _videoInput.transform = _videoConfiguration.affineTransform; + _videoInput.transform = _videoConfiguration.affineTransform; CGSize outputBufferSize = videoComposition.renderSize; @@ -627,14 +628,15 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { NSDictionary *settings = nil; if (_filter != nil || self.videoConfiguration.overlay != nil) { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), - (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] - }; + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), + (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] + }; } else { settings = @{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), - (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] - }; + (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), +// (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), + (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary] + }; } AVAssetReaderOutput *reader = nil; @@ -701,9 +703,9 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))completionHand [self beginReadWriteOnAudio]; [self beginReadWriteOnVideo]; - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; if (iSelf->_error == nil) { iSelf->_error = iSelf->_writer.error; } @@ -741,48 +743,48 @@ - (AVAssetReader *)reader { + 
(UIImageOrientation)orientationForVideoTransform:(CGAffineTransform)videoTransform { - UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; //leave this - it may be used in the future + UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; //leave this - it may be used in the future - if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { - videoAssetOrientation_= UIImageOrientationRight; - } - if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { - videoAssetOrientation_ = UIImageOrientationLeft; - } - if(videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { - videoAssetOrientation_ = UIImageOrientationUp; - } - if(videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) - { - videoAssetOrientation_ = UIImageOrientationDown; - } - return videoAssetOrientation_; + if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { + videoAssetOrientation_= UIImageOrientationRight; + } + if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { + videoAssetOrientation_ = UIImageOrientationLeft; + } + if(videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { + videoAssetOrientation_ = UIImageOrientationUp; + } + if(videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) + { + videoAssetOrientation_ = UIImageOrientationDown; + } + return videoAssetOrientation_; } - (CGAffineTransform)transformForVideoTransform:(CGAffineTransform)videoTransform - naturalSize:(CGSize)naturalSize - withRequiredResolution:(CGSize)requiredResolution { - //FIXING ORIENTATION// - UIImageOrientation videoAssetOrientation_ = [SCAssetExportSession orientationForVideoTransform:videoTransform]; //leave this - it may be used in the future - BOOL isVideoAssetPortrait_ = NO; - if (videoAssetOrientation_ == UIImageOrientationRight || - videoAssetOrientation_ == UIImageOrientationLeft) { - isVideoAssetPortrait_ = YES; - } - CGFloat trackWidth = naturalSize.width; - CGFloat trackHeight = naturalSize.height; - CGFloat widthRatio = 0; - CGFloat heightRatio = 0; - - double aspectRatio = (MAX(trackWidth, trackHeight) / MIN(trackWidth, trackHeight)); - double delta = ABS(aspectRatio - (16.0/9.0)); - BOOL closeEnoughTo16x9 = delta < 0.1; // 1.6777 .. 1.8777 tag:gabe - if this is encompassing too much - maybe 0.08? 
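// Editor's note (worked example, not in the original): 16.0/9.0 ≈ 1.7778, so the
// delta < 0.1 test above accepts aspect ratios in (1.6778, 1.8778); a 1920x1080
// track (ratio 1.7778, delta 0) passes, while a 4:3 track (ratio 1.3333,
// delta ≈ 0.444) does not. The 0.08 tolerance the author floats would narrow
// the accepted window to (1.6978, 1.8578).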
- - // DLog(@"image size %f,%f", trackWidth,trackHeight); - // DLog(@"required size %f,%f", self.requiredResolution.width,self.requiredResolution.height); - // DLog(@"Original transform a(%f) b(%f) c(%f) d(%f) tx(%f) ty(%f)", - // videoTransform.a,videoTransform.b,videoTransform.c,videoTransform.d,videoTransform.tx,videoTransform.ty) - /* + naturalSize:(CGSize)naturalSize + withRequiredResolution:(CGSize)requiredResolution { + //FIXING ORIENTATION// + UIImageOrientation videoAssetOrientation_ = [SCAssetExportSession orientationForVideoTransform:videoTransform]; //leave this - it may be used in the future + BOOL isVideoAssetPortrait_ = NO; + if (videoAssetOrientation_ == UIImageOrientationRight || + videoAssetOrientation_ == UIImageOrientationLeft) { + isVideoAssetPortrait_ = YES; + } + CGFloat trackWidth = naturalSize.width; + CGFloat trackHeight = naturalSize.height; + CGFloat widthRatio = 0; + CGFloat heightRatio = 0; + + double aspectRatio = (MAX(trackWidth, trackHeight) / MIN(trackWidth, trackHeight)); + double delta = ABS(aspectRatio - (16.0/9.0)); + BOOL closeEnoughTo16x9 = delta < 0.1; // 1.6777 .. 1.8777 tag:gabe - if this is encompassing too much - maybe 0.08? + + // DLog(@"image size %f,%f", trackWidth,trackHeight); + // DLog(@"required size %f,%f", self.requiredResolution.width,self.requiredResolution.height); + // DLog(@"Original transform a(%f) b(%f) c(%f) d(%f) tx(%f) ty(%f)", + // videoTransform.a,videoTransform.b,videoTransform.c,videoTransform.d,videoTransform.tx,videoTransform.ty) + /* * 2 Main Cases * a- portrait * happens when taking video from the rear camera or selecting from the library @@ -792,134 +794,134 @@ - (CGAffineTransform)transformForVideoTransform:(CGAffineTransform)videoTransfor * as well as camera footage from the front camera * * */ - if(isVideoAssetPortrait_) { - // DLog(@"IS PORTRAIT - ORIGINAL TRANSFORM"); - trackWidth = naturalSize.height; - trackHeight = naturalSize.width; - - if (trackWidth == requiredResolution.width && - trackHeight == requiredResolution.height) { - return videoTransform; - } else { - widthRatio = requiredResolution.width / trackWidth; - heightRatio = requiredResolution.height / trackHeight; - - if (closeEnoughTo16x9) { - // aspect fill time - if (widthRatio < heightRatio) - widthRatio = heightRatio; - else - heightRatio = widthRatio; - } else { - /* + if(isVideoAssetPortrait_) { + // DLog(@"IS PORTRAIT - ORIGINAL TRANSFORM"); + trackWidth = naturalSize.height; + trackHeight = naturalSize.width; + + if (trackWidth == requiredResolution.width && + trackHeight == requiredResolution.height) { + return videoTransform; + } else { + widthRatio = requiredResolution.width / trackWidth; + heightRatio = requiredResolution.height / trackHeight; + + if (closeEnoughTo16x9) { + // aspect fill time + if (widthRatio < heightRatio) + widthRatio = heightRatio; + else + heightRatio = widthRatio; + } else { + /* * Since this is portrait, that means the height should be taller than the width * therefore, adjust to fit height and center via width * */ - // aspect fit (old code) - heightRatio = requiredResolution.height / trackHeight; - widthRatio = heightRatio; - } - CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); - CGFloat translationDistanceX = 0; - CGFloat translationDistanceY = 0; + // aspect fit (old code) + heightRatio = requiredResolution.height / trackHeight; + widthRatio = heightRatio; + } + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + CGFloat translationDistanceX 
= 0;
+ CGFloat translationDistanceY = 0;

- /*
+ /*
* If width < required width, center by width
* height will always fill the screen
* center it by height Just in case
* */
- CGFloat newWidth = widthRatio * trackWidth;
- if (newWidth != requiredResolution.width) {
- translationDistanceX = (requiredResolution.width - newWidth)/2;
- }
- CGFloat newHeight = heightRatio * trackHeight;
- if (newHeight != requiredResolution.height) {
- translationDistanceY = (requiredResolution.height - newHeight)/2;
- }
-
- // DLog(@"translate %f,%f", translationDistanceX, translationDistanceY);
- return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY));
- }
- } else {
- trackWidth = naturalSize.width;
- trackHeight = naturalSize.height;
- /*
+ CGFloat newWidth = widthRatio * trackWidth;
+ if (newWidth != requiredResolution.width) {
+ translationDistanceX = (requiredResolution.width - newWidth)/2;
+ }
+ CGFloat newHeight = heightRatio * trackHeight;
+ if (newHeight != requiredResolution.height) {
+ translationDistanceY = (requiredResolution.height - newHeight)/2;
+ }
+
+ // DLog(@"translate %f,%f", translationDistanceX, translationDistanceY);
+ return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY));
+ }
+ } else {
+ trackWidth = naturalSize.width;
+ trackHeight = naturalSize.height;
+ /*
* Fix for videos saved locally
* */
- BOOL isOrientedUpWithSwitchedWidthHeight = NO;
- if ((videoAssetOrientation_ == UIImageOrientationUp || videoAssetOrientation_ == UIImageOrientationDown)
- && trackWidth > trackHeight) {
- isOrientedUpWithSwitchedWidthHeight = YES;
- }
- /*
+ BOOL isOrientedUpWithSwitchedWidthHeight = NO;
+ if ((videoAssetOrientation_ == UIImageOrientationUp || videoAssetOrientation_ == UIImageOrientationDown)
+ && trackWidth > trackHeight) {
+ isOrientedUpWithSwitchedWidthHeight = YES;
+ }
+ /*
* Special case for photos that have been recorded with swapped settings
* */
+ if (isOrientedUpWithSwitchedWidthHeight) {
+ trackWidth = naturalSize.height;
+ trackHeight = naturalSize.width;
+
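// Editor's note (clarification, not in the original): AVAssetTrack's naturalSize
// is reported in the file's pre-transform pixel space, so a track that is encoded
// landscape but displays upright via its preferredTransform has its width and
// height swapped here before the scale ratios are computed.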
widthRatio = requiredResolution.width/trackWidth; + heightRatio = requiredResolution.height/trackHeight; + CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(widthRatio,heightRatio); + CGFloat translationDistance = (CGFloat) (heightRatio*fabs(videoTransform.ty)); + return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, translationDistance));//840 is the number + } else if (trackWidth == requiredResolution.width && + trackHeight == requiredResolution.height) { + /*If the resolution is the same, just rotate and scale*/ + widthRatio = requiredResolution.width/trackWidth; + heightRatio = requiredResolution.height/trackHeight; + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + return CGAffineTransformConcat(videoTransform, scaleFactor); + } else { + if (closeEnoughTo16x9) { + widthRatio = requiredResolution.width / trackWidth; + heightRatio = requiredResolution.height / trackHeight; + + // aspect fill time + if (widthRatio < heightRatio) + widthRatio = heightRatio; + else + heightRatio = widthRatio; + } else { + /*If the resolutions are different and the video's height > width * scale by height * if the video is 16x9 it will fit * */ - // aspect fit (old code) - if (trackHeight > trackWidth) { - heightRatio = requiredResolution.height/trackHeight; - widthRatio = heightRatio; - } else { - /* Occurs for square videos + // aspect fit (old code) + if (trackHeight > trackWidth) { + heightRatio = requiredResolution.height/trackHeight; + widthRatio = heightRatio; + } else { + /* Occurs for square videos * otherwise will only happen for landscape videos which are not supported * */ - widthRatio = requiredResolution.width/trackWidth; - heightRatio = widthRatio; - } - } - // DLog(@"NOT PORTRAIT") - // DLog(@"LANDSCAPE width ratio %f", widthRatio); - // DLog(@"LANDSCAPE height ratio %f", heightRatio); - CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); - CGFloat translationDistanceX = 0; - CGFloat translationDistanceY = 0; - /* + widthRatio = requiredResolution.width/trackWidth; + heightRatio = widthRatio; + } + } + // DLog(@"NOT PORTRAIT") + // DLog(@"LANDSCAPE width ratio %f", widthRatio); + // DLog(@"LANDSCAPE height ratio %f", heightRatio); + CGAffineTransform scaleFactor = CGAffineTransformMakeScale(widthRatio, heightRatio); + CGFloat translationDistanceX = 0; + CGFloat translationDistanceY = 0; + /* * If width < required width, center by width * height will always fill the screen * */ - CGFloat newWidth = widthRatio * trackWidth; - if (newWidth != requiredResolution.width) { - translationDistanceX = (requiredResolution.width - newWidth)/2; - } - CGFloat newHeight = heightRatio * trackHeight; - if (newHeight != requiredResolution.height) { - translationDistanceY = (requiredResolution.height - newHeight)/2; - } - // DLog(@"translation x,y %f,%f", translationDistanceX, translationDistanceY) - //CGFloat translationDistance = (CGFloat) (heightRatio*fabs(videoTransform.ty)); - return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY)); - } - } + CGFloat newWidth = widthRatio * trackWidth; + if (newWidth != requiredResolution.width) { + translationDistanceX = (requiredResolution.width - newWidth)/2; + } + CGFloat newHeight = heightRatio * trackHeight; + if (newHeight != requiredResolution.height) { + translationDistanceY = 
(requiredResolution.height - newHeight)/2; + } + // DLog(@"translation x,y %f,%f", translationDistanceX, translationDistanceY) + //CGFloat translationDistance = (CGFloat) (heightRatio*fabs(videoTransform.ty)); + return CGAffineTransformConcat(CGAffineTransformConcat(videoTransform, scaleFactor), CGAffineTransformMakeTranslation(translationDistanceX, translationDistanceY)); + } + } } @end diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index d8dd88eb..d3817b40 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -155,7 +155,7 @@ + (NSError*)createError:(NSString*)errorDescription { - (void)dispatchSyncOnSessionQueue:(void(^)(void))block { SCRecorder *recorder = self.recorder; - block = [block copy]; + block = [block copy]; if (recorder == nil || [SCRecorder isSessionQueue]) { block(); } else { @@ -169,7 +169,7 @@ - (void)removeFile:(NSURL *)fileUrl { } - (void)removeSegment:(SCRecordSessionSegment *)segment { - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ NSUInteger index = [wSelf.segments indexOfObject:segment]; if (index != NSNotFound) { @@ -179,9 +179,9 @@ - (void)removeSegment:(SCRecordSessionSegment *)segment { } - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile { - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; SCRecordSessionSegment *segment = [iSelf->_segments objectAtIndex:segmentIndex]; [iSelf->_segments removeObjectAtIndex:segmentIndex]; @@ -207,9 +207,9 @@ - (void)removeSegmentAtIndex:(NSInteger)segmentIndex deleteFile:(BOOL)deleteFile } - (void)removeLastSegment { - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; if (iSelf->_segments.count > 0) { [self removeSegmentAtIndex:iSelf->_segments.count - 1 deleteFile:YES]; } @@ -218,28 +218,28 @@ - (void)removeLastSegment { - (void)removeAllSegments:(void(^ __nullable)(void))completionHandler { [self removeAllSegments:YES - withCompletion:completionHandler]; + withCompletion:completionHandler]; } - (void)removeAllSegments:(BOOL)removeFiles withCompletion:(void(^ __nullable)(void))completionHandler;{ - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; - while (iSelf->_segments.count > 0) { - if (removeFiles) { - SCRecordSessionSegment *segment = [iSelf->_segments objectAtIndex:0]; - [segment deleteFile]; - } - [iSelf->_segments removeObjectAtIndex:0]; - } + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; + while (iSelf->_segments.count > 0) { + if (removeFiles) { + SCRecordSessionSegment *segment = [iSelf->_segments objectAtIndex:0]; + [segment deleteFile]; + } + [iSelf->_segments removeObjectAtIndex:0]; + } - iSelf->_segmentsDuration = kCMTimeZero; + iSelf->_segmentsDuration = kCMTimeZero; - if (completionHandler) { - completionHandler(); - } - }]; + if (completionHandler) { + completionHandler(); + } + }]; } - (NSString*)_suggestedFileType { @@ -282,6 +282,8 @@ - (NSString *)_suggestedFileExtension { return @"wav"; } else if ([fileType isEqualToString:AVFileTypeMPEGLayer3]) { return @"mp3"; + } else if ([fileType isEqualToString:AVFileTypeHEIC]) { + return @"heic"; } return nil; @@ -366,9 +368,9 @@ - (AVAssetWriter 
*)createWriter:(NSError **)error {
}

- (void)deinitialize {
- __weak typeof(self) wSelf = self;
+ __weak typeof(self) wSelf = self;
[self dispatchSyncOnSessionQueue:^{
- typeof(self) iSelf = wSelf;
+ typeof(self) iSelf = wSelf;
[self endSegmentWithInfo:nil completionHandler:nil];

iSelf->_audioConfiguration = nil;
@@ -390,9 +392,9 @@ - (void)initializeVideo:(NSDictionary *)videoSettings formatDescription:(CMForma
_videoInput.transform = _videoConfiguration.affineTransform;

CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
-
NSDictionary *pixelBufferAttributes = @{
- (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+// (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
(id)kCVPixelBufferWidthKey : @(dimensions.width),
(id)kCVPixelBufferHeightKey : @(dimensions.height)
};
@@ -427,18 +429,18 @@ - (void)initializeAudio:(NSDictionary *)audioSettings formatDescription:(CMForma
}

- (void)addSegment:(SCRecordSessionSegment *)segment {
- __weak typeof(self) wSelf = self;
- [self dispatchSyncOnSessionQueue:^{
- typeof(self) iSelf = wSelf;
+ __weak typeof(self) wSelf = self;
+ [self dispatchSyncOnSessionQueue:^{
+ typeof(self) iSelf = wSelf;
[iSelf->_segments addObject:segment];

iSelf->_segmentsDuration = CMTimeAdd(iSelf->_segmentsDuration, segment.duration);
}];
}

- (void)insertSegment:(SCRecordSessionSegment *)segment atIndex:(NSInteger)segmentIndex {
- __weak typeof(self) wSelf = self;
- [self dispatchSyncOnSessionQueue:^{
- typeof(self) iSelf = wSelf;
+ __weak typeof(self) wSelf = self;
+ [self dispatchSyncOnSessionQueue:^{
+ typeof(self) iSelf = wSelf;
[iSelf->_segments insertObject:segment atIndex:segmentIndex];

iSelf->_segmentsDuration = CMTimeAdd(iSelf->_segmentsDuration, segment.duration);
}];
}
@@ -466,20 +468,20 @@ - (CMSampleBufferRef)adjustBuffer:(CMSampleBufferRef)sample withTimeOffset:(CMTi
}

- (void)beginSegment:(NSError**)error {
- __block NSError* localError;
- __weak typeof(self) wSelf = self;
- [self dispatchSyncOnSessionQueue:^{
- typeof(self) iSelf = wSelf;
+ __block NSError* localError;
+ __weak typeof(self) wSelf = self;
+ [self dispatchSyncOnSessionQueue:^{
+ typeof(self) iSelf = wSelf;
if (iSelf->_assetWriter == nil) {
iSelf->_assetWriter = [iSelf createWriter:&localError];
iSelf->_currentSegmentDuration = kCMTimeZero;
iSelf->_currentSegmentHasAudio = NO;
iSelf->_currentSegmentHasVideo = NO;
} else {
- localError = [SCRecordSession createError:@"A record segment has already began."];
+ localError = [SCRecordSession createError:@"A record segment has already begun."];
}
}];
- if (error) *error = localError;
+ if (error) *error = localError;
}

- (void)_destroyAssetWriter {
@@ -494,9 +496,9 @@ - (void)_destroyAssetWriter {
}

- (void)appendRecordSegmentUrl:(NSURL *)url info:(NSDictionary *)info error:(NSError *)error completionHandler:(void (^)(SCRecordSessionSegment *, NSError *))completionHandler {
- __weak typeof(self) wSelf = self;
- [self dispatchSyncOnSessionQueue:^{
- typeof(self) iSelf = wSelf;
+ __weak typeof(self) wSelf = self;
+ [self dispatchSyncOnSessionQueue:^{
+ typeof(self) iSelf = wSelf;
SCRecordSessionSegment *segment = nil;

if (error == nil) {
@@ -517,48 +519,48 @@ - (void)appendRecordSegmentUrl:(NSURL *)url info:(NSDictionary *)info error:(NSE

- (BOOL)endSegmentWithInfo:(NSDictionary *)info completionHandler:(void(^)(SCRecordSessionSegment *segment,
NSError* error))completionHandler { __block BOOL success = NO; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; // dispatch_sync(iSelf->_audioQueue, ^{ - if (iSelf->_recordSegmentReady) { - iSelf->_recordSegmentReady = NO; - success = YES; - - AVAssetWriter *writer = iSelf->_assetWriter; + if (iSelf->_recordSegmentReady) { + iSelf->_recordSegmentReady = NO; + success = YES; - if (writer != nil) { - BOOL currentSegmentEmpty = (!iSelf->_currentSegmentHasVideo && !iSelf->_currentSegmentHasAudio); + AVAssetWriter *writer = iSelf->_assetWriter; - if (currentSegmentEmpty) { - [writer cancelWriting]; - [iSelf _destroyAssetWriter]; + if (writer != nil) { + BOOL currentSegmentEmpty = (!iSelf->_currentSegmentHasVideo && !iSelf->_currentSegmentHasAudio); - [iSelf removeFile:writer.outputURL]; + if (currentSegmentEmpty) { + [writer cancelWriting]; + [iSelf _destroyAssetWriter]; - if (completionHandler != nil) { - dispatch_async(dispatch_get_main_queue(), ^{ - completionHandler(nil, nil); - }); - } - } else { - // NSLog(@"Ending session at %fs", CMTimeGetSeconds(_currentSegmentDuration)); - [writer endSessionAtSourceTime:CMTimeAdd(iSelf->_currentSegmentDuration, iSelf->_sessionStartTime)]; + [iSelf removeFile:writer.outputURL]; - [writer finishWritingWithCompletionHandler: ^{ - [iSelf appendRecordSegmentUrl:writer.outputURL info:info error:writer.error completionHandler:completionHandler]; - }]; + if (completionHandler != nil) { + dispatch_async(dispatch_get_main_queue(), ^{ + completionHandler(nil, nil); + }); } } else { - [iSelf->_movieFileOutput stopRecording]; + // NSLog(@"Ending session at %fs", CMTimeGetSeconds(_currentSegmentDuration)); + [writer endSessionAtSourceTime:CMTimeAdd(iSelf->_currentSegmentDuration, iSelf->_sessionStartTime)]; + + [writer finishWritingWithCompletionHandler: ^{ + [iSelf appendRecordSegmentUrl:writer.outputURL info:info error:writer.error completionHandler:completionHandler]; + }]; } - } else { - if (completionHandler != nil) { - dispatch_async(dispatch_get_main_queue(), ^{ - completionHandler(nil, [SCRecordSession createError:@"The current record segment is not ready for this operation"]); - }); - } - } + } else { + [iSelf->_movieFileOutput stopRecording]; + } + } else { + if (completionHandler != nil) { + dispatch_async(dispatch_get_main_queue(), ^{ + completionHandler(nil, [SCRecordSession createError:@"The current record segment is not ready for this operation"]); + }); + } + } // }); }]; @@ -591,9 +593,9 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres __block NSString *fileType = nil; __block NSURL *outputUrl = nil; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; fileType = [self _suggestedFileType]; if (fileType == nil) { @@ -652,9 +654,9 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres __block NSString *fileType = nil; __block NSURL *outputUrl = url; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; fileType = [iSelf _suggestedFileType]; if (fileType == nil) { @@ -711,23 +713,23 @@ - 
(void)finishEndSession:(NSError*)mergeError completionHandler:(void (^)(NSErro } - (void)cancelSession:(void (^)(void))completionHandler { - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; if (iSelf->_assetWriter == nil) { [iSelf removeAllSegments:nil]; - if (completionHandler != nil) { - dispatch_async(dispatch_get_main_queue(), ^{ - completionHandler(); - }); - } + if (completionHandler != nil) { + dispatch_async(dispatch_get_main_queue(), ^{ + completionHandler(); + }); + } } else { [iSelf endSegmentWithInfo:nil completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { [iSelf removeAllSegments:nil]; if (completionHandler != nil) { - dispatch_async(dispatch_get_main_queue(), ^{ - completionHandler(); - }); + dispatch_async(dispatch_get_main_queue(), ^{ + completionHandler(); + }); } }]; } @@ -759,9 +761,9 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion: CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(adjustedBuffer); CMTime lastTimeAudio = CMTimeAdd(presentationTime, duration); - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; dispatch_async(_audioQueue, ^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; if ([iSelf->_audioInput isReadyForMoreMediaData] && [iSelf->_audioInput appendSampleBuffer:adjustedBuffer]) { iSelf->_lastTimeAudio = lastTimeAudio; @@ -802,16 +804,16 @@ - (void)appendVideoPixelBuffer:(CVPixelBufferRef)videoPixelBuffer atTime:(CMTime } duration = computedFrameDuration; } - - // CMTime timeVideo = _lastTimeVideo; - // CMTime actualBufferDuration = duration; - // - // if (CMTIME_IS_VALID(timeVideo)) { - // while (CMTIME_COMPARE_INLINE(CMTimeSubtract(actualBufferTime, timeVideo), >=, CMTimeMultiply(actualBufferDuration, 2))) { - // NSLog(@"Missing buffer"); - // timeVideo = CMTimeAdd(timeVideo, actualBufferDuration); - // } - // } + /*{ + CMTime timeVideo = _lastTimeVideo; + CMTime actualBufferDuration = duration; + if (CMTIME_IS_VALID(timeVideo)) { + while (CMTIME_COMPARE_INLINE(CMTimeSubtract(actualBufferTime, timeVideo), >=, CMTimeMultiply(actualBufferDuration, 2))) { + NSLog(@"Missing buffer"); + timeVideo = CMTimeAdd(timeVideo, actualBufferDuration); + } + } + }*/ if ([_videoInput isReadyForMoreMediaData]) { if ([_videoPixelBufferAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:bufferTimestamp]) { @@ -862,9 +864,9 @@ - (void)appendSegmentsToComposition:(AVMutableComposition * __nonnull)compositio } - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix:(AVMutableAudioMix *)audioMix { - __weak typeof(self) wSelf = self; + __weak typeof(self) wSelf = self; [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + typeof(self) iSelf = wSelf; AVMutableCompositionTrack *audioTrack = nil; AVMutableCompositionTrack *videoTrack = nil; @@ -919,9 +921,9 @@ - (void)appendSegmentsToComposition:(AVMutableComposition *)composition audioMix - (AVPlayerItem *)playerItemRepresentingSegments { __block AVPlayerItem *playerItem = nil; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; if (iSelf->_segments.count == 1) { SCRecordSessionSegment *segment = iSelf->_segments.firstObject; playerItem = [AVPlayerItem 
playerItemWithAsset:segment.asset]; @@ -938,9 +940,9 @@ - (AVPlayerItem *)playerItemRepresentingSegments { - (AVAsset *)assetRepresentingSegments { __block AVAsset *asset = nil; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; if (iSelf->_segments.count == 1) { SCRecordSessionSegment *segment = iSelf->_segments.firstObject; asset = segment.asset; @@ -1023,9 +1025,9 @@ - (NSURL *)outputUrl { - (void)setSegmentsDirectory:(NSString *)segmentsDirectory { _segmentsDirectory = [segmentsDirectory copy]; - __weak typeof(self) wSelf = self; - [self dispatchSyncOnSessionQueue:^{ - typeof(self) iSelf = wSelf; + __weak typeof(self) wSelf = self; + [self dispatchSyncOnSessionQueue:^{ + typeof(self) iSelf = wSelf; NSFileManager *fileManager = [NSFileManager defaultManager]; for (SCRecordSessionSegment *recordSegment in iSelf.segments) { NSURL *newUrl = [SCRecordSessionSegment segmentURLForFilename:recordSegment.url.lastPathComponent andDirectory:iSelf->_segmentsDirectory]; diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index 01a625ac..c81aa3ca 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -123,6 +123,9 @@ */ @property (assign, nonatomic) BOOL automaticallyConfiguresApplicationAudioSession; + +@property (assign, nonatomic) BOOL automaticallyConfiguresCaptureDeviceForWideColor; + /** The captureSession. This will be null until prepare or startRunning has been called. Calling unprepare will set this property to null again. diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index d0e15733..8ebea9ca 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -13,38 +13,38 @@ #define kMinTimeBetweenAppend 0.004 @interface SCRecorder() { - AVCaptureVideoPreviewLayer *_previewLayer; - AVCaptureSession *_captureSession; - UIView *_previewView; - AVCaptureVideoDataOutput *_videoOutput; - AVCaptureMovieFileOutput *_movieOutput; - AVCaptureAudioDataOutput *_audioOutput; - AVCaptureStillImageOutput *_photoOutput; - SCSampleBufferHolder *_lastVideoBuffer; - SCSampleBufferHolder *_lastAudioBuffer; - CIContext *_context; - BOOL _audioInputAdded; - BOOL _audioOutputAdded; - BOOL _videoInputAdded; - BOOL _videoOutputAdded; - BOOL _shouldAutoresumeRecording; - BOOL _needsSwitchBackToContinuousFocus; - BOOL _adjustingFocus; + AVCaptureVideoPreviewLayer *_previewLayer; + AVCaptureSession *_captureSession; + UIView *_previewView; + AVCaptureVideoDataOutput *_videoOutput; + AVCaptureMovieFileOutput *_movieOutput; + AVCaptureAudioDataOutput *_audioOutput; + AVCaptureStillImageOutput *_photoOutput; + SCSampleBufferHolder *_lastVideoBuffer; + SCSampleBufferHolder *_lastAudioBuffer; + CIContext *_context; + BOOL _audioInputAdded; + BOOL _audioOutputAdded; + BOOL _videoInputAdded; + BOOL _videoOutputAdded; + BOOL _shouldAutoresumeRecording; + BOOL _needsSwitchBackToContinuousFocus; + BOOL _adjustingFocus; BOOL _didCaptureFirstAudioBuffer; BOOL _didCaptureFirstSessionBuffer; - BOOL _preparing; - BOOL _reconfiguring; + BOOL _preparing; + BOOL _reconfiguring; BOOL _audioMuting; BOOL hasDidAcquireAudioBuffer; - int _beginSessionConfigurationCount; - double _lastAppendedVideoTime; - NSTimer *_movieOutputProgressTimer; + int _beginSessionConfigurationCount; + double _lastAppendedVideoTime; + NSTimer *_movieOutputProgressTimer; CMTime _lastMovieFileOutputTime; CMTime 
_firstBufferTime, _firstSessionTime, _runningTime, _lastBufferTime; - void(^_pauseCompletionHandler)(void); - SCFilter *_transformFilter; - size_t _transformFilterBufferWidth; - size_t _transformFilterBufferHeight; + void(^_pauseCompletionHandler)(void); + SCFilter *_transformFilter; + size_t _transformFilterBufferWidth; + size_t _transformFilterBufferHeight; CMBlockBufferRef quietBlockBuffer; CMSampleBufferRef quietSampleBuffer; @@ -63,56 +63,57 @@ @implementation SCRecorder static char* SCRecorderPhotoOptionsContext = "PhotoOptionsContext"; - (id)init { - self = [super init]; + self = [super init]; - if (self) { + if (self) { quietBlockBuffer = nil; quietSampleBuffer = nil; _sessionQueue = dispatch_queue_create("me.corsin.SCRecorder.RecordSession", nil); - dispatch_queue_set_specific(_sessionQueue, kSCRecorderRecordSessionQueueKey, "true", nil); - dispatch_set_target_queue(_sessionQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); - - _captureSessionPreset = AVCaptureSessionPresetHigh; - _previewLayer = [[AVCaptureVideoPreviewLayer alloc] init]; - _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; - _initializeSessionLazily = YES; - - _videoOrientation = AVCaptureVideoOrientationPortrait; - _videoStabilizationMode = AVCaptureVideoStabilizationModeStandard; - - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_subjectAreaDidChange) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionInterrupted:) name:AVAudioSessionInterruptionNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereReset:) name:AVAudioSessionMediaServicesWereResetNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereLost:) name:AVAudioSessionMediaServicesWereLostNotification object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceOrientationChanged:) name:UIDeviceOrientationDidChangeNotification object:nil]; - - _lastVideoBuffer = [SCSampleBufferHolder new]; - _lastAudioBuffer = [SCSampleBufferHolder new]; - _maxRecordDuration = kCMTimeInvalid; - _resetZoomOnChangeDevice = YES; - _mirrorOnFrontCamera = NO; - _automaticallyConfiguresApplicationAudioSession = YES; + dispatch_queue_set_specific(_sessionQueue, kSCRecorderRecordSessionQueueKey, "true", nil); + dispatch_set_target_queue(_sessionQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); + + _captureSessionPreset = AVCaptureSessionPresetHigh; + _previewLayer = [[AVCaptureVideoPreviewLayer alloc] init]; + _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; + _initializeSessionLazily = YES; + + _videoOrientation = AVCaptureVideoOrientationPortrait; + _videoStabilizationMode = AVCaptureVideoStabilizationModeStandard; + + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(_subjectAreaDidChange) 
name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionInterrupted:) name:AVAudioSessionInterruptionNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereReset:) name:AVAudioSessionMediaServicesWereResetNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(mediaServicesWereLost:) name:AVAudioSessionMediaServicesWereLostNotification object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceOrientationChanged:) name:UIDeviceOrientationDidChangeNotification object:nil]; + + _lastVideoBuffer = [SCSampleBufferHolder new]; + _lastAudioBuffer = [SCSampleBufferHolder new]; + _maxRecordDuration = kCMTimeInvalid; + _resetZoomOnChangeDevice = YES; + _mirrorOnFrontCamera = NO; + _automaticallyConfiguresApplicationAudioSession = YES; + _automaticallyConfiguresCaptureDeviceForWideColor = YES; _audioMuting = NO; - self.device = AVCaptureDevicePositionBack; - _videoConfiguration = [SCVideoConfiguration new]; - _audioConfiguration = [SCAudioConfiguration new]; - _photoConfiguration = [SCPhotoConfiguration new]; + self.device = AVCaptureDevicePositionBack; + _videoConfiguration = [SCVideoConfiguration new]; + _audioConfiguration = [SCAudioConfiguration new]; + _photoConfiguration = [SCPhotoConfiguration new]; - [_videoConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderVideoEnabledContext]; - [_audioConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderAudioEnabledContext]; - [_photoConfiguration addObserver:self forKeyPath:@"options" options:NSKeyValueObservingOptionNew context:SCRecorderPhotoOptionsContext]; + [_videoConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderVideoEnabledContext]; + [_audioConfiguration addObserver:self forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:SCRecorderAudioEnabledContext]; + [_photoConfiguration addObserver:self forKeyPath:@"options" options:NSKeyValueObservingOptionNew context:SCRecorderPhotoOptionsContext]; self.scContext = [SCContext contextWithType:SCContextTypeAuto options:nil]; - _context = self.scContext.CIContext; - } + _context = self.scContext.CIContext; + } - return self; + return self; } - (void)dealloc { @@ -129,189 +130,189 @@ - (void)dealloc { CFRelease(quietSampleBuffer); quietSampleBuffer = nil; - [self unprepare]; + [self unprepare]; } + (SCRecorder*)recorder { - return [[SCRecorder alloc] init]; + return [[SCRecorder alloc] init]; } - (void)applicationDidEnterBackground:(id)sender { - _shouldAutoresumeRecording = _isRecording; - [self pause]; + _shouldAutoresumeRecording = _isRecording; + [self pause]; } - (void)applicationDidBecomeActive:(id)sender { - [self reconfigureVideoInput:self.videoConfiguration.enabled 
audioInput:self.audioConfiguration.enabled]; + [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:self.audioConfiguration.enabled]; - if (_shouldAutoresumeRecording) { - _shouldAutoresumeRecording = NO; - [self record]; - } + if (_shouldAutoresumeRecording) { + _shouldAutoresumeRecording = NO; + [self record]; + } } - (void)deviceOrientationChanged:(id)sender { - if (_autoSetVideoOrientation) { - dispatch_sync(_sessionQueue, ^{ - [self updateVideoOrientation]; - }); - } + if (_autoSetVideoOrientation) { + dispatch_sync(_sessionQueue, ^{ + [self updateVideoOrientation]; + }); + } } - (void)sessionRuntimeError:(id)sender { - [self startRunning]; + [self startRunning]; } - (void)updateVideoOrientation { - if (!_session.currentSegmentHasAudio && !_session.currentSegmentHasVideo) { - [_session deinitialize]; - } + if (!_session.currentSegmentHasAudio && !_session.currentSegmentHasVideo) { + [_session deinitialize]; + } - AVCaptureVideoOrientation videoOrientation = [self actualVideoOrientation]; - AVCaptureConnection *videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; + AVCaptureVideoOrientation videoOrientation = [self actualVideoOrientation]; + AVCaptureConnection *videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; - if ([videoConnection isVideoOrientationSupported]) { - videoConnection.videoOrientation = videoOrientation; - } - if ([_previewLayer.connection isVideoOrientationSupported]) { - _previewLayer.connection.videoOrientation = videoOrientation; - } + if ([videoConnection isVideoOrientationSupported]) { + videoConnection.videoOrientation = videoOrientation; + } + if ([_previewLayer.connection isVideoOrientationSupported]) { + _previewLayer.connection.videoOrientation = videoOrientation; + } - AVCaptureConnection *photoConnection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; - if ([photoConnection isVideoOrientationSupported]) { - photoConnection.videoOrientation = videoOrientation; - } + AVCaptureConnection *photoConnection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; + if ([photoConnection isVideoOrientationSupported]) { + photoConnection.videoOrientation = videoOrientation; + } - AVCaptureConnection *movieOutputConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo]; - if (movieOutputConnection.isVideoOrientationSupported) { - movieOutputConnection.videoOrientation = videoOrientation; - } + AVCaptureConnection *movieOutputConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo]; + if (movieOutputConnection.isVideoOrientationSupported) { + movieOutputConnection.videoOrientation = videoOrientation; + } } - (void)beginConfiguration { - if (_captureSession != nil) { - _beginSessionConfigurationCount++; - if (_beginSessionConfigurationCount == 1) { - self.finishedCommit = NO; - [_captureSession beginConfiguration]; - } - } + if (_captureSession != nil) { + _beginSessionConfigurationCount++; + if (_beginSessionConfigurationCount == 1) { + self.finishedCommit = NO; + [_captureSession beginConfiguration]; + } + } } - (void)commitConfiguration { - if (_captureSession != nil) { - _beginSessionConfigurationCount--; - if (_beginSessionConfigurationCount == 0) { - [_captureSession commitConfiguration]; - self.finishedCommit = YES; - } - } + if (_captureSession != nil) { + _beginSessionConfigurationCount--; + if (_beginSessionConfigurationCount == 0) { + [_captureSession commitConfiguration]; + self.finishedCommit = YES; + } + } } - (BOOL)_reconfigureSession:(BOOL)attachAudio { - 
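// Editor's note (usage sketch, not in the original patch): the begin/commit pair
// above reference-counts nested configuration scopes, so callers can batch several
// changes into a single AVCaptureSession transaction; `recorder` below is a
// hypothetical SCRecorder instance:
//
//   [recorder beginConfiguration];   // count 1 -> [session beginConfiguration]
//   recorder.captureSessionPreset = AVCaptureSessionPreset1280x720;
//   [recorder beginConfiguration];   // count 2, no session call
//   [recorder commitConfiguration];  // count 1, transaction still open
//   [recorder commitConfiguration];  // count 0 -> [session commitConfiguration]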
NSError *newError = nil; - - AVCaptureSession *session = _captureSession; - - if (session != nil) { - [self beginConfiguration]; - - if (![session.sessionPreset isEqualToString:_captureSessionPreset]) { - if ([session canSetSessionPreset:_captureSessionPreset]) { - session.sessionPreset = _captureSessionPreset; - } else { - newError = [SCRecorder createError:@"Cannot set session preset"]; - } - } - - if (self.fastRecordMethodEnabled) { - if (_movieOutput == nil) { - _movieOutput = [AVCaptureMovieFileOutput new]; - } - - if (_videoOutput != nil && [session.outputs containsObject:_videoOutput]) { - [session removeOutput:_videoOutput]; - } - - if (_audioOutput != nil && [session.outputs containsObject:_audioOutput]) { - [session removeOutput:_audioOutput]; - } - - if (![session.outputs containsObject:_movieOutput]) { - if ([session canAddOutput:_movieOutput]) { - [session addOutput:_movieOutput]; - } else { - if (newError == nil) { - newError = [SCRecorder createError:@"Cannot add movieOutput inside the session"]; - } - } - } - - } else { - if (_movieOutput != nil && [session.outputs containsObject:_movieOutput]) { - [session removeOutput:_movieOutput]; - } - - _videoOutputAdded = NO; - if (self.videoConfiguration.enabled) { - if (_videoOutput == nil) { - _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; - _videoOutput.alwaysDiscardsLateVideoFrames = NO; - [_videoOutput setSampleBufferDelegate:self queue:_sessionQueue]; - } - - if (![session.outputs containsObject:_videoOutput]) { - if ([session canAddOutput:_videoOutput]) { - [session addOutput:_videoOutput]; - _videoOutputAdded = YES; - } else { - if (newError == nil) { - newError = [SCRecorder createError:@"Cannot add videoOutput inside the session"]; - } - } - } else { - _videoOutputAdded = YES; - } - } + NSError *newError = nil; + + AVCaptureSession *session = _captureSession; + + if (session != nil) { + [self beginConfiguration]; + + if (![session.sessionPreset isEqualToString:_captureSessionPreset]) { + if ([session canSetSessionPreset:_captureSessionPreset]) { + session.sessionPreset = _captureSessionPreset; + } else { + newError = [SCRecorder createError:@"Cannot set session preset"]; + } + } + + if (self.fastRecordMethodEnabled) { + if (_movieOutput == nil) { + _movieOutput = [AVCaptureMovieFileOutput new]; + } + + if (_videoOutput != nil && [session.outputs containsObject:_videoOutput]) { + [session removeOutput:_videoOutput]; + } + + if (_audioOutput != nil && [session.outputs containsObject:_audioOutput]) { + [session removeOutput:_audioOutput]; + } + + if (![session.outputs containsObject:_movieOutput]) { + if ([session canAddOutput:_movieOutput]) { + [session addOutput:_movieOutput]; + } else { + if (newError == nil) { + newError = [SCRecorder createError:@"Cannot add movieOutput inside the session"]; + } + } + } + + } else { + if (_movieOutput != nil && [session.outputs containsObject:_movieOutput]) { + [session removeOutput:_movieOutput]; + } + + _videoOutputAdded = NO; + if (self.videoConfiguration.enabled) { + if (_videoOutput == nil) { + _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; + _videoOutput.alwaysDiscardsLateVideoFrames = NO; + [_videoOutput setSampleBufferDelegate:self queue:_sessionQueue]; + } + + if (![session.outputs containsObject:_videoOutput]) { + if ([session canAddOutput:_videoOutput]) { + [session addOutput:_videoOutput]; + _videoOutputAdded = YES; + } else { + if (newError == nil) { + newError = [SCRecorder createError:@"Cannot add videoOutput inside the session"]; + } + } + } else { + 
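// Editor's note (observation, not in the original patch): a few lines below, the
// refactor assigns the return value of -attachAudio straight to newError, so a
// preset or video error recorded earlier in this method is replaced (or cleared
// when attachAudio returns nil). A guarded assignment would preserve the first
// failure, e.g.:
//
//   NSError *audioError = [self attachAudio];
//   if (newError == nil) {
//       newError = audioError;
//   }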
_videoOutputAdded = YES; + } + } newError = [self attachAudio]; } - if (self.photoConfiguration.enabled) { - if (_photoOutput == nil) { - _photoOutput = [[AVCaptureStillImageOutput alloc] init]; - _photoOutput.outputSettings = [self.photoConfiguration createOutputSettings]; - } + if (self.photoConfiguration.enabled) { + if (_photoOutput == nil) { + _photoOutput = [[AVCaptureStillImageOutput alloc] init]; + _photoOutput.outputSettings = [self.photoConfiguration createOutputSettings]; + } - if (![session.outputs containsObject:_photoOutput]) { - if ([session canAddOutput:_photoOutput]) { - [session addOutput:_photoOutput]; - } else { - if (newError == nil) { - newError = [SCRecorder createError:@"Cannot add photoOutput inside the session"]; - } - } - } - } + if (![session.outputs containsObject:_photoOutput]) { + if ([session canAddOutput:_photoOutput]) { + [session addOutput:_photoOutput]; + } else { + if (newError == nil) { + newError = [SCRecorder createError:@"Cannot add photoOutput inside the session"]; + } + } + } + } - [self commitConfiguration]; - } - _error = newError; + [self commitConfiguration]; + } + _error = newError; - return newError == nil; + return newError == nil; } - (NSError*)attachAudio { AVCaptureSession *session = _captureSession; NSError* newError = nil; - + _audioOutputAdded = NO; if (self.audioConfiguration.enabled) { if (_audioOutput == nil) { _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; [_audioOutput setSampleBufferDelegate:self queue:_sessionQueue]; } - + if (![session.outputs containsObject:_audioOutput]) { if ([session canAddOutput:_audioOutput]) { [session addOutput:_audioOutput]; @@ -333,177 +334,178 @@ - (void)detachAudio { if (_audioOutput) { if ([session.outputs containsObject:_audioOutput]) { [session removeOutput:_audioOutput]; - } + } _audioOutputAdded = NO; _audioOutput = nil; - } + } } - (BOOL)prepare:(NSError **)error { - if (_captureSession != nil) { - [NSException raise:@"SCCameraException" format:@"The session is already opened"]; - } + if (_captureSession != nil) { + [NSException raise:@"SCCameraException" format:@"The session is already opened"]; + } - if (_preparing) { - return NO; - } + if (_preparing) { + return NO; + } - _preparing = YES; + _preparing = YES; - AVCaptureSession *session = [[AVCaptureSession alloc] init]; - session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; - _beginSessionConfigurationCount = 0; - _captureSession = session; + AVCaptureSession *session = [[AVCaptureSession alloc] init]; + session.automaticallyConfiguresApplicationAudioSession = self.automaticallyConfiguresApplicationAudioSession; + session.automaticallyConfiguresCaptureDeviceForWideColor = self.automaticallyConfiguresCaptureDeviceForWideColor; + _beginSessionConfigurationCount = 0; + _captureSession = session; - [self beginConfiguration]; + [self beginConfiguration]; BOOL success = [self _reconfigureSession:_isRecording]; - if (!success && error != nil) { - *error = _error; - } + if (!success && error != nil) { + *error = _error; + } - _previewLayer.session = session; + _previewLayer.session = session; - [self reconfigureVideoInput:YES audioInput:YES]; + [self reconfigureVideoInput:YES audioInput:YES]; - [self commitConfiguration]; + [self commitConfiguration]; - _preparing = NO; + _preparing = NO; - return success; + return success; } - (BOOL)startRunning { - BOOL success = YES; - if (!self.isPrepared && !_preparing && !_reconfiguring) { - success = [self prepare:nil]; - } + BOOL success = 
YES; + if (!self.isPrepared && !_preparing && !_reconfiguring) { + success = [self prepare:nil]; + } - if (!_captureSession.isRunning) { - [_captureSession startRunning]; - } + if (!_captureSession.isRunning) { + [_captureSession startRunning]; + } - return success; + return success; } - (void)stopRunning { - [_captureSession stopRunning]; + [_captureSession stopRunning]; } - (void)_subjectAreaDidChange { - id delegate = self.delegate; + id delegate = self.delegate; - if (![delegate respondsToSelector:@selector(recorderShouldAutomaticallyRefocus:)] || [delegate recorderShouldAutomaticallyRefocus:self]) { - [self focusCenter]; - } + if (![delegate respondsToSelector:@selector(recorderShouldAutomaticallyRefocus:)] || [delegate recorderShouldAutomaticallyRefocus:self]) { + [self focusCenter]; + } } - (UIImage *)_imageFromSampleBufferHolder:(SCSampleBufferHolder *)sampleBufferHolder { - __block CMSampleBufferRef sampleBuffer = nil; - dispatch_sync(_sessionQueue, ^{ - sampleBuffer = sampleBufferHolder.sampleBuffer; + __block CMSampleBufferRef sampleBuffer = nil; + dispatch_sync(_sessionQueue, ^{ + sampleBuffer = sampleBufferHolder.sampleBuffer; - if (sampleBuffer != nil) { - CFRetain(sampleBuffer); - } - }); + if (sampleBuffer != nil) { + CFRetain(sampleBuffer); + } + }); - if (sampleBuffer == nil) { - return nil; - } + if (sampleBuffer == nil) { + return nil; + } - CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer); - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:buffer]; + CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer); + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:buffer]; - CGImageRef cgImage = [_context createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(buffer), CVPixelBufferGetHeight(buffer))]; + CGImageRef cgImage = [_context createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(buffer), CVPixelBufferGetHeight(buffer))]; - UIImage *image = [UIImage imageWithCGImage:cgImage]; + UIImage *image = [UIImage imageWithCGImage:cgImage]; - CGImageRelease(cgImage); - CFRelease(sampleBuffer); + CGImageRelease(cgImage); + CFRelease(sampleBuffer); - return image; + return image; } - (UIImage *)snapshotOfLastVideoBuffer { - return [self _imageFromSampleBufferHolder:_lastVideoBuffer]; + return [self _imageFromSampleBufferHolder:_lastVideoBuffer]; } - (void)capturePhoto:(void(^)(NSError*, UIImage*))completionHandler { - AVCaptureConnection *connection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; - if (connection != nil) { - [_photoOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: - ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { - - if (imageDataSampleBuffer != nil && error == nil) { - NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; - if (jpegData) { - UIImage *image = [UIImage imageWithData:jpegData]; - if (completionHandler != nil) { - completionHandler(nil, image); - } - } else { - if (completionHandler != nil) { - completionHandler([SCRecorder createError:@"Failed to create jpeg data"], nil); - } - } - } else { - if (completionHandler != nil) { - completionHandler(error, nil); - } - } - }]; - } else { - if (completionHandler != nil) { - completionHandler([SCRecorder createError:@"Camera session not started or Photo disabled"], nil); - } - } + AVCaptureConnection *connection = [_photoOutput connectionWithMediaType:AVMediaTypeVideo]; + if (connection != nil) { + [_photoOutput 
captureStillImageAsynchronouslyFromConnection:connection completionHandler:
+     ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
+
+         if (imageDataSampleBuffer != nil && error == nil) {
+             NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
+             if (jpegData) {
+                 UIImage *image = [UIImage imageWithData:jpegData];
+                 if (completionHandler != nil) {
+                     completionHandler(nil, image);
+                 }
+             } else {
+                 if (completionHandler != nil) {
+                     completionHandler([SCRecorder createError:@"Failed to create jpeg data"], nil);
+                 }
+             }
+         } else {
+             if (completionHandler != nil) {
+                 completionHandler(error, nil);
+             }
+         }
+     }];
+    } else {
+        if (completionHandler != nil) {
+            completionHandler([SCRecorder createError:@"Camera session not started or Photo disabled"], nil);
+        }
+    }
 }

 - (void)unprepare {
-    if (_captureSession != nil) {
-        for (AVCaptureDeviceInput *input in _captureSession.inputs) {
-            [_captureSession removeInput:input];
-            if ([input.device hasMediaType:AVMediaTypeVideo]) {
-                [self removeVideoObservers:input.device];
-            }
-        }
-
-        for (AVCaptureOutput *output in _captureSession.outputs) {
-            [_captureSession removeOutput:output];
-        }
-
-        _previewLayer.session = nil;
-        _captureSession = nil;
-    }
+    if (_captureSession != nil) {
+        for (AVCaptureDeviceInput *input in _captureSession.inputs) {
+            [_captureSession removeInput:input];
+            if ([input.device hasMediaType:AVMediaTypeVideo]) {
+                [self removeVideoObservers:input.device];
+            }
+        }
+
+        for (AVCaptureOutput *output in _captureSession.outputs) {
+            [_captureSession removeOutput:output];
+        }
+
+        _previewLayer.session = nil;
+        _captureSession = nil;
+    }
     [self _reconfigureSession:_isRecording];
 }

 - (void)_progressTimerFired:(NSTimer *)progressTimer {
-    CMTime recordedDuration = _movieOutput.recordedDuration;
+    CMTime recordedDuration = _movieOutput.recordedDuration;

-    if (CMTIME_COMPARE_INLINE(recordedDuration, !=, _lastMovieFileOutputTime)) {
-        SCRecordSession *recordSession = _session;
-        id delegate = self.delegate;
+    if (CMTIME_COMPARE_INLINE(recordedDuration, !=, _lastMovieFileOutputTime)) {
+        SCRecordSession *recordSession = _session;
+        id delegate = self.delegate;

-        if (recordSession != nil) {
+        if (recordSession != nil) {
             __weak typeof(self) wSelf = self;
-            if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) {
-                dispatch_async(dispatch_get_main_queue(), ^{
-                    [delegate recorder:wSelf didAppendVideoSampleBufferInSession:recordSession];
-                });
-            }
-            if ([delegate respondsToSelector:@selector(recorder:didAppendAudioSampleBufferInSession:)]) {
-                dispatch_async(dispatch_get_main_queue(), ^{
-                    [delegate recorder:wSelf didAppendAudioSampleBufferInSession:wSelf.session];
-                });
-            }
-        }
-    }
-
-    _lastMovieFileOutputTime = recordedDuration;
+            if ([delegate respondsToSelector:@selector(recorder:didAppendVideoSampleBufferInSession:)]) {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    [delegate recorder:wSelf didAppendVideoSampleBufferInSession:recordSession];
+                });
+            }
+            if ([delegate respondsToSelector:@selector(recorder:didAppendAudioSampleBufferInSession:)]) {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    [delegate recorder:wSelf didAppendAudioSampleBufferInSession:wSelf.session];
+                });
+            }
+        }
+    }
+
+    _lastMovieFileOutputTime = recordedDuration;
 }

 - (void)prerecord {
@@ -515,203 +517,203 @@ - (void)prerecord {
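+// A minimal usage sketch (illustrative, not part of the upstream file):
+// -prerecord, whose body is elided between these hunks, appears intended to
+// warm the record session up so that a later -record call starts with less
+// latency. `recorder` is assumed to be a prepared SCRecorder instance.
+//
+//     [recorder prerecord];
+//     // ... later, when the user actually taps the record button:
+//     [recorder record];

 - (void)record {
     _didCaptureFirstAudioBuffer = NO;
     __weak typeof(self) wSelf = self;
-    void (^block)(void) = ^{
+    void (^block)(void) = ^{ typeof(self) 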
internal = wSelf; - internal->_isRecording = YES; - if (internal->_movieOutput != nil && internal.session != nil) { - internal->_movieOutput.maxRecordedDuration = internal.maxRecordDuration; - [self beginRecordSegmentIfNeeded:internal.session]; - if (internal->_movieOutputProgressTimer == nil) { - internal->_movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; - } - } - }; - - if ([SCRecorder isSessionQueue]) { - block(); - } else { - dispatch_sync(_sessionQueue, block); - } + internal->_isRecording = YES; + if (internal->_movieOutput != nil && internal.session != nil) { + internal->_movieOutput.maxRecordedDuration = internal.maxRecordDuration; + [self beginRecordSegmentIfNeeded:internal.session]; + if (internal->_movieOutputProgressTimer == nil) { + internal->_movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; + } + } + }; + + if ([SCRecorder isSessionQueue]) { + block(); + } else { + dispatch_sync(_sessionQueue, block); + } } - (void)pause { - [self pause:nil]; + [self pause:nil]; } - (void)pause:(void(^)(void))completionHandler { - _isRecording = NO; + _isRecording = NO; __weak typeof(self) wSelf = self; - void (^block)(void) = ^{ + void (^block)(void) = ^{ typeof(self) iSelf = wSelf; - SCRecordSession *recordSession = iSelf->_session; - - if (recordSession != nil) { - if (recordSession.recordSegmentReady) { - NSDictionary *info = [iSelf _createSegmentInfo]; - if (recordSession.isUsingMovieFileOutput) { - [iSelf->_movieOutputProgressTimer invalidate]; - iSelf->_movieOutputProgressTimer = nil; - if ([recordSession endSegmentWithInfo:info completionHandler:nil]) { - iSelf->_pauseCompletionHandler = completionHandler; - } else { - dispatch_handler(completionHandler); - } - } else { - [recordSession endSegmentWithInfo:info completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - id delegate = iSelf.delegate; - if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { - [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; - } - if (completionHandler != nil) { - completionHandler(); - } - }]; - } - } else { - dispatch_handler(completionHandler); - } - } else { - dispatch_handler(completionHandler); - } - }; - - if ([SCRecorder isSessionQueue]) { - block(); - } else { - dispatch_async(_sessionQueue, block); - } + SCRecordSession *recordSession = iSelf->_session; + + if (recordSession != nil) { + if (recordSession.recordSegmentReady) { + NSDictionary *info = [iSelf _createSegmentInfo]; + if (recordSession.isUsingMovieFileOutput) { + [iSelf->_movieOutputProgressTimer invalidate]; + iSelf->_movieOutputProgressTimer = nil; + if ([recordSession endSegmentWithInfo:info completionHandler:nil]) { + iSelf->_pauseCompletionHandler = completionHandler; + } else { + dispatch_handler(completionHandler); + } + } else { + [recordSession endSegmentWithInfo:info completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { + id delegate = iSelf.delegate; + if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { + [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; + } + if (completionHandler != nil) { + completionHandler(); + } + }]; + } + } else { + dispatch_handler(completionHandler); + } + } else { + 
dispatch_handler(completionHandler); + } + }; + + if ([SCRecorder isSessionQueue]) { + block(); + } else { + dispatch_async(_sessionQueue, block); + } } + (NSError*)createError:(NSString*)errorDescription { - return [NSError errorWithDomain:@"SCRecorder" code:200 userInfo:@{NSLocalizedDescriptionKey : errorDescription}]; + return [NSError errorWithDomain:@"SCRecorder" code:200 userInfo:@{NSLocalizedDescriptionKey : errorDescription}]; } - (void)beginRecordSegmentIfNeeded:(SCRecordSession *)recordSession { - if (!recordSession.recordSegmentBegan) { - NSError *error = nil; - BOOL beginSegment = YES; - if (_movieOutput != nil && self.fastRecordMethodEnabled) { - if (recordSession.recordSegmentReady || !recordSession.isUsingMovieFileOutput) { - [recordSession beginRecordSegmentUsingMovieFileOutput:_movieOutput error:&error delegate:self]; - } else { - beginSegment = NO; - } - } else { - [recordSession beginSegment:&error]; - } - - id delegate = self.delegate; - if (beginSegment && [delegate respondsToSelector:@selector(recorder:didBeginSegmentInSession:error:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didBeginSegmentInSession:recordSession error:error]; - }); - } - } + if (!recordSession.recordSegmentBegan) { + NSError *error = nil; + BOOL beginSegment = YES; + if (_movieOutput != nil && self.fastRecordMethodEnabled) { + if (recordSession.recordSegmentReady || !recordSession.isUsingMovieFileOutput) { + [recordSession beginRecordSegmentUsingMovieFileOutput:_movieOutput error:&error delegate:self]; + } else { + beginSegment = NO; + } + } else { + [recordSession beginSegment:&error]; + } + + id delegate = self.delegate; + if (beginSegment && [delegate respondsToSelector:@selector(recorder:didBeginSegmentInSession:error:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:self didBeginSegmentInSession:recordSession error:error]; + }); + } + } } - (void)checkRecordSessionDuration:(SCRecordSession *)recordSession { - CMTime currentRecordDuration = recordSession.duration; - CMTime suggestedMaxRecordDuration = _maxRecordDuration; - - if (CMTIME_IS_VALID(suggestedMaxRecordDuration)) { - if (CMTIME_COMPARE_INLINE(currentRecordDuration, >=, suggestedMaxRecordDuration)) { - _isRecording = NO; - - dispatch_async(_sessionQueue, ^{ - [recordSession endSegmentWithInfo:[self _createSegmentInfo] completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - id delegate = self.delegate; - if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { - [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; - } + CMTime currentRecordDuration = recordSession.duration; + CMTime suggestedMaxRecordDuration = _maxRecordDuration; + + if (CMTIME_IS_VALID(suggestedMaxRecordDuration)) { + if (CMTIME_COMPARE_INLINE(currentRecordDuration, >=, suggestedMaxRecordDuration)) { + _isRecording = NO; + + dispatch_async(_sessionQueue, ^{ + [recordSession endSegmentWithInfo:[self _createSegmentInfo] completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { + id delegate = self.delegate; + if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { + [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; + } - if ([delegate respondsToSelector:@selector(recorder:didCompleteSession:)]) { - [delegate recorder:self didCompleteSession:recordSession]; - } - }]; - }); - } - } + if ([delegate 
respondsToSelector:@selector(recorder:didCompleteSession:)]) { + [delegate recorder:self didCompleteSession:recordSession]; + } + }]; + }); + } + } } - (CMTime)frameDurationFromConnection:(AVCaptureConnection *)connection { - AVCaptureDevice *device = [self currentVideoDeviceInput].device; + AVCaptureDevice *device = [self currentVideoDeviceInput].device; - if ([device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { - return device.activeVideoMinFrameDuration; - } + if ([device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { + return device.activeVideoMinFrameDuration; + } #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" - return connection.videoMinFrameDuration; + return connection.videoMinFrameDuration; #pragma clang diagnostic pop } - (SCFilter *)_transformFilterUsingBufferWidth:(size_t)bufferWidth bufferHeight:(size_t)bufferHeight mirrored:(BOOL)mirrored { - if (_transformFilter == nil || _transformFilterBufferWidth != bufferWidth || _transformFilterBufferHeight != bufferHeight) { - BOOL shouldMirrorBuffer = _keepMirroringOnWrite && mirrored; + if (_transformFilter == nil || _transformFilterBufferWidth != bufferWidth || _transformFilterBufferHeight != bufferHeight) { + BOOL shouldMirrorBuffer = _keepMirroringOnWrite && mirrored; - if (!shouldMirrorBuffer) { - _transformFilter = nil; - } else { - CGAffineTransform tx = CGAffineTransformIdentity; + if (!shouldMirrorBuffer) { + _transformFilter = nil; + } else { + CGAffineTransform tx = CGAffineTransformIdentity; - _transformFilter = [SCFilter filterWithAffineTransform:CGAffineTransformTranslate(CGAffineTransformScale(tx, -1, 1), -(CGFloat)bufferWidth, 0)]; - } + _transformFilter = [SCFilter filterWithAffineTransform:CGAffineTransformTranslate(CGAffineTransformScale(tx, -1, 1), -(CGFloat)bufferWidth, 0)]; + } - _transformFilterBufferWidth = bufferWidth; - _transformFilterBufferHeight = bufferHeight; - } + _transformFilterBufferWidth = bufferWidth; + _transformFilterBufferHeight = bufferHeight; + } - return _transformFilter; + return _transformFilter; } - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer toRecordSession:(SCRecordSession *)recordSession duration:(CMTime)duration connection:(AVCaptureConnection *)connection completion:(void(^)(BOOL success))completion { - CVPixelBufferRef sampleBufferImage = CMSampleBufferGetImageBuffer(sampleBuffer); + CVPixelBufferRef sampleBufferImage = CMSampleBufferGetImageBuffer(sampleBuffer); - size_t bufferWidth = (CGFloat)CVPixelBufferGetWidth(sampleBufferImage); - size_t bufferHeight = (CGFloat)CVPixelBufferGetHeight(sampleBufferImage); + size_t bufferWidth = (CGFloat)CVPixelBufferGetWidth(sampleBufferImage); + size_t bufferHeight = (CGFloat)CVPixelBufferGetHeight(sampleBufferImage); - CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - SCFilter *filterGroup = _videoConfiguration.filter; - SCFilter *transformFilter = [self _transformFilterUsingBufferWidth:bufferWidth bufferHeight:bufferHeight mirrored: - _device == AVCaptureDevicePositionFront - ]; + CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + SCFilter *filterGroup = _videoConfiguration.filter; + SCFilter *transformFilter = [self _transformFilterUsingBufferWidth:bufferWidth bufferHeight:bufferHeight mirrored: + _device == AVCaptureDevicePositionFront + ]; - if (filterGroup == nil && transformFilter == nil) { - [recordSession appendVideoPixelBuffer:sampleBufferImage atTime:time duration:duration completion:completion]; - 
return; - } + if (filterGroup == nil && transformFilter == nil) { + [recordSession appendVideoPixelBuffer:sampleBufferImage atTime:time duration:duration completion:completion]; + return; + } - CVPixelBufferRef pixelBuffer = [recordSession createPixelBuffer]; + CVPixelBufferRef pixelBuffer = [recordSession createPixelBuffer]; - if (pixelBuffer == nil) { - completion(NO); - return; - } + if (pixelBuffer == nil) { + completion(NO); + return; + } - CIImage *image = [CIImage imageWithCVPixelBuffer:sampleBufferImage]; - CFTimeInterval seconds = CMTimeGetSeconds(time); + CIImage *image = [CIImage imageWithCVPixelBuffer:sampleBufferImage]; + CFTimeInterval seconds = CMTimeGetSeconds(time); - if (transformFilter != nil) { - image = [transformFilter imageByProcessingImage:image atTime:seconds]; - } + if (transformFilter != nil) { + image = [transformFilter imageByProcessingImage:image atTime:seconds]; + } - if (filterGroup != nil) { - image = [filterGroup imageByProcessingImage:image atTime:seconds]; - } + if (filterGroup != nil) { + image = [filterGroup imageByProcessingImage:image atTime:seconds]; + } - CVPixelBufferLockBaseAddress(pixelBuffer, 0); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); - [_context render:image toCVPixelBuffer:pixelBuffer]; + [_context render:image toCVPixelBuffer:pixelBuffer]; - [recordSession appendVideoPixelBuffer:pixelBuffer atTime:time duration:duration completion:^(BOOL success) { - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + [recordSession appendVideoPixelBuffer:pixelBuffer atTime:time duration:duration completion:^(BOOL success) { + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - CVPixelBufferRelease(pixelBuffer); + CVPixelBufferRelease(pixelBuffer); - completion(success); - }]; + completion(success); + }]; } - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections { @@ -727,17 +729,17 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOu } - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { - _isRecording = NO; + _isRecording = NO; __weak typeof(self) wSelf = self; - dispatch_async(_sessionQueue, ^{ + dispatch_async(_sessionQueue, ^{ typeof(self) iSelf = wSelf; - BOOL hasComplete = NO; - NSError *actualError = error; - if ([actualError.localizedDescription isEqualToString:@"Recording Stopped"]) { - actualError = nil; - hasComplete = YES; - } + BOOL hasComplete = NO; + NSError *actualError = error; + if ([actualError.localizedDescription isEqualToString:@"Recording Stopped"]) { + actualError = nil; + hasComplete = YES; + } [iSelf->_session appendRecordSegmentUrl:outputFileURL info:[iSelf _createSegmentInfo] error:actualError completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { void (^pauseCompletionHandler)(void) = iSelf->_pauseCompletionHandler; @@ -763,48 +765,48 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToO } }]; - if (iSelf->_isRecording) { - [iSelf record]; - } - }); + if (iSelf->_isRecording) { + [iSelf record]; + } + }); } - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SCRecordSession *)recordSession connection:(AVCaptureConnection *)connection { - if (!recordSession.videoInitializationFailed && !_videoConfiguration.shouldIgnore) { - if (!recordSession.videoInitialized) { - NSError *error = nil; - NSDictionary 
*settings = [self.videoConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer];
+    if (!recordSession.videoInitializationFailed && !_videoConfiguration.shouldIgnore) {
+        if (!recordSession.videoInitialized) {
+            NSError *error = nil;
+            NSDictionary *settings = [self.videoConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer];

-        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
-        [recordSession initializeVideo:settings formatDescription:formatDescription error:&error];
+            CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
+            [recordSession initializeVideo:settings formatDescription:formatDescription error:&error];
 //            NSLog(@"INITIALIZED VIDEO");

-        id delegate = self.delegate;
-        if ([delegate respondsToSelector:@selector(recorder:didInitializeVideoInSession:error:)]) {
-            dispatch_async(dispatch_get_main_queue(), ^{
-                [delegate recorder:self didInitializeVideoInSession:recordSession error:error];
-            });
-        }
-    }
-
-    if (!self.audioEnabledAndReady || recordSession.audioInitialized || recordSession.audioInitializationFailed) {
-        [self beginRecordSegmentIfNeeded:recordSession];
-
-        if (_isRecording && recordSession.recordSegmentReady) {
-            id delegate = self.delegate;
-            CMTime duration = [self frameDurationFromConnection:connection];
-
-            double timeToWait = kMinTimeBetweenAppend - (CACurrentMediaTime() - _lastAppendedVideoTime);
-
-            if (timeToWait > 0) {
-                // Letting some time to for the AVAssetWriter to be ready
-                // NSLog(@"Too fast! Waiting %fs", timeToWait);
-                [NSThread sleepForTimeInterval:timeToWait];
-            }
-            BOOL isFirstVideoBuffer = !recordSession.currentSegmentHasVideo;
+            id delegate = self.delegate;
+            if ([delegate respondsToSelector:@selector(recorder:didInitializeVideoInSession:error:)]) {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    [delegate recorder:self didInitializeVideoInSession:recordSession error:error];
+                });
+            }
+        }
+
+        if (!self.audioEnabledAndReady || recordSession.audioInitialized || recordSession.audioInitializationFailed) {
+            [self beginRecordSegmentIfNeeded:recordSession];
+
+            if (_isRecording && recordSession.recordSegmentReady) {
+                id delegate = self.delegate;
+                CMTime duration = [self frameDurationFromConnection:connection];
+
+                double timeToWait = kMinTimeBetweenAppend - (CACurrentMediaTime() - _lastAppendedVideoTime);
+
+                if (timeToWait > 0) {
+                    // Leave some time for the AVAssetWriter to be ready
+                    // NSLog(@"Too fast! 
Waiting %fs", timeToWait); + [NSThread sleepForTimeInterval:timeToWait]; + } + BOOL isFirstVideoBuffer = !recordSession.currentSegmentHasVideo; // NSLog(@"APPENDING"); __weak typeof(self) wSelf = self; - [self appendVideoSampleBuffer:sampleBuffer + [self appendVideoSampleBuffer:sampleBuffer toRecordSession:recordSession duration:duration connection:connection @@ -828,71 +830,71 @@ - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC } }]; - if (isFirstVideoBuffer && !recordSession.currentSegmentHasAudio) { - CMSampleBufferRef audioBuffer = _lastAudioBuffer.sampleBuffer; - if (audioBuffer != nil) { - CMTime lastAudioEndTime = CMTimeAdd(CMSampleBufferGetPresentationTimeStamp(audioBuffer), CMSampleBufferGetDuration(audioBuffer)); + if (isFirstVideoBuffer && !recordSession.currentSegmentHasAudio) { + CMSampleBufferRef audioBuffer = _lastAudioBuffer.sampleBuffer; + if (audioBuffer != nil) { + CMTime lastAudioEndTime = CMTimeAdd(CMSampleBufferGetPresentationTimeStamp(audioBuffer), CMSampleBufferGetDuration(audioBuffer)); CMTime videoStartTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - // If the end time of the last audio buffer is after this video buffer, we need to re-use it, - // since it was skipped on the last cycle to wait until the video becomes ready. - if (CMTIME_COMPARE_INLINE(lastAudioEndTime, >, videoStartTime)) { + // If the end time of the last audio buffer is after this video buffer, we need to re-use it, + // since it was skipped on the last cycle to wait until the video becomes ready. + if (CMTIME_COMPARE_INLINE(lastAudioEndTime, >, videoStartTime)) { [self _handleAudioSampleBuffer:audioBuffer withSession:recordSession]; - } - } - } - } - } else { + } + } + } + } + } else { // NSLog(@"SKIPPING"); - } - } + } + } } - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SCRecordSession *)recordSession { - if (!recordSession.audioInitializationFailed && !_audioConfiguration.shouldIgnore) { - if (!recordSession.audioInitialized) { - NSError *error = nil; - NSDictionary *settings = [self.audioConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer]; - CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer); - [recordSession initializeAudio:settings formatDescription:formatDescription error:&error]; + if (!recordSession.audioInitializationFailed && !_audioConfiguration.shouldIgnore) { + if (!recordSession.audioInitialized) { + NSError *error = nil; + NSDictionary *settings = [self.audioConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer]; + CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer); + [recordSession initializeAudio:settings formatDescription:formatDescription error:&error]; // NSLog(@"INITIALIZED AUDIO"); - id delegate = self.delegate; - if ([delegate respondsToSelector:@selector(recorder:didInitializeAudioInSession:error:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didInitializeAudioInSession:recordSession error:error]; - }); - } - } + id delegate = self.delegate; + if ([delegate respondsToSelector:@selector(recorder:didInitializeAudioInSession:error:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:self didInitializeAudioInSession:recordSession error:error]; + }); + } + } - if (!self.videoEnabledAndReady || recordSession.videoInitialized || recordSession.videoInitializationFailed) { - [self beginRecordSegmentIfNeeded:recordSession]; + if 
(!self.videoEnabledAndReady || recordSession.videoInitialized || recordSession.videoInitializationFailed) { + [self beginRecordSegmentIfNeeded:recordSession]; - if (_isRecording && recordSession.recordSegmentReady && (!self.videoEnabledAndReady || recordSession.currentSegmentHasVideo)) { - id delegate = self.delegate; + if (_isRecording && recordSession.recordSegmentReady && (!self.videoEnabledAndReady || recordSession.currentSegmentHasVideo)) { + id delegate = self.delegate; // NSLog(@"APPENDING"); - [recordSession appendAudioSampleBuffer:sampleBuffer completion:^(BOOL success) { - if (success) { - if ([delegate respondsToSelector:@selector(recorder:didAppendAudioSampleBufferInSession:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didAppendAudioSampleBufferInSession:recordSession]; - }); - } - - [self checkRecordSessionDuration:recordSession]; - } else { - if ([delegate respondsToSelector:@selector(recorder:didSkipAudioSampleBufferInSession:)]) { - dispatch_async(dispatch_get_main_queue(), ^{ - [delegate recorder:self didSkipAudioSampleBufferInSession:recordSession]; - }); - } - } - }]; - } else { + [recordSession appendAudioSampleBuffer:sampleBuffer completion:^(BOOL success) { + if (success) { + if ([delegate respondsToSelector:@selector(recorder:didAppendAudioSampleBufferInSession:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:self didAppendAudioSampleBufferInSession:recordSession]; + }); + } + + [self checkRecordSessionDuration:recordSession]; + } else { + if ([delegate respondsToSelector:@selector(recorder:didSkipAudioSampleBufferInSession:)]) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:self didSkipAudioSampleBufferInSession:recordSession]; + }); + } + } + }]; + } else { // NSLog(@"SKIPPING"); - } - } + } + } if (hasDidAcquireAudioBuffer) { CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); @@ -933,7 +935,7 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC free(data); }); } - } + } } - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { @@ -1003,519 +1005,519 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM } - (NSDictionary *)_createSegmentInfo { - id delegate = self.delegate; - NSDictionary *segmentInfo = nil; + id delegate = self.delegate; + NSDictionary *segmentInfo = nil; - if ([delegate respondsToSelector:@selector(createSegmentInfoForRecorder:)]) { - segmentInfo = [delegate createSegmentInfoForRecorder:self]; - } + if ([delegate respondsToSelector:@selector(createSegmentInfoForRecorder:)]) { + segmentInfo = [delegate createSegmentInfoForRecorder:self]; + } - return segmentInfo; + return segmentInfo; } - (void)_focusDidComplete { - id delegate = self.delegate; + id delegate = self.delegate; - [self setAdjustingFocus:NO]; + [self setAdjustingFocus:NO]; - if ([delegate respondsToSelector:@selector(recorderDidEndFocus:)]) { - [delegate recorderDidEndFocus:self]; - } + if ([delegate respondsToSelector:@selector(recorderDidEndFocus:)]) { + [delegate recorderDidEndFocus:self]; + } - if (_needsSwitchBackToContinuousFocus) { - _needsSwitchBackToContinuousFocus = NO; - [self continuousFocusAtPoint:self.focusPointOfInterest]; - } + if (_needsSwitchBackToContinuousFocus) { + _needsSwitchBackToContinuousFocus = NO; + [self continuousFocusAtPoint:self.focusPointOfInterest]; + } } - 
(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { - id delegate = self.delegate; - - if (context == SCRecorderFocusContext) { - BOOL isFocusing = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; - if (isFocusing) { - [self setAdjustingFocus:YES]; - - if ([delegate respondsToSelector:@selector(recorderDidStartFocus:)]) { - [delegate recorderDidStartFocus:self]; - } - } else { - [self _focusDidComplete]; - } - } else if (context == SCRecorderExposureContext) { - BOOL isAdjustingExposure = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; - - [self setAdjustingExposure:isAdjustingExposure]; - - if (isAdjustingExposure) { - if ([delegate respondsToSelector:@selector(recorderDidStartAdjustingExposure:)]) { - [delegate recorderDidStartAdjustingExposure:self]; - } - } else { - if ([delegate respondsToSelector:@selector(recorderDidEndAdjustingExposure:)]) { - [delegate recorderDidEndAdjustingExposure:self]; - } - } - } else if (context == SCRecorderAudioEnabledContext) { - if ([NSThread isMainThread]) { - [self reconfigureVideoInput:NO audioInput:YES]; - } else { - dispatch_sync(dispatch_get_main_queue(), ^{ - [self reconfigureVideoInput:NO audioInput:YES]; - }); - } - } else if (context == SCRecorderVideoEnabledContext) { - if ([NSThread isMainThread]) { - [self reconfigureVideoInput:YES audioInput:NO]; - } else { - dispatch_sync(dispatch_get_main_queue(), ^{ - [self reconfigureVideoInput:YES audioInput:NO]; - }); - } - } else if (context == SCRecorderPhotoOptionsContext) { - _photoOutput.outputSettings = [_photoConfiguration createOutputSettings]; - } + id delegate = self.delegate; + + if (context == SCRecorderFocusContext) { + BOOL isFocusing = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; + if (isFocusing) { + [self setAdjustingFocus:YES]; + + if ([delegate respondsToSelector:@selector(recorderDidStartFocus:)]) { + [delegate recorderDidStartFocus:self]; + } + } else { + [self _focusDidComplete]; + } + } else if (context == SCRecorderExposureContext) { + BOOL isAdjustingExposure = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; + + [self setAdjustingExposure:isAdjustingExposure]; + + if (isAdjustingExposure) { + if ([delegate respondsToSelector:@selector(recorderDidStartAdjustingExposure:)]) { + [delegate recorderDidStartAdjustingExposure:self]; + } + } else { + if ([delegate respondsToSelector:@selector(recorderDidEndAdjustingExposure:)]) { + [delegate recorderDidEndAdjustingExposure:self]; + } + } + } else if (context == SCRecorderAudioEnabledContext) { + if ([NSThread isMainThread]) { + [self reconfigureVideoInput:NO audioInput:YES]; + } else { + dispatch_sync(dispatch_get_main_queue(), ^{ + [self reconfigureVideoInput:NO audioInput:YES]; + }); + } + } else if (context == SCRecorderVideoEnabledContext) { + if ([NSThread isMainThread]) { + [self reconfigureVideoInput:YES audioInput:NO]; + } else { + dispatch_sync(dispatch_get_main_queue(), ^{ + [self reconfigureVideoInput:YES audioInput:NO]; + }); + } + } else if (context == SCRecorderPhotoOptionsContext) { + _photoOutput.outputSettings = [_photoConfiguration createOutputSettings]; + } } - (void)addVideoObservers:(AVCaptureDevice*)videoDevice { - [videoDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:SCRecorderFocusContext]; - [videoDevice addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:SCRecorderExposureContext]; + [videoDevice 
addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:SCRecorderFocusContext]; + [videoDevice addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:SCRecorderExposureContext]; } - (void)removeVideoObservers:(AVCaptureDevice*)videoDevice { - [videoDevice removeObserver:self forKeyPath:@"adjustingFocus"]; - [videoDevice removeObserver:self forKeyPath:@"adjustingExposure"]; + [videoDevice removeObserver:self forKeyPath:@"adjustingFocus"]; + [videoDevice removeObserver:self forKeyPath:@"adjustingExposure"]; } - (void)_configureVideoStabilization { - AVCaptureConnection *videoConnection = [self videoConnection]; - if ([videoConnection isVideoStabilizationSupported]) { - if ([videoConnection respondsToSelector:@selector(setPreferredVideoStabilizationMode:)]) { - videoConnection.preferredVideoStabilizationMode = _videoStabilizationMode; - } - } + AVCaptureConnection *videoConnection = [self videoConnection]; + if ([videoConnection isVideoStabilizationSupported]) { + if ([videoConnection respondsToSelector:@selector(setPreferredVideoStabilizationMode:)]) { + videoConnection.preferredVideoStabilizationMode = _videoStabilizationMode; + } + } } - (void)_configureFrontCameraMirroring:(BOOL)videoMirrored { - AVCaptureConnection *videoConnection = [self videoConnection]; - if ([videoConnection isVideoMirroringSupported]) { - if ([videoConnection respondsToSelector:@selector(setVideoMirrored:)]) { - videoConnection.videoMirrored = videoMirrored; - } - } + AVCaptureConnection *videoConnection = [self videoConnection]; + if ([videoConnection isVideoMirroringSupported]) { + if ([videoConnection respondsToSelector:@selector(setVideoMirrored:)]) { + videoConnection.videoMirrored = videoMirrored; + } + } } - (void)configureDevice:(AVCaptureDevice*)newDevice mediaType:(NSString*)mediaType error:(NSError**)error { - AVCaptureDeviceInput *currentInput = [self currentDeviceInputForMediaType:mediaType]; - AVCaptureDevice *currentUsedDevice = currentInput.device; - - if (currentUsedDevice != newDevice) { - if ([mediaType isEqualToString:AVMediaTypeVideo]) { - NSError *error; - if ([newDevice lockForConfiguration:&error]) { - if (newDevice.isSmoothAutoFocusSupported) { - newDevice.smoothAutoFocusEnabled = YES; - } - newDevice.subjectAreaChangeMonitoringEnabled = true; - - if (newDevice.isLowLightBoostSupported) { - newDevice.automaticallyEnablesLowLightBoostWhenAvailable = YES; - } - [newDevice unlockForConfiguration]; - } else { - NSLog(@"Failed to configure device: %@", error); - } - _videoInputAdded = NO; - } else { - _audioInputAdded = NO; - } - - AVCaptureDeviceInput *newInput = nil; - - if (newDevice != nil) { - newInput = [[AVCaptureDeviceInput alloc] initWithDevice:newDevice error:error]; - } - - if (*error == nil) { - if (currentInput != nil) { - [_captureSession removeInput:currentInput]; - if ([currentInput.device hasMediaType:AVMediaTypeVideo]) { - [self removeVideoObservers:currentInput.device]; - } - } - - if (newInput != nil) { - if ([_captureSession canAddInput:newInput]) { - [_captureSession addInput:newInput]; - if ([newInput.device hasMediaType:AVMediaTypeVideo]) { - _videoInputAdded = YES; - - [self addVideoObservers:newInput.device]; - [self _configureVideoStabilization]; - [self _configureFrontCameraMirroring:_mirrorOnFrontCamera && newInput.device.position == AVCaptureDevicePositionFront]; - - } else { - _audioInputAdded = YES; - } - } else { - *error = [SCRecorder createError:@"Failed to add input to capture 
session"]; - } - } - } - } + AVCaptureDeviceInput *currentInput = [self currentDeviceInputForMediaType:mediaType]; + AVCaptureDevice *currentUsedDevice = currentInput.device; + + if (currentUsedDevice != newDevice) { + if ([mediaType isEqualToString:AVMediaTypeVideo]) { + NSError *error; + if ([newDevice lockForConfiguration:&error]) { + if (newDevice.isSmoothAutoFocusSupported) { + newDevice.smoothAutoFocusEnabled = YES; + } + newDevice.subjectAreaChangeMonitoringEnabled = true; + + if (newDevice.isLowLightBoostSupported) { + newDevice.automaticallyEnablesLowLightBoostWhenAvailable = YES; + } + [newDevice unlockForConfiguration]; + } else { + NSLog(@"Failed to configure device: %@", error); + } + _videoInputAdded = NO; + } else { + _audioInputAdded = NO; + } + + AVCaptureDeviceInput *newInput = nil; + + if (newDevice != nil) { + newInput = [[AVCaptureDeviceInput alloc] initWithDevice:newDevice error:error]; + } + + if (*error == nil) { + if (currentInput != nil) { + [_captureSession removeInput:currentInput]; + if ([currentInput.device hasMediaType:AVMediaTypeVideo]) { + [self removeVideoObservers:currentInput.device]; + } + } + + if (newInput != nil) { + if ([_captureSession canAddInput:newInput]) { + [_captureSession addInput:newInput]; + if ([newInput.device hasMediaType:AVMediaTypeVideo]) { + _videoInputAdded = YES; + + [self addVideoObservers:newInput.device]; + [self _configureVideoStabilization]; + [self _configureFrontCameraMirroring:_mirrorOnFrontCamera && newInput.device.position == AVCaptureDevicePositionFront]; + + } else { + _audioInputAdded = YES; + } + } else { + *error = [SCRecorder createError:@"Failed to add input to capture session"]; + } + } + } + } } - (void)reconfigureVideoInput:(BOOL)shouldConfigureVideo audioInput:(BOOL)shouldConfigureAudio { - if (_reconfiguring) { - return; - } - - if (_captureSession != nil) { - _reconfiguring = YES; - [self beginConfiguration]; - - NSError *videoError = nil; - if (shouldConfigureVideo) { - [self configureDevice:[self videoDevice] mediaType:AVMediaTypeVideo error:&videoError]; - _transformFilter = nil; - dispatch_sync(_sessionQueue, ^{ - [self updateVideoOrientation]; - }); - } - - NSError *audioError = nil; - - if (shouldConfigureAudio) { - [self configureDevice:[self audioDevice] mediaType:AVMediaTypeAudio error:&audioError]; - } - - [self commitConfiguration]; - - id delegate = self.delegate; - if (shouldConfigureAudio) { - if ([delegate respondsToSelector:@selector(recorder:didReconfigureAudioInput:)]) { - [delegate recorder:self didReconfigureAudioInput:audioError]; - } - } - if (shouldConfigureVideo) { - if ([delegate respondsToSelector:@selector(recorder:didReconfigureVideoInput:)]) { - [delegate recorder:self didReconfigureVideoInput:videoError]; - } - } - - _reconfiguring = NO; - } + if (_reconfiguring) { + return; + } + + if (_captureSession != nil) { + _reconfiguring = YES; + [self beginConfiguration]; + + NSError *videoError = nil; + if (shouldConfigureVideo) { + [self configureDevice:[self videoDevice] mediaType:AVMediaTypeVideo error:&videoError]; + _transformFilter = nil; + dispatch_sync(_sessionQueue, ^{ + [self updateVideoOrientation]; + }); + } + + NSError *audioError = nil; + + if (shouldConfigureAudio) { + [self configureDevice:[self audioDevice] mediaType:AVMediaTypeAudio error:&audioError]; + } + + [self commitConfiguration]; + + id delegate = self.delegate; + if (shouldConfigureAudio) { + if ([delegate respondsToSelector:@selector(recorder:didReconfigureAudioInput:)]) { + [delegate recorder:self 
didReconfigureAudioInput:audioError];
+            }
+        }
+        if (shouldConfigureVideo) {
+            if ([delegate respondsToSelector:@selector(recorder:didReconfigureVideoInput:)]) {
+                [delegate recorder:self didReconfigureVideoInput:videoError];
+            }
+        }
+
+        _reconfiguring = NO;
+    }
 }

 - (void)switchCaptureDevices {
-    if (self.device == AVCaptureDevicePositionBack) {
-        self.device = AVCaptureDevicePositionFront;
-    } else {
-        self.device = AVCaptureDevicePositionBack;
-    }
+    if (self.device == AVCaptureDevicePositionBack) {
+        self.device = AVCaptureDevicePositionFront;
+    } else {
+        self.device = AVCaptureDevicePositionBack;
+    }
 }

 - (void)previewViewFrameChanged {
-    _previewLayer.affineTransform = CGAffineTransformIdentity;
-    _previewLayer.frame = _previewView.bounds;
+    _previewLayer.affineTransform = CGAffineTransformIdentity;
+    _previewLayer.frame = _previewView.bounds;
 }

 #pragma mark - FOCUS

 - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates {
-    return [self.previewLayer captureDevicePointOfInterestForPoint:viewCoordinates];
+    return [self.previewLayer captureDevicePointOfInterestForPoint:viewCoordinates];
 }

 - (CGPoint)convertPointOfInterestToViewCoordinates:(CGPoint)pointOfInterest {
-    return [self.previewLayer pointForCaptureDevicePointOfInterest:pointOfInterest];
+    return [self.previewLayer pointForCaptureDevicePointOfInterest:pointOfInterest];
 }

 - (void)mediaServicesWereReset:(NSNotification *)notification {
-    NSLog(@"MEDIA SERVICES WERE RESET");
+    NSLog(@"MEDIA SERVICES WERE RESET");
 }

 - (void)mediaServicesWereLost:(NSNotification *)notification {
-    NSLog(@"MEDIA SERVICES WERE LOST");
+    NSLog(@"MEDIA SERVICES WERE LOST");
 }

 - (void)sessionInterrupted:(NSNotification *)notification {
-    NSNumber *interruption = [notification.userInfo objectForKey:AVAudioSessionInterruptionOptionKey];
+    NSNumber *interruption = [notification.userInfo objectForKey:AVAudioSessionInterruptionOptionKey];

-    if (interruption != nil) {
-        AVAudioSessionInterruptionOptions options = interruption.unsignedIntValue;
-        if (options == AVAudioSessionInterruptionOptionShouldResume) {
-            [self reconfigureVideoInput:NO audioInput:self.audioConfiguration.enabled];
-        }
-    }
+    if (interruption != nil) {
+        AVAudioSessionInterruptionOptions options = interruption.unsignedIntValue;
+        if (options == AVAudioSessionInterruptionOptionShouldResume) {
+            [self reconfigureVideoInput:NO audioInput:self.audioConfiguration.enabled];
+        }
+    }
 }

 - (void)lockFocus {
-    AVCaptureDevice *device = [self.currentVideoDeviceInput device];
-    if ([device isFocusModeSupported:AVCaptureFocusModeLocked]) {
-        NSError *error;
-        if ([device lockForConfiguration:&error]) {
-            [device setFocusMode:AVCaptureFocusModeLocked];
-            [device unlockForConfiguration];
-        }
-    }
+    AVCaptureDevice *device = [self.currentVideoDeviceInput device];
+    if ([device isFocusModeSupported:AVCaptureFocusModeLocked]) {
+        NSError *error;
+        if ([device lockForConfiguration:&error]) {
+            [device setFocusMode:AVCaptureFocusModeLocked];
+            [device unlockForConfiguration];
+        }
+    }
 }
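+// A minimal tap-to-focus sketch (illustrative, not part of the upstream file):
+// converting a tap in the preview view into a capture-device point of interest
+// before focusing. `recorder` and `tapPoint` (a point in the preview view's
+// coordinate space) are assumed.
+//
+//     CGPoint poi = [recorder convertToPointOfInterestFromViewCoordinates:tapPoint];
+//     [recorder autoFocusAtPoint:poi];

 - (void)_applyPointOfInterest:(CGPoint)point continuousMode:(BOOL)continuousMode {
-    AVCaptureDevice *device = [self.currentVideoDeviceInput device];
-    AVCaptureFocusMode focusMode = continuousMode ? AVCaptureFocusModeContinuousAutoFocus : AVCaptureFocusModeAutoFocus;
-    AVCaptureExposureMode exposureMode = continuousMode ? AVCaptureExposureModeContinuousAutoExposure : AVCaptureExposureModeAutoExpose;
-    AVCaptureWhiteBalanceMode whiteBalanceMode = continuousMode ? 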
AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance : AVCaptureWhiteBalanceModeAutoWhiteBalance; - - NSError *error; - if ([device lockForConfiguration:&error]) { - BOOL focusing = NO; - BOOL adjustingExposure = NO; - - if (device.isFocusPointOfInterestSupported) { - device.focusPointOfInterest = point; - } - if ([device isFocusModeSupported:focusMode]) { - device.focusMode = focusMode; - focusing = YES; - } + AVCaptureDevice *device = [self.currentVideoDeviceInput device]; + AVCaptureFocusMode focusMode = continuousMode ? AVCaptureFocusModeContinuousAutoFocus : AVCaptureFocusModeAutoFocus; + AVCaptureExposureMode exposureMode = continuousMode ? AVCaptureExposureModeContinuousAutoExposure : AVCaptureExposureModeAutoExpose; + AVCaptureWhiteBalanceMode whiteBalanceMode = continuousMode ? AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance : AVCaptureWhiteBalanceModeAutoWhiteBalance; + + NSError *error; + if ([device lockForConfiguration:&error]) { + BOOL focusing = NO; + BOOL adjustingExposure = NO; + + if (device.isFocusPointOfInterestSupported) { + device.focusPointOfInterest = point; + } + if ([device isFocusModeSupported:focusMode]) { + device.focusMode = focusMode; + focusing = YES; + } - if (device.isExposurePointOfInterestSupported) { - device.exposurePointOfInterest = point; - } + if (device.isExposurePointOfInterestSupported) { + device.exposurePointOfInterest = point; + } - if ([device isExposureModeSupported:exposureMode]) { - device.exposureMode = exposureMode; - adjustingExposure = YES; - } + if ([device isExposureModeSupported:exposureMode]) { + device.exposureMode = exposureMode; + adjustingExposure = YES; + } - if ([device isWhiteBalanceModeSupported:whiteBalanceMode]) { - device.whiteBalanceMode = whiteBalanceMode; - } + if ([device isWhiteBalanceModeSupported:whiteBalanceMode]) { + device.whiteBalanceMode = whiteBalanceMode; + } - device.subjectAreaChangeMonitoringEnabled = !continuousMode; + device.subjectAreaChangeMonitoringEnabled = !continuousMode; - [device unlockForConfiguration]; + [device unlockForConfiguration]; - id delegate = self.delegate; - if (focusMode != AVCaptureFocusModeContinuousAutoFocus && focusing) { - if ([delegate respondsToSelector:@selector(recorderWillStartFocus:)]) { - [delegate recorderWillStartFocus:self]; - } + id delegate = self.delegate; + if (focusMode != AVCaptureFocusModeContinuousAutoFocus && focusing) { + if ([delegate respondsToSelector:@selector(recorderWillStartFocus:)]) { + [delegate recorderWillStartFocus:self]; + } - [self setAdjustingFocus:YES]; - } + [self setAdjustingFocus:YES]; + } - if (exposureMode != AVCaptureExposureModeContinuousAutoExposure && adjustingExposure) { - [self setAdjustingExposure:YES]; + if (exposureMode != AVCaptureExposureModeContinuousAutoExposure && adjustingExposure) { + [self setAdjustingExposure:YES]; - if ([delegate respondsToSelector:@selector(recorderWillStartAdjustingExposure:)]) { - [delegate recorderWillStartAdjustingExposure:self]; - } - } - } + if ([delegate respondsToSelector:@selector(recorderWillStartAdjustingExposure:)]) { + [delegate recorderWillStartAdjustingExposure:self]; + } + } + } } // Perform an auto focus at the specified point. The focus mode will automatically change to locked once the auto focus is complete. 
- (void)autoFocusAtPoint:(CGPoint)point { - [self _applyPointOfInterest:point continuousMode:NO]; + [self _applyPointOfInterest:point continuousMode:NO]; } // Switch to continuous auto focus mode at the specified point - (void)continuousFocusAtPoint:(CGPoint)point { - [self _applyPointOfInterest:point continuousMode:YES]; + [self _applyPointOfInterest:point continuousMode:YES]; } - (void)focusCenter { - _needsSwitchBackToContinuousFocus = YES; - [self autoFocusAtPoint:CGPointMake(0.5, 0.5)]; + _needsSwitchBackToContinuousFocus = YES; + [self autoFocusAtPoint:CGPointMake(0.5, 0.5)]; } - (void)refocus { - _needsSwitchBackToContinuousFocus = YES; - [self autoFocusAtPoint:self.focusPointOfInterest]; + _needsSwitchBackToContinuousFocus = YES; + [self autoFocusAtPoint:self.focusPointOfInterest]; } - (CGPoint)exposurePointOfInterest { - return [self.currentVideoDeviceInput device].exposurePointOfInterest; + return [self.currentVideoDeviceInput device].exposurePointOfInterest; } - (BOOL)exposureSupported { - return [self.currentVideoDeviceInput device].isExposurePointOfInterestSupported; + return [self.currentVideoDeviceInput device].isExposurePointOfInterestSupported; } - (CGPoint)focusPointOfInterest { - return [self.currentVideoDeviceInput device].focusPointOfInterest; + return [self.currentVideoDeviceInput device].focusPointOfInterest; } - (BOOL)focusSupported { - return [self currentVideoDeviceInput].device.isFocusPointOfInterestSupported; + return [self currentVideoDeviceInput].device.isFocusPointOfInterestSupported; } - (AVCaptureDeviceInput*)currentAudioDeviceInput { - return [self currentDeviceInputForMediaType:AVMediaTypeAudio]; + return [self currentDeviceInputForMediaType:AVMediaTypeAudio]; } - (AVCaptureDeviceInput*)currentVideoDeviceInput { - return [self currentDeviceInputForMediaType:AVMediaTypeVideo]; + return [self currentDeviceInputForMediaType:AVMediaTypeVideo]; } - (AVCaptureDeviceInput*)currentDeviceInputForMediaType:(NSString*)mediaType { - for (AVCaptureDeviceInput* deviceInput in _captureSession.inputs) { - if ([deviceInput.device hasMediaType:mediaType]) { - return deviceInput; - } - } + for (AVCaptureDeviceInput* deviceInput in _captureSession.inputs) { + if ([deviceInput.device hasMediaType:mediaType]) { + return deviceInput; + } + } - return nil; + return nil; } - (AVCaptureDevice*)audioDevice { - if (!self.audioConfiguration.enabled) { - return nil; - } + if (!self.audioConfiguration.enabled) { + return nil; + } - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; } - (AVCaptureDevice*)videoDevice { - if (!self.videoConfiguration.enabled) { - return nil; - } + if (!self.videoConfiguration.enabled) { + return nil; + } - return [SCRecorderTools videoDeviceForPosition:_device]; + return [SCRecorderTools videoDeviceForPosition:_device]; } - (AVCaptureVideoOrientation)actualVideoOrientation { - AVCaptureVideoOrientation videoOrientation = _videoOrientation; - - if (_autoSetVideoOrientation) { - UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation]; - - switch (deviceOrientation) { - case UIDeviceOrientationLandscapeLeft: - videoOrientation = AVCaptureVideoOrientationLandscapeRight; - break; - case UIDeviceOrientationLandscapeRight: - videoOrientation = AVCaptureVideoOrientationLandscapeLeft; - break; - case UIDeviceOrientationPortrait: - videoOrientation = AVCaptureVideoOrientationPortrait; - break; - case UIDeviceOrientationPortraitUpsideDown: - 
videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; - break; - default: - break; - } - } - - return videoOrientation; + AVCaptureVideoOrientation videoOrientation = _videoOrientation; + + if (_autoSetVideoOrientation) { + UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation]; + + switch (deviceOrientation) { + case UIDeviceOrientationLandscapeLeft: + videoOrientation = AVCaptureVideoOrientationLandscapeRight; + break; + case UIDeviceOrientationLandscapeRight: + videoOrientation = AVCaptureVideoOrientationLandscapeLeft; + break; + case UIDeviceOrientationPortrait: + videoOrientation = AVCaptureVideoOrientationPortrait; + break; + case UIDeviceOrientationPortraitUpsideDown: + videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; + break; + default: + break; + } + } + + return videoOrientation; } - (AVCaptureSession*)captureSession { - return _captureSession; + return _captureSession; } - (void)setPreviewView:(UIView *)previewView { - [_previewLayer removeFromSuperlayer]; + [_previewLayer removeFromSuperlayer]; - _previewView = previewView; + _previewView = previewView; - if (_previewView != nil) { - [_previewView.layer insertSublayer:_previewLayer atIndex:0]; + if (_previewView != nil) { + [_previewView.layer insertSublayer:_previewLayer atIndex:0]; - [self previewViewFrameChanged]; - } + [self previewViewFrameChanged]; + } } - (UIView*)previewView { - return _previewView; + return _previewView; } - (NSDictionary*)photoOutputSettings { - return _photoOutput.outputSettings; + return _photoOutput.outputSettings; } - (void)setPhotoOutputSettings:(NSDictionary *)photoOutputSettings { - _photoOutput.outputSettings = photoOutputSettings; + _photoOutput.outputSettings = photoOutputSettings; } - (void)setDevice:(AVCaptureDevicePosition)device { - [self willChangeValueForKey:@"device"]; + [self willChangeValueForKey:@"device"]; - _device = device; - if (_resetZoomOnChangeDevice) { - self.videoZoomFactor = 1; - } - if (_captureSession != nil && !_reconfiguring) { - [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:NO]; - } - [self didChangeValueForKey:@"device"]; + _device = device; + if (_resetZoomOnChangeDevice) { + self.videoZoomFactor = 1; + } + if (_captureSession != nil && !_reconfiguring) { + [self reconfigureVideoInput:self.videoConfiguration.enabled audioInput:NO]; + } + [self didChangeValueForKey:@"device"]; } - (void)setFlashMode:(SCFlashMode)flashMode { - AVCaptureDevice *currentDevice = [self videoDevice]; - NSError *error = nil; - - if (currentDevice.hasFlash) { - if ([currentDevice lockForConfiguration:&error]) { - if (flashMode == SCFlashModeLight) { - if ([currentDevice isTorchModeSupported:AVCaptureTorchModeOn]) { - [currentDevice setTorchMode:AVCaptureTorchModeOn]; - } - if ([currentDevice isFlashModeSupported:AVCaptureFlashModeOff]) { - [currentDevice setFlashMode:AVCaptureFlashModeOff]; - } - } else { - if ([currentDevice isTorchModeSupported:AVCaptureTorchModeOff]) { - [currentDevice setTorchMode:AVCaptureTorchModeOff]; - } - if ([currentDevice isFlashModeSupported:(AVCaptureFlashMode)flashMode]) { - [currentDevice setFlashMode:(AVCaptureFlashMode)flashMode]; - } - } - - [currentDevice unlockForConfiguration]; - } - } else { - error = [SCRecorder createError:@"Current device does not support flash"]; - } - - id delegate = self.delegate; - if ([delegate respondsToSelector:@selector(recorder:didChangeFlashMode:error:)]) { - [delegate recorder:self didChangeFlashMode:flashMode error:error]; - } - - if (error == nil) 
{ - _flashMode = flashMode; - } + AVCaptureDevice *currentDevice = [self videoDevice]; + NSError *error = nil; + + if (currentDevice.hasFlash) { + if ([currentDevice lockForConfiguration:&error]) { + if (flashMode == SCFlashModeLight) { + if ([currentDevice isTorchModeSupported:AVCaptureTorchModeOn]) { + [currentDevice setTorchMode:AVCaptureTorchModeOn]; + } + if ([currentDevice isFlashModeSupported:AVCaptureFlashModeOff]) { + [currentDevice setFlashMode:AVCaptureFlashModeOff]; + } + } else { + if ([currentDevice isTorchModeSupported:AVCaptureTorchModeOff]) { + [currentDevice setTorchMode:AVCaptureTorchModeOff]; + } + if ([currentDevice isFlashModeSupported:(AVCaptureFlashMode)flashMode]) { + [currentDevice setFlashMode:(AVCaptureFlashMode)flashMode]; + } + } + + [currentDevice unlockForConfiguration]; + } + } else { + error = [SCRecorder createError:@"Current device does not support flash"]; + } + + id delegate = self.delegate; + if ([delegate respondsToSelector:@selector(recorder:didChangeFlashMode:error:)]) { + [delegate recorder:self didChangeFlashMode:flashMode error:error]; + } + + if (error == nil) { + _flashMode = flashMode; + } } - (BOOL)deviceHasFlash { - AVCaptureDevice *currentDevice = [self videoDevice]; - return currentDevice.hasFlash; + AVCaptureDevice *currentDevice = [self videoDevice]; + return currentDevice.hasFlash; } - (AVCaptureVideoPreviewLayer*)previewLayer { - return _previewLayer; + return _previewLayer; } - (BOOL)isPrepared { - return _captureSession != nil; + return _captureSession != nil; } - (void)setCaptureSessionPreset:(NSString *)sessionPreset { - _captureSessionPreset = sessionPreset; + _captureSessionPreset = sessionPreset; - if (_captureSession != nil) { + if (_captureSession != nil) { [self _reconfigureSession:_isRecording]; - _captureSessionPreset = _captureSession.sessionPreset; - } + _captureSessionPreset = _captureSession.sessionPreset; + } } - (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { - _videoOrientation = videoOrientation; - [self updateVideoOrientation]; + _videoOrientation = videoOrientation; + [self updateVideoOrientation]; } - (void)setSession:(SCRecordSession *)recordSession { @@ -1533,280 +1535,282 @@ - (void)setSession:(SCRecordSession *)recordSession { } - (AVCaptureFocusMode)focusMode { - return [self currentVideoDeviceInput].device.focusMode; + return [self currentVideoDeviceInput].device.focusMode; } - (BOOL)isAdjustingFocus { - return _adjustingFocus; + return _adjustingFocus; } - (int)beginSessionConfigurationCount { - return _beginSessionConfigurationCount; + return _beginSessionConfigurationCount; } - (void)setAdjustingExposure:(BOOL)adjustingExposure { - if (_isAdjustingExposure != adjustingExposure) { - [self willChangeValueForKey:@"isAdjustingExposure"]; + if (_isAdjustingExposure != adjustingExposure) { + [self willChangeValueForKey:@"isAdjustingExposure"]; - _isAdjustingExposure = adjustingExposure; + _isAdjustingExposure = adjustingExposure; - [self didChangeValueForKey:@"isAdjustingExposure"]; - } + [self didChangeValueForKey:@"isAdjustingExposure"]; + } } - (void)setAdjustingFocus:(BOOL)adjustingFocus { - if (_adjustingFocus != adjustingFocus) { - [self willChangeValueForKey:@"isAdjustingFocus"]; + if (_adjustingFocus != adjustingFocus) { + [self willChangeValueForKey:@"isAdjustingFocus"]; - _adjustingFocus = adjustingFocus; + _adjustingFocus = adjustingFocus; - [self didChangeValueForKey:@"isAdjustingFocus"]; - } + [self didChangeValueForKey:@"isAdjustingFocus"]; + } } - 
(AVCaptureConnection*)videoConnection { - for (AVCaptureConnection * connection in _videoOutput.connections) { - for (AVCaptureInputPort * port in connection.inputPorts) { - if ([port.mediaType isEqual:AVMediaTypeVideo]) { - return connection; - } - } - } + for (AVCaptureConnection * connection in _videoOutput.connections) { + for (AVCaptureInputPort * port in connection.inputPorts) { + if ([port.mediaType isEqual:AVMediaTypeVideo]) { + return connection; + } + } + } - return nil; + return nil; } - (CMTimeScale)frameRate { - AVCaptureDeviceInput * deviceInput = [self currentVideoDeviceInput]; + AVCaptureDeviceInput * deviceInput = [self currentVideoDeviceInput]; - CMTimeScale framerate = 0; + CMTimeScale framerate = 0; - if (deviceInput != nil) { - if ([deviceInput.device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { - framerate = deviceInput.device.activeVideoMaxFrameDuration.timescale; - } else { + if (deviceInput != nil) { + if ([deviceInput.device respondsToSelector:@selector(activeVideoMaxFrameDuration)]) { + framerate = deviceInput.device.activeVideoMaxFrameDuration.timescale; + } else { #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" - AVCaptureConnection *videoConnection = [self videoConnection]; - framerate = videoConnection.videoMaxFrameDuration.timescale; + AVCaptureConnection *videoConnection = [self videoConnection]; + framerate = videoConnection.videoMaxFrameDuration.timescale; #pragma clang diagnostic pop - } - } + } + } - return framerate; + return framerate; } - (void)setFrameRate:(CMTimeScale)framePerSeconds { - CMTime fps = CMTimeMake(1, framePerSeconds); - - AVCaptureDevice * device = [self videoDevice]; - - if (device != nil) { - NSError * error = nil; - BOOL formatSupported = [SCRecorderTools formatInRange:device.activeFormat frameRate:framePerSeconds]; - - if (formatSupported) { - if ([device respondsToSelector:@selector(activeVideoMinFrameDuration)]) { - if ([device lockForConfiguration:&error]) { - device.activeVideoMaxFrameDuration = fps; - device.activeVideoMinFrameDuration = fps; - [device unlockForConfiguration]; - } else { - NSLog(@"Failed to set FramePerSeconds into camera device: %@", error.description); - } - } else { - AVCaptureConnection *connection = [self videoConnection]; + CMTime fps = CMTimeMake(1, framePerSeconds); + + AVCaptureDevice * device = [self videoDevice]; + + if (device != nil) { + NSError * error = nil; + BOOL formatSupported = [SCRecorderTools formatInRange:device.activeFormat frameRate:framePerSeconds]; + + if (formatSupported) { + if ([device respondsToSelector:@selector(activeVideoMinFrameDuration)]) { + if ([device lockForConfiguration:&error]) { + device.activeVideoMaxFrameDuration = fps; + device.activeVideoMinFrameDuration = fps; + [device unlockForConfiguration]; + } else { + NSLog(@"Failed to set FramePerSeconds into camera device: %@", error.description); + } + } else { + AVCaptureConnection *connection = [self videoConnection]; #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-declarations" - if (connection.isVideoMaxFrameDurationSupported) { - connection.videoMaxFrameDuration = fps; - } else { - NSLog(@"Failed to set FrameRate into camera device"); - } - if (connection.isVideoMinFrameDurationSupported) { - connection.videoMinFrameDuration = fps; - } else { - NSLog(@"Failed to set FrameRate into camera device"); - } + if (connection.isVideoMaxFrameDurationSupported) { + connection.videoMaxFrameDuration = fps; + } else { + NSLog(@"Failed to 
set FrameRate into camera device"); + } + if (connection.isVideoMinFrameDurationSupported) { + connection.videoMinFrameDuration = fps; + } else { + NSLog(@"Failed to set FrameRate into camera device"); + } #pragma clang diagnostic pop - } - } else { - NSLog(@"Unsupported frame rate %ld on current device format.", (long)framePerSeconds); - } - } + } + } else { + NSLog(@"Unsupported frame rate %ld on current device format.", (long)framePerSeconds); + } + } } - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate error:(NSError *__autoreleasing *)error { - return [self setActiveFormatWithFrameRate:frameRate width:self.videoConfiguration.size.width andHeight:self.videoConfiguration.size.height error:error]; + return [self setActiveFormatWithFrameRate:frameRate width:self.videoConfiguration.size.width andHeight:self.videoConfiguration.size.height error:error]; } - (BOOL)setActiveFormatWithFrameRate:(CMTimeScale)frameRate width:(int)width andHeight:(int)height error:(NSError *__autoreleasing *)error { - AVCaptureDevice *device = [self videoDevice]; - CMVideoDimensions dimensions; - dimensions.width = width; - dimensions.height = height; - - BOOL foundSupported = NO; - - if (device != nil) { - AVCaptureDeviceFormat *bestFormat = nil; - - for (AVCaptureDeviceFormat *format in device.formats) { - if ([SCRecorderTools formatInRange:format frameRate:frameRate dimensions:dimensions]) { - if (bestFormat == nil) { - bestFormat = format; - } else { - CMVideoDimensions bestDimensions = CMVideoFormatDescriptionGetDimensions(bestFormat.formatDescription); - CMVideoDimensions currentDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - - if (currentDimensions.width < bestDimensions.width && currentDimensions.height < bestDimensions.height) { - bestFormat = format; - } else if (currentDimensions.width == bestDimensions.width && currentDimensions.height == bestDimensions.height) { - if ([SCRecorderTools maxFrameRateForFormat:bestFormat minFrameRate:frameRate] > [SCRecorderTools maxFrameRateForFormat:format minFrameRate:frameRate]) { - bestFormat = format; - } - } - } - } - } - - if (bestFormat != nil) { - if ([device lockForConfiguration:error]) { - CMTime frameDuration = CMTimeMake(1, frameRate); - - device.activeFormat = bestFormat; - foundSupported = true; - - device.activeVideoMinFrameDuration = frameDuration; - device.activeVideoMaxFrameDuration = frameDuration; - - [device unlockForConfiguration]; - } - } else { - if (error != nil) { - *error = [SCRecorder createError:[NSString stringWithFormat:@"No format that supports framerate %d and dimensions %d/%d was found", (int)frameRate, dimensions.width, dimensions.height]]; - } - } - } else { - if (error != nil) { - *error = [SCRecorder createError:@"The camera must be initialized before setting active format"]; - } - } - - if (foundSupported && error != nil) { - *error = nil; - } - - return foundSupported; + AVCaptureDevice *device = [self videoDevice]; + CMVideoDimensions dimensions; + dimensions.width = width; + dimensions.height = height; + + BOOL foundSupported = NO; + + if (device != nil) { + AVCaptureDeviceFormat *bestFormat = nil; + + for (AVCaptureDeviceFormat *format in device.formats) { + if ([SCRecorderTools formatInRange:format frameRate:frameRate dimensions:dimensions]) { + if (bestFormat == nil) { + bestFormat = format; + } else { + CMVideoDimensions bestDimensions = CMVideoFormatDescriptionGetDimensions(bestFormat.formatDescription); + CMVideoDimensions currentDimensions = 
CMVideoFormatDescriptionGetDimensions(format.formatDescription); + + if (currentDimensions.width < bestDimensions.width && currentDimensions.height < bestDimensions.height) { + bestFormat = format; + } else if (currentDimensions.width == bestDimensions.width && currentDimensions.height == bestDimensions.height) { + CMTimeScale s1 = [SCRecorderTools maxFrameRateForFormat:bestFormat minFrameRate:frameRate]; + CMTimeScale s2 = [SCRecorderTools maxFrameRateForFormat:format minFrameRate:frameRate]; + if (s1 >= s2) { + bestFormat = format; + } + } + } + } + } + + if (bestFormat != nil) { + if ([device lockForConfiguration:error]) { + CMTime frameDuration = CMTimeMake(1, frameRate); + + device.activeFormat = bestFormat; + foundSupported = true; + + device.activeVideoMinFrameDuration = frameDuration; + device.activeVideoMaxFrameDuration = frameDuration; + + [device unlockForConfiguration]; + } + } else { + if (error != nil) { + *error = [SCRecorder createError:[NSString stringWithFormat:@"No format that supports framerate %d and dimensions %d/%d was found", (int)frameRate, dimensions.width, dimensions.height]]; + } + } + } else { + if (error != nil) { + *error = [SCRecorder createError:@"The camera must be initialized before setting active format"]; + } + } + + if (foundSupported && error != nil) { + *error = nil; + } + + return foundSupported; } - (CGFloat)ratioRecorded { - CGFloat ratio = 0; + CGFloat ratio = 0; - if (CMTIME_IS_VALID(_maxRecordDuration)) { - Float64 maxRecordDuration = CMTimeGetSeconds(_maxRecordDuration); - Float64 recordedTime = CMTimeGetSeconds(_session.duration); + if (CMTIME_IS_VALID(_maxRecordDuration)) { + Float64 maxRecordDuration = CMTimeGetSeconds(_maxRecordDuration); + Float64 recordedTime = CMTimeGetSeconds(_session.duration); - ratio = (CGFloat)(recordedTime / maxRecordDuration); - } + ratio = (CGFloat)(recordedTime / maxRecordDuration); + } - return ratio; + return ratio; } - (AVCaptureVideoDataOutput *)videoOutput { - return _videoOutput; + return _videoOutput; } - (AVCaptureAudioDataOutput *)audioOutput { - return _audioOutput; + return _audioOutput; } - (AVCaptureStillImageOutput *)photoOutput { - return _photoOutput; + return _photoOutput; } - (BOOL)audioEnabledAndReady { - return _audioOutputAdded && _audioInputAdded && !_audioConfiguration.shouldIgnore; + return _audioOutputAdded && _audioInputAdded && !_audioConfiguration.shouldIgnore; } - (BOOL)videoEnabledAndReady { - return _videoOutputAdded && _videoInputAdded && !_videoConfiguration.shouldIgnore; + return _videoOutputAdded && _videoInputAdded && !_videoConfiguration.shouldIgnore; } - (void)setKeepMirroringOnWrite:(BOOL)keepMirroringOnWrite { __weak typeof(self) wSelf = self; dispatch_sync(_sessionQueue, ^{ typeof(self) iSelf = wSelf; - iSelf->_keepMirroringOnWrite = keepMirroringOnWrite; - iSelf->_transformFilter = nil; - }); + iSelf->_keepMirroringOnWrite = keepMirroringOnWrite; + iSelf->_transformFilter = nil; + }); } - (CGFloat)videoZoomFactor { - AVCaptureDevice *device = [self videoDevice]; + AVCaptureDevice *device = [self videoDevice]; - if ([device respondsToSelector:@selector(videoZoomFactor)]) { - return device.videoZoomFactor; - } + if ([device respondsToSelector:@selector(videoZoomFactor)]) { + return device.videoZoomFactor; + } - return 1; + return 1; } - (CGFloat)maxVideoZoomFactor { - return [self maxVideoZoomFactorForDevice:_device]; + return [self maxVideoZoomFactorForDevice:_device]; } - (CGFloat)maxVideoZoomFactorForDevice:(AVCaptureDevicePosition)devicePosition { - return 
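[SCRecorderTools videoDeviceForPosition:devicePosition].activeFormat.videoMaxZoomFactor;

// Editor's sketch, not part of the patch: ratioRecorded above only climbs from
// 0 to 1 when maxRecordDuration is valid, so a recording UI might mirror it in
// a delegate callback; progressView is an assumed property, not SCRecorder API.
// - (void)recorder:(SCRecorder *)recorder didAppendVideoSampleBufferInSession:(SCRecordSession *)session {
//     self.progressView.progress = (float)recorder.ratioRecorded;
// }

- return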
[SCRecorderTools videoDeviceForPosition:devicePosition].activeFormat.videoMaxZoomFactor; + return [SCRecorderTools videoDeviceForPosition:devicePosition].activeFormat.videoMaxZoomFactor; } - (void)setVideoZoomFactor:(CGFloat)videoZoomFactor { - AVCaptureDevice *device = [self videoDevice]; + AVCaptureDevice *device = [self videoDevice]; - if ([device respondsToSelector:@selector(videoZoomFactor)]) { - NSError *error; - if ([device lockForConfiguration:&error]) { - if (videoZoomFactor <= device.activeFormat.videoMaxZoomFactor) { - device.videoZoomFactor = videoZoomFactor; - } else { - NSLog(@"Unable to set videoZoom: (max %f, asked %f)", device.activeFormat.videoMaxZoomFactor, videoZoomFactor); - } + if ([device respondsToSelector:@selector(videoZoomFactor)]) { + NSError *error; + if ([device lockForConfiguration:&error]) { + if (videoZoomFactor <= device.activeFormat.videoMaxZoomFactor) { + device.videoZoomFactor = videoZoomFactor; + } else { + NSLog(@"Unable to set videoZoom: (max %f, asked %f)", device.activeFormat.videoMaxZoomFactor, videoZoomFactor); + } - [device unlockForConfiguration]; - } else { - NSLog(@"Unable to set videoZoom: %@", error.localizedDescription); - } - } + [device unlockForConfiguration]; + } else { + NSLog(@"Unable to set videoZoom: %@", error.localizedDescription); + } + } } - (void)setFastRecordMethodEnabled:(BOOL)fastRecordMethodEnabled { - if (_fastRecordMethodEnabled != fastRecordMethodEnabled) { - _fastRecordMethodEnabled = fastRecordMethodEnabled; + if (_fastRecordMethodEnabled != fastRecordMethodEnabled) { + _fastRecordMethodEnabled = fastRecordMethodEnabled; [self _reconfigureSession:_isRecording]; - } + } } - (void)setVideoStabilizationMode:(AVCaptureVideoStabilizationMode)videoStabilizationMode { - _videoStabilizationMode = videoStabilizationMode; - [self beginConfiguration]; - [self _configureVideoStabilization]; - [self commitConfiguration]; + _videoStabilizationMode = videoStabilizationMode; + [self beginConfiguration]; + [self _configureVideoStabilization]; + [self commitConfiguration]; } + (SCRecorder *)sharedRecorder { - static SCRecorder *_sharedRecorder = nil; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - _sharedRecorder = [SCRecorder new]; - }); + static SCRecorder *_sharedRecorder = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + _sharedRecorder = [SCRecorder new]; + }); - return _sharedRecorder; + return _sharedRecorder; } + (BOOL)isSessionQueue { - return dispatch_get_specific(kSCRecorderRecordSessionQueueKey) != nil; + return dispatch_get_specific(kSCRecorderRecordSessionQueueKey) != nil; } - (void)setAudioMute:(BOOL)muting { diff --git a/Library/Sources/SCRecorderTools.m b/Library/Sources/SCRecorderTools.m index c4338300..27847f1d 100644 --- a/Library/Sources/SCRecorderTools.m +++ b/Library/Sources/SCRecorderTools.m @@ -17,48 +17,51 @@ + (BOOL)formatInRange:(AVCaptureDeviceFormat*)format frameRate:(CMTimeScale)fram CMVideoDimensions dimensions; dimensions.width = 0; dimensions.height = 0; - + return [SCRecorderTools formatInRange:format frameRate:frameRate dimensions:dimensions]; } + (BOOL)formatInRange:(AVCaptureDeviceFormat*)format frameRate:(CMTimeScale)frameRate dimensions:(CMVideoDimensions)dimensions { CMVideoDimensions size = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - + if (size.width >= dimensions.width && size.height >= dimensions.height) { for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { if (range.minFrameDuration.timescale >= 
frameRate && range.maxFrameDuration.timescale <= frameRate) { - return YES; - } + return YES; + } } } - + return NO; } + (CMTimeScale)maxFrameRateForFormat:(AVCaptureDeviceFormat *)format minFrameRate:(CMTimeScale)minFrameRate { CMTimeScale lowerTimeScale = 0; for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { - if (range.minFrameDuration.timescale >= minFrameRate && (lowerTimeScale == 0 || range.minFrameDuration.timescale < lowerTimeScale)) { - lowerTimeScale = range.minFrameDuration.timescale; + CMTimeScale rangeMinDur = range.minFrameDuration.timescale; + if (rangeMinDur >= minFrameRate && (lowerTimeScale == 0 || rangeMinDur < lowerTimeScale)) { + lowerTimeScale = rangeMinDur; } } - + return lowerTimeScale; } + (AVCaptureDevice *)videoDeviceForPosition:(AVCaptureDevicePosition)position { NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - + for (AVCaptureDevice *device in videoDevices) { if (device.position == (AVCaptureDevicePosition)position) { return device; } } - return nil; } + (NSString *)captureSessionPresetForDimension:(CMVideoDimensions)videoDimension { + if (videoDimension.width >= 3840 && videoDimension.height >= 2160) { + return AVCaptureSessionPreset3840x2160; + } if (videoDimension.width >= 1920 && videoDimension.height >= 1080) { return AVCaptureSessionPreset1920x1080; } @@ -74,7 +77,7 @@ + (NSString *)captureSessionPresetForDimension:(CMVideoDimensions)videoDimension if (videoDimension.width >= 352 && videoDimension.height >= 288) { return AVCaptureSessionPreset352x288; } - + return AVCaptureSessionPresetLow; } @@ -86,42 +89,44 @@ + (NSString *)bestCaptureSessionPresetForDevice:(AVCaptureDevice *)device withMa CMVideoDimensions highestDeviceDimension; highestDeviceDimension.width = 0; highestDeviceDimension.height = 0; - + for (AVCaptureDeviceFormat *format in device.formats) { CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - - if (dimension.width <= (int)maxSize.width && dimension.height <= (int)maxSize.height && dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { + + if (dimension.width <= (int)maxSize.width && + dimension.height <= (int)maxSize.height && + dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { highestDeviceDimension = dimension; } } - + return [SCRecorderTools captureSessionPresetForDimension:highestDeviceDimension]; } + (NSString *)bestCaptureSessionPresetCompatibleWithAllDevices { NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - CMVideoDimensions highestCompatibleDimension = {0,0}; + CMVideoDimensions highestCompatibleDimension = {0,0}; BOOL lowestSet = NO; - + for (AVCaptureDevice *device in videoDevices) { CMVideoDimensions highestDeviceDimension; highestDeviceDimension.width = 0; highestDeviceDimension.height = 0; - + for (AVCaptureDeviceFormat *format in device.formats) { CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - + if (dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { highestDeviceDimension = dimension; } } - + if (!lowestSet || (highestCompatibleDimension.width * highestCompatibleDimension.height > highestDeviceDimension.width * highestDeviceDimension.height)) { lowestSet = YES; highestCompatibleDimension = highestDeviceDimension; } - + } return [SCRecorderTools 
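captureSessionPresetForDimension:highestCompatibleDimension];

// Editor's sketch, not part of the patch: formatInRange:frameRate: and
// maxFrameRateForFormat:minFrameRate: above can be combined to probe a device
// before locking in a frame rate; `device` is an assumed local variable.
// AVCaptureDevice *device = [SCRecorderTools videoDeviceForPosition:AVCaptureDevicePositionBack];
// if (![SCRecorderTools formatInRange:device.activeFormat frameRate:60]) {
//     CMTimeScale ceiling = [SCRecorderTools maxFrameRateForFormat:device.activeFormat minFrameRate:30];
//     NSLog(@"60 fps unsupported; best rate at or above 30 fps is %d", (int)ceiling);
// }

return [SCRecorderTools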
captureSessionPresetForDimension:highestCompatibleDimension]; @@ -132,12 +137,12 @@ + (NSArray *)assetWriterMetadata { creationDate.keySpace = AVMetadataKeySpaceCommon; creationDate.key = AVMetadataCommonKeyCreationDate; creationDate.value = [[NSDate date] toISO8601]; - + AVMutableMetadataItem *software = [AVMutableMetadataItem new]; software.keySpace = AVMetadataKeySpaceCommon; software.key = AVMetadataCommonKeySoftware; software.value = @"SCRecorder"; - + return @[software, creationDate]; } diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index 1ae29f3f..1a4a0005 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -11,7 +11,8 @@ #import "SCMediaTypeConfiguration.h" #import "SCFilter.h" -#define kSCVideoConfigurationDefaultCodec AVVideoCodecH264 +//#define kSCVideoConfigurationDefaultCodec AVVideoCodecH264 +#define kSCVideoConfigurationDefaultCodec AVVideoCodecTypeHEVC #define kSCVideoConfigurationDefaultScalingMode AVVideoScalingModeResizeAspectFill #define kSCVideoConfigurationDefaultBitrate 2000000 @@ -29,7 +30,7 @@ typedef enum : NSUInteger { /** Called to determine whether setFrame:, updateWithVideoTime: and layoutIfNeeded should be called on the main thread. You should avoid returning YES as much as possible from this method, since it will potentially - greatly reduce the encoding speed. Some views like UITextView requires to layout on the main thread. + greatly reduce the encoding speed. Some views like UITextView requires to layout on the main thread. */ - (BOOL)requiresUpdateOnMainThreadAtVideoTime:(NSTimeInterval)time videoSize:(CGSize)videoSize; @@ -83,7 +84,7 @@ typedef enum : NSUInteger { A value more than 1 will make the buffers last longer, it creates a slow motion effect. A value less than 1 will make the buffers be shorter, it creates a timelapse effect. - + Only used in SCRecorder. */ @property (assign, nonatomic) CGFloat timeScale; @@ -120,14 +121,14 @@ typedef enum : NSUInteger { /** If YES, the affineTransform will be ignored and the output affineTransform will be the same as the input asset. - + Only used in SCAssetExportSession. */ @property (assign, nonatomic) BOOL keepInputAffineTransform; /** The video composition to use. - + Only used in SCAssetExportSession. */ @property (strong, nonatomic) AVVideoComposition *__nullable composition; @@ -135,14 +136,14 @@ typedef enum : NSUInteger { /** The watermark to use. If the composition is not set, this watermark image will be applied on the exported video. - + Only used in SCAssetExportSession. */ @property (strong, nonatomic) UIImage *__nullable watermarkImage; /** The watermark image location and size in the input video frame coordinates. - + Only used in SCAssetExportSession. */ @property (assign, nonatomic) CGRect watermarkFrame; @@ -152,7 +153,7 @@ typedef enum : NSUInteger { to figure out which size to use by looking at the composition and the natural size of the inputAsset. If the filter you set return back an image with a different size, you should put the output size here. - + Only used in SCAssetExportSession. Default is CGSizeZero */ @@ -165,16 +166,16 @@ typedef enum : NSUInteger { /** The overlay view that will be drawn on top of the video. - + Only used in SCAssetExportSession. */ @property (strong, nonatomic) UIView *__nullable overlay; /** The watermark anchor location. - + Default is top left - + Only used in SCAssetExportSession. 
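 
 A hedged usage sketch (editor's addition; `exporter` is an assumed
 SCAssetExportSession instance, not declared in this header):
 
     exporter.videoConfiguration.watermarkImage = [UIImage imageNamed:@"logo"];
     exporter.videoConfiguration.watermarkFrame = CGRectMake(0, 0, 120, 40);
     // watermarkAnchorLocation keeps its top-left default unless set explicitly
 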
*/ @property (assign, nonatomic) SCWatermarkAnchorLocation watermarkAnchorLocation; diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m index d89a27d4..a7ebe733 100644 --- a/Library/Sources/SCVideoConfiguration.m +++ b/Library/Sources/SCVideoConfiguration.m @@ -11,29 +11,29 @@ @implementation SCVideoConfiguration - (id)init { - self = [super init]; - - if (self) { - self.bitrate = kSCVideoConfigurationDefaultBitrate; - _size = CGSizeZero; - _codec = kSCVideoConfigurationDefaultCodec; - _scalingMode = kSCVideoConfigurationDefaultScalingMode; - _affineTransform = CGAffineTransformIdentity; - _timeScale = 1; - _keepInputAffineTransform = YES; - } - - return self; + self = [super init]; + + if (self) { + self.bitrate = kSCVideoConfigurationDefaultBitrate; + _size = CGSizeZero; + _codec = kSCVideoConfigurationDefaultCodec; + _scalingMode = kSCVideoConfigurationDefaultScalingMode; + _affineTransform = CGAffineTransformIdentity; + _timeScale = 1; + _keepInputAffineTransform = YES; + } + + return self; } static CGSize MakeVideoSize(CGSize videoSize, float requestedWidth) { - float ratio = videoSize.width / requestedWidth; - - if (ratio <= 1) { - return videoSize; - } - - return CGSizeMake(videoSize.width / ratio, videoSize.height / ratio); + float ratio = videoSize.width / requestedWidth; + + if (ratio <= 1) { + return videoSize; + } + + return CGSizeMake(videoSize.width / ratio, videoSize.height / ratio); } - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize { @@ -42,76 +42,86 @@ - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSi - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize sizeIsSuggestion:(BOOL)suggestion { - NSDictionary *options = self.options; - if (options != nil) { - return options; - } - - CGSize outputSize = self.size; - unsigned long bitrate = (unsigned long)self.bitrate; - - if (self.preset != nil) { - if ([self.preset isEqualToString:SCPresetLowQuality]) { - bitrate = 500000; - if (suggestion) + NSDictionary *options = self.options; + if (options != nil) { + return options; + } + + CGSize outputSize = self.size; + unsigned long bitrate = (unsigned long)self.bitrate; + + if (self.preset != nil) { + if ([self.preset isEqualToString:SCPresetLowQuality]) { + bitrate = 500000; + if (suggestion) outputSize = MakeVideoSize(videoSize, 640); - } else if ([self.preset isEqualToString:SCPresetMediumQuality]) { - bitrate = 1000000; - if (suggestion) + } else if ([self.preset isEqualToString:SCPresetMediumQuality]) { + bitrate = 1000000; + if (suggestion) outputSize = MakeVideoSize(videoSize, 1280); - } else if ([self.preset isEqualToString:SCPresetHighestQuality]) { - bitrate = 6000000; - if (suggestion) + } else if ([self.preset isEqualToString:SCPresetHighestQuality]) { + bitrate = 6000000; + if (suggestion) outputSize = MakeVideoSize(videoSize, 1920); - } else { - NSLog(@"Unrecognized video preset %@", self.preset); - } - } + } else { + NSLog(@"Unrecognized video preset %@", self.preset); + } + } if (suggestion == NO) outputSize = videoSize; - - if (CGSizeEqualToSize(outputSize, CGSizeZero)) { - outputSize = videoSize; - } - - if (self.sizeAsSquare) { - if (videoSize.width > videoSize.height) { - outputSize.width = videoSize.height; - } else { - outputSize.height = videoSize.width; - } - } - - NSMutableDictionary *compressionSettings = [NSMutableDictionary dictionaryWithObject:[NSNumber numberWithUnsignedLong:bitrate] forKey:AVVideoAverageBitRateKey]; - - if 
(self.shouldKeepOnlyKeyFrames) {
-        [compressionSettings setObject:@1 forKey:AVVideoMaxKeyFrameIntervalKey];
-    }
-
-    if (self.profileLevel) {
-        [compressionSettings setObject:self.profileLevel forKey:AVVideoProfileLevelKey];
-    }
-//    [compressionSettings setObject:@30 forKey:AVVideoAverageNonDroppableFrameRateKey];
-    [compressionSettings setObject:@NO forKey:AVVideoAllowFrameReorderingKey];
+
+    NSMutableDictionary *compressionSettings = NSMutableDictionary.dictionary;
+
+    if (self.codec == AVVideoCodecH264) {
+        compressionSettings[AVVideoAverageBitRateKey] = @(bitrate);
+        compressionSettings[AVVideoMaxKeyFrameIntervalDurationKey] = @0.0f;
+        if (self.shouldKeepOnlyKeyFrames) {
+            compressionSettings[AVVideoMaxKeyFrameIntervalKey] = @1;
+        }
+        // only for h264
+        // compressionSettings[AVVideoAverageNonDroppableFrameRateKey] = @30;
+    } else if (self.codec == AVVideoCodecHEVC) {
+//        compressionSettings[AVVideoQualityKey] = @1.0;
+    }
+
+    if (self.profileLevel) {
+        compressionSettings[AVVideoProfileLevelKey] = self.profileLevel;
+    }
+    // allowing wide color appears to cause encoding problems, so it stays disabled
+//    compressionSettings[AVVideoAllowWideColorKey] = @(YES);
+    compressionSettings[AVVideoAllowFrameReorderingKey] = @NO;
 //    [compressionSettings setObject:AVVideoH264EntropyModeCABAC forKey:AVVideoH264EntropyModeKey];
-    [compressionSettings setObject:@30 forKey:AVVideoExpectedSourceFrameRateKey];
-
-    return @{
-        AVVideoCodecKey : self.codec,
-        AVVideoScalingModeKey : self.scalingMode,
-        AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width],
-        AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height],
-        AVVideoCompressionPropertiesKey : compressionSettings
-    };
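+    // Editor's sketch, not part of the patch: for the H.264 branch above with
+    // the default 2 Mbps bitrate, the options returned below come out roughly
+    // as (the 1080x1920 portrait size is an assumed example, not a default):
+    // @{ AVVideoCodecKey : AVVideoCodecH264,
+    //    AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
+    //    AVVideoWidthKey : @1080, AVVideoHeightKey : @1920,
+    //    AVVideoCompressionPropertiesKey : @{ AVVideoAverageBitRateKey : @2000000,
+    //                                         AVVideoAllowFrameReorderingKey : @NO } }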
+    // got rid of setting the frame rates; not sure whether it helped
+//    compressionSettings[AVVideoExpectedSourceFrameRateKey] = @60;
+
+    return @{
+        AVVideoCodecKey : self.codec,
+        AVVideoScalingModeKey : self.scalingMode,
+        AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width],
+        AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height],
+        AVVideoCompressionPropertiesKey : compressionSettings,
+    };
 }
 
 - (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer {
-    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    size_t width = CVPixelBufferGetWidth(imageBuffer);
-    size_t height = CVPixelBufferGetHeight(imageBuffer);
-
-    return [self createAssetWriterOptionsWithVideoSize:CGSizeMake(width, height)];
+    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    size_t width = CVPixelBufferGetWidth(imageBuffer);
+    size_t height = CVPixelBufferGetHeight(imageBuffer);
+
+    return [self createAssetWriterOptionsWithVideoSize:CGSizeMake(width, height)];
 }
 
 @end

From 7a1406a24fe73cd512fcd0cebbb2a0b9f264f005 Mon Sep 17 00:00:00 2001
From: Gabriel Rozenberg
Date: Mon, 12 Nov 2018 12:17:28 -0500
Subject: [PATCH 34/55] updated how settings are figured out

---
 .../ObjC/Sources/SCRecorderViewController.m |  16 ++-
 Library/Sources/SCAssetExportSession.m      |   3 +-
 Library/Sources/SCAudioConfiguration.m      | 113 +++++++++---------
 Library/Sources/SCMediaTypeConfiguration.h  |   2 +-
 Library/Sources/SCMediaTypeConfiguration.m  |   6 +-
 Library/Sources/SCRecorder.m                |   4 +-
 Library/Sources/SCVideoConfiguration.h      |   3 +-
 Library/Sources/SCVideoConfiguration.m      |  81 +++++++++----
 8 files changed, 137 insertions(+), 91 deletions(-)

diff --git a/Examples/ObjC/Sources/SCRecorderViewController.m b/Examples/ObjC/Sources/SCRecorderViewController.m
index 0cb45c3d..9f3580c8 100644
--- a/Examples/ObjC/Sources/SCRecorderViewController.m
+++ b/Examples/ObjC/Sources/SCRecorderViewController.m
@@ -66,6 +66,17 @@ - (void)viewDidLoad {
 
     [self.view insertSubview:_ghostImageView aboveSubview:self.previewView];
 
+    NSArray *types = @[AVCaptureDeviceTypeBuiltInMicrophone,
+                       AVCaptureDeviceTypeBuiltInWideAngleCamera,
+                       AVCaptureDeviceTypeBuiltInTelephotoCamera,
+                       AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera];
+    AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
+                                                discoverySessionWithDeviceTypes:types
+                                                mediaType:AVMediaTypeVideo
+                                                position:AVCaptureDevicePositionUnspecified];
+    NSLog(@"devices %@", session.devices);
+
+
     _recorder = [SCRecorder recorder];
     [_recorder beginConfiguration];
     _recorder.captureSessionPreset = AVCaptureSessionPresetInputPriority;
@@ -81,8 +92,9 @@ - (void)viewDidLoad {
 //    videoConfiguration.preset = SCPresetHighestQuality;
 //    videoConfiguration.bitrate = 10000000; //(10 mbps)
 //    videoConfiguration.bitrate = 85000000; //(85 mbps)
-//    videoConfiguration.bitrate = 100000000; //(100 mbps)
-//    videoConfiguration.bitrate = 150000000; //(510 mbps)
+//    videoConfiguration.bitrate = 100000000; //(100 mbps)
+//    videoConfiguration.bitrate = 50366912; //(~50 mbps)
+    videoConfiguration.bitrate = 150000000; //(150 mbps)
 //    videoConfiguration.size = CGSizeMake(1080, 1920);
     videoConfiguration.size = CGSizeMake(2160, 3840);
     videoConfiguration.maxFrameRate = 0;
diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m
index 07a9eebc..a6fd5362 100644
--- a/Library/Sources/SCAssetExportSession.m
+++ b/Library/Sources/SCAssetExportSession.m
@@ -552,7 +552,7 @@ - (SCFilter
*)_buildWatermarkFilterForVideoSize:(CGSize)videoSize { - (void)_setupAudioUsingTracks:(NSArray *)audioTracks { if (audioTracks.count > 0 && self.audioConfiguration.enabled && !self.audioConfiguration.shouldIgnore) { // Input - NSDictionary *audioSettings = [_audioConfiguration createAssetWriterOptionsUsingSampleBuffer:nil]; + NSDictionary *audioSettings = [_audioConfiguration createAssetWriterOptionsUsingSampleBuffer:nil usingOutput:_audioOutput]; _audioInput = [self addWriter:AVMediaTypeAudio withSettings:audioSettings]; // Output @@ -597,6 +597,7 @@ - (void)_setupVideoUsingTracks:(NSArray *)videoTracks { // Input NSDictionary *videoSettings = [_videoConfiguration createAssetWriterOptionsWithVideoSize:_inputBufferSize + usingOutput:_videoOutput sizeIsSuggestion:videoComposition == nil]; _videoInput = [self addWriter:AVMediaTypeVideo withSettings:videoSettings]; diff --git a/Library/Sources/SCAudioConfiguration.m b/Library/Sources/SCAudioConfiguration.m index ecd960ae..aac8c3bb 100644 --- a/Library/Sources/SCAudioConfiguration.m +++ b/Library/Sources/SCAudioConfiguration.m @@ -11,64 +11,65 @@ @implementation SCAudioConfiguration - (id)init { - self = [super init]; - - if (self) { - self.bitrate = kSCAudioConfigurationDefaultBitrate; - _format = kSCAudioConfigurationDefaultAudioFormat; - } - - return self; + self = [super init]; + + if (self) { + self.bitrate = kSCAudioConfigurationDefaultBitrate; + _format = kSCAudioConfigurationDefaultAudioFormat; + } + + return self; } -- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer { - NSDictionary *options = self.options; - if (options != nil) { - return options; - } - - Float64 sampleRate = self.sampleRate; - int channels = self.channelsCount; - unsigned long bitrate = (unsigned long)self.bitrate; - - if (self.preset != nil) { - if ([self.preset isEqualToString:SCPresetLowQuality]) { - bitrate = 64000; - channels = 1; - } else if ([self.preset isEqualToString:SCPresetMediumQuality]) { - bitrate = 128000; - } else if ([self.preset isEqualToString:SCPresetHighestQuality]) { - bitrate = 320000; - } else { - NSLog(@"Unrecognized video preset %@", self.preset); - } - } - - if (sampleBuffer != nil) { - CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer); - const AudioStreamBasicDescription *streamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription); - - if (sampleRate == 0) { - sampleRate = streamBasicDescription->mSampleRate; - } - if (channels == 0) { - channels = streamBasicDescription->mChannelsPerFrame; - } - } - - if (sampleRate == 0) { - sampleRate = kSCAudioConfigurationDefaultSampleRate; - } - if (channels == 0) { - channels = kSCAudioConfigurationDefaultNumberOfChannels; - } - - return @{ - AVFormatIDKey : [NSNumber numberWithInt: self.format], - AVEncoderBitRateKey : [NSNumber numberWithUnsignedLong: bitrate], - AVNumberOfChannelsKey : [NSNumber numberWithInt: channels], - AVSampleRateKey : [NSNumber numberWithInt: sampleRate] - }; +- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *)output { + NSDictionary *options = self.options; + if (options != nil) { + return options; + } + + Float64 sampleRate = self.sampleRate; + int channels = self.channelsCount; + unsigned long bitrate = (unsigned long)self.bitrate; + + if (self.preset != nil) { + if ([self.preset isEqualToString:SCPresetLowQuality]) { + bitrate = 64000; + channels = 1; + } else if 
([self.preset isEqualToString:SCPresetMediumQuality]) {
+            bitrate = 128000;
+        } else if ([self.preset isEqualToString:SCPresetHighestQuality]) {
+            bitrate = 320000;
+        } else {
+            NSLog(@"Unrecognized audio preset %@", self.preset);
+        }
+    }
+
+    if (sampleBuffer != nil) {
+        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
+        const AudioStreamBasicDescription *streamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
+
+        if (sampleRate == 0) {
+            sampleRate = streamBasicDescription->mSampleRate;
+        }
+        if (channels == 0) {
+            channels = streamBasicDescription->mChannelsPerFrame;
+        }
+    }
+
+    if (sampleRate == 0) {
+        sampleRate = kSCAudioConfigurationDefaultSampleRate;
+    }
+    if (channels == 0) {
+        channels = kSCAudioConfigurationDefaultNumberOfChannels;
+    }
+
+    return @{
+        AVFormatIDKey : @(self.format),
+        AVEncoderBitRateKey : @(bitrate),
+        AVNumberOfChannelsKey : @(channels),
+        AVSampleRateKey : [NSNumber numberWithInt: sampleRate],
+        AVEncoderAudioQualityKey : @(AVAudioQualityMax)
+    };
 }
 
 @end
diff --git a/Library/Sources/SCMediaTypeConfiguration.h b/Library/Sources/SCMediaTypeConfiguration.h
index 3e90a9f1..f5f3f6b9 100644
--- a/Library/Sources/SCMediaTypeConfiguration.h
+++ b/Library/Sources/SCMediaTypeConfiguration.h
@@ -47,6 +47,6 @@ extern NSString *__nonnull SCPresetLowQuality;
  */
 @property (copy, nonatomic) NSString *__nullable preset;
 
-- (NSDictionary *__nonnull)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef __nullable)sampleBuffer;
+- (NSDictionary *__nonnull)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef __nullable)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *)output;
 
 @end
diff --git a/Library/Sources/SCMediaTypeConfiguration.m b/Library/Sources/SCMediaTypeConfiguration.m
index 2476cbf6..82e48efb 100644
--- a/Library/Sources/SCMediaTypeConfiguration.m
+++ b/Library/Sources/SCMediaTypeConfiguration.m
@@ -16,15 +16,15 @@ @implementation SCMediaTypeConfiguration
 
 - (id)init {
     self = [super init];
-    
+
     if (self) {
         _enabled = YES;
     }
-    
+
     return self;
 }
 
-- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *)output {
     return nil;
 }
 
diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m
index 8ebea9ca..2b38c36e 100644
--- a/Library/Sources/SCRecorder.m
+++ b/Library/Sources/SCRecorder.m
@@ -775,7 +775,7 @@ - (void)_handleVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC
     if (!recordSession.videoInitializationFailed && !_videoConfiguration.shouldIgnore) {
         if (!recordSession.videoInitialized) {
             NSError *error = nil;
-            NSDictionary *settings = [self.videoConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer];
+            NSDictionary *settings = [self.videoConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer usingOutput:_videoOutput];
 
             CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
             [recordSession initializeVideo:settings formatDescription:formatDescription error:&error];
@@ -854,7 +854,7 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC
     if (!recordSession.audioInitializationFailed && !_audioConfiguration.shouldIgnore) {
         if (!recordSession.audioInitialized) {
             NSError *error = nil;
-            NSDictionary *settings = [self.audioConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer];
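+            // Editor's note, not part of the patch: threading the capture output
+            // through lets a configuration derive its writer settings from
+            // AVFoundation's own recommendations, for example on the audio side:
+            // NSDictionary *rec = [_audioOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie];
+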
NSDictionary *settings = [self.audioConfiguration createAssetWriterOptionsUsingSampleBuffer:sampleBuffer usingOutput:_audioOutput]; CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer); [recordSession initializeAudio:settings formatDescription:formatDescription error:&error]; // NSLog(@"INITIALIZED AUDIO"); diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index 1a4a0005..5019c5e3 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -181,8 +181,9 @@ typedef enum : NSUInteger { @property (assign, nonatomic) SCWatermarkAnchorLocation watermarkAnchorLocation; -- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize; +- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize usingOutput:(AVCaptureVideoDataOutput *)output; - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize + usingOutput:(AVCaptureVideoDataOutput *)output sizeIsSuggestion:(BOOL)suggestion; @end diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m index a7ebe733..6ad90d19 100644 --- a/Library/Sources/SCVideoConfiguration.m +++ b/Library/Sources/SCVideoConfiguration.m @@ -36,11 +36,20 @@ static CGSize MakeVideoSize(CGSize videoSize, float requestedWidth) { return CGSizeMake(videoSize.width / ratio, videoSize.height / ratio); } -- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize { - return [self createAssetWriterOptionsWithVideoSize:videoSize sizeIsSuggestion:YES]; +- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize usingOutput:(AVCaptureVideoDataOutput *)output { + return [self createAssetWriterOptionsWithVideoSize:videoSize usingOutput:output sizeIsSuggestion:YES]; +} + +- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *)output { + CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + size_t width = CVPixelBufferGetWidth(imageBuffer); + size_t height = CVPixelBufferGetHeight(imageBuffer); + + return [self createAssetWriterOptionsWithVideoSize:CGSizeMake(width, height) usingOutput:output]; } - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize + usingOutput:(AVCaptureVideoDataOutput *)output sizeIsSuggestion:(BOOL)suggestion { NSDictionary *options = self.options; if (options != nil) { @@ -81,47 +90,69 @@ - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize outputSize.height = videoSize.width; } } + /*NSMutableDictionary *compressionSettings = NSMutableDictionary.dictionary; - NSMutableDictionary *compressionSettings = NSMutableDictionary.dictionary; + compressionSettings[AVVideoAverageBitRateKey] = @(bitrate); + + if (self.codec == AVVideoCodecTypeH264) { - if (self.codec == AVVideoCodecH264) { - compressionSettings[AVVideoAverageBitRateKey] = @(bitrate); compressionSettings[AVVideoMaxKeyFrameIntervalDurationKey] = @0.0f; if (self.shouldKeepOnlyKeyFrames) { compressionSettings[AVVideoMaxKeyFrameIntervalKey] = @1; } //only for h264 // compressionSettings[AVVideoAverageNonDroppableFrameRateKey] = @30; - } else if (self.codec == AVVideoCodecHEVC) { + } else if (self.codec == AVVideoCodecTypeHEVC) { // compressionSettings[AVVideoQualityKey] = @1.0; } - if (self.profileLevel) { compressionSettings[AVVideoProfileLevelKey] = self.profileLevel; } //seems to break shit -// 
compressionSettings[AVVideoAllowWideColorKey] = @(YES); - compressionSettings[AVVideoAllowFrameReorderingKey] = @NO; + compressionSettings[AVVideoAllowWideColorKey] = @(YES); + compressionSettings[AVVideoAllowFrameReorderingKey] = @(NO);*/ // [compressionSettings setObject:AVVideoH264EntropyModeCABAC forKey:AVVideoH264EntropyModeKey]; //got rid of setting the frame rates.. not sure if it helped or not // compressionSettings[AVVideoExpectedSourceFrameRateKey] = @60; - return @{ - AVVideoCodecKey : self.codec, - AVVideoScalingModeKey : self.scalingMode, - AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width], - AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height], - AVVideoCompressionPropertiesKey : compressionSettings, - }; - -} - -- (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)sampleBuffer { - CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - size_t width = CVPixelBufferGetWidth(imageBuffer); - size_t height = CVPixelBufferGetHeight(imageBuffer); - - return [self createAssetWriterOptionsWithVideoSize:CGSizeMake(width, height)]; + NSMutableDictionary *colorSettings = NSMutableDictionary.dictionary; + //HD + // colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_ITU_R_709_2; + // colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; + // colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; + //Wide-color + colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_P3_D65; + colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; + colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; + + NSMutableDictionary *recommendedSettings = [[output + recommendedVideoSettingsForVideoCodecType:self.codec + assetWriterOutputFileType:AVFileTypeQuickTimeMovie] mutableCopy]; + + NSMutableDictionary *apertureSettings = NSMutableDictionary.dictionary; + apertureSettings[AVVideoCleanApertureWidthKey] = recommendedSettings[AVVideoWidthKey]; + apertureSettings[AVVideoCleanApertureHeightKey] = recommendedSettings[AVVideoHeightKey]; + apertureSettings[AVVideoCleanApertureHorizontalOffsetKey] = @(0); + apertureSettings[AVVideoCleanApertureVerticalOffsetKey] = @(0); + + recommendedSettings[AVVideoColorPropertiesKey] = colorSettings; + recommendedSettings[AVVideoCleanApertureKey] = apertureSettings; + + recommendedSettings[AVVideoScalingModeKey] = self.scalingMode; + recommendedSettings[AVVideoWidthKey] = [NSNumber numberWithInteger:outputSize.width]; + recommendedSettings[AVVideoHeightKey] = [NSNumber numberWithInteger:outputSize.height]; + + NSLog(@"recommmended %@", recommendedSettings); + return recommendedSettings; +// return @{ +// AVVideoCodecKey : self.codec, +// AVVideoScalingModeKey : self.scalingMode, +// AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width], +// AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height], +// AVVideoCompressionPropertiesKey : compressionSettings, +// AVVideoPixelAspectRatioKey : AVVideoPixelAspectRatioHorizontalSpacingKey, +// AVVideoColorPropertiesKey : colorSettings, +// }; } @end From 74e750d23dabec1e41d5bf1deec5567727e16043 Mon Sep 17 00:00:00 2001 From: Gabriel Rozenberg Date: Mon, 12 Nov 2018 12:53:51 -0500 Subject: [PATCH 35/55] nullability fixes --- Library/Sources/SCMediaTypeConfiguration.h | 2 +- Library/Sources/SCVideoConfiguration.h | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Library/Sources/SCMediaTypeConfiguration.h 
b/Library/Sources/SCMediaTypeConfiguration.h index f5f3f6b9..178b5d21 100644 --- a/Library/Sources/SCMediaTypeConfiguration.h +++ b/Library/Sources/SCMediaTypeConfiguration.h @@ -47,6 +47,6 @@ extern NSString *__nonnull SCPresetLowQuality; */ @property (copy, nonatomic) NSString *__nullable preset; -- (NSDictionary *__nonnull)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef __nullable)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *)output; +- (NSDictionary *__nonnull)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef __nullable)sampleBuffer usingOutput:(AVCaptureVideoDataOutput *__nullable)output; @end diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index 5019c5e3..3d12545e 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -181,9 +181,9 @@ typedef enum : NSUInteger { @property (assign, nonatomic) SCWatermarkAnchorLocation watermarkAnchorLocation; -- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize usingOutput:(AVCaptureVideoDataOutput *)output; +- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize usingOutput:(AVCaptureVideoDataOutput *__nullable)output; - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize - usingOutput:(AVCaptureVideoDataOutput *)output + usingOutput:(AVCaptureVideoDataOutput *__nullable)output sizeIsSuggestion:(BOOL)suggestion; @end From 27decfa81f2995fb5ad1ee90ff4fc3ed22bc80a9 Mon Sep 17 00:00:00 2001 From: Gabriel Rozenberg Date: Tue, 13 Nov 2018 13:38:15 -0500 Subject: [PATCH 36/55] recommended settings updates --- .../ObjC/Sources/SCRecorderViewController.m | 1 + Library/Sources/SCVideoConfiguration.h | 6 + Library/Sources/SCVideoConfiguration.m | 130 ++++++++++-------- 3 files changed, 79 insertions(+), 58 deletions(-) diff --git a/Examples/ObjC/Sources/SCRecorderViewController.m b/Examples/ObjC/Sources/SCRecorderViewController.m index 9f3580c8..deb3ae4c 100644 --- a/Examples/ObjC/Sources/SCRecorderViewController.m +++ b/Examples/ObjC/Sources/SCRecorderViewController.m @@ -106,6 +106,7 @@ - (void)viewDidLoad { videoConfiguration.sizeAsSquare = NO; videoConfiguration.codec = AVVideoCodecTypeHEVC; videoConfiguration.enabled = YES; + videoConfiguration.usesRecommendedSettings = YES; _recorder.videoOrientation = AVCaptureVideoOrientationPortrait; _recorder.photoConfiguration.enabled = NO; diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index 3d12545e..34c89d3c 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -180,6 +180,12 @@ typedef enum : NSUInteger { */ @property (assign, nonatomic) SCWatermarkAnchorLocation watermarkAnchorLocation; +/* + * Uses the recommended settings for the current camera device (front/back) + * + * */ +@property (assign, nonatomic) BOOL usesRecommendedSettings; + - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize usingOutput:(AVCaptureVideoDataOutput *__nullable)output; - (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m index 6ad90d19..896622a9 100644 --- a/Library/Sources/SCVideoConfiguration.m +++ b/Library/Sources/SCVideoConfiguration.m @@ -10,6 +10,8 @@ @implementation SCVideoConfiguration +@synthesize usesRecommendedSettings; + - (id)init { self = [super init]; @@ -21,6 +23,7 @@ - 
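(void)exampleOptIntoRecommendedSettings {
    // Editor's sketch, not part of the patch: shows a caller opting into the
    // capture output's recommended writer settings through the new flag below;
    // when it stays NO, the manual bitrate/size path is used instead.
    SCVideoConfiguration *config = [SCVideoConfiguration new];
    config.codec = AVVideoCodecTypeHEVC;
    config.usesRecommendedSettings = YES;
}

-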
(id)init { _affineTransform = CGAffineTransformIdentity; _timeScale = 1; _keepInputAffineTransform = YES; + usesRecommendedSettings = NO; } return self; @@ -90,69 +93,80 @@ - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize outputSize.height = videoSize.width; } } - /*NSMutableDictionary *compressionSettings = NSMutableDictionary.dictionary; - - compressionSettings[AVVideoAverageBitRateKey] = @(bitrate); - - if (self.codec == AVVideoCodecTypeH264) { - - compressionSettings[AVVideoMaxKeyFrameIntervalDurationKey] = @0.0f; - if (self.shouldKeepOnlyKeyFrames) { - compressionSettings[AVVideoMaxKeyFrameIntervalKey] = @1; - } - //only for h264 - // compressionSettings[AVVideoAverageNonDroppableFrameRateKey] = @30; - } else if (self.codec == AVVideoCodecTypeHEVC) { -// compressionSettings[AVVideoQualityKey] = @1.0; - } - if (self.profileLevel) { - compressionSettings[AVVideoProfileLevelKey] = self.profileLevel; - } - //seems to break shit - compressionSettings[AVVideoAllowWideColorKey] = @(YES); - compressionSettings[AVVideoAllowFrameReorderingKey] = @(NO);*/ -// [compressionSettings setObject:AVVideoH264EntropyModeCABAC forKey:AVVideoH264EntropyModeKey]; -//got rid of setting the frame rates.. not sure if it helped or not -// compressionSettings[AVVideoExpectedSourceFrameRateKey] = @60; - - NSMutableDictionary *colorSettings = NSMutableDictionary.dictionary; - //HD - // colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_ITU_R_709_2; - // colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; - // colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; - //Wide-color - colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_P3_D65; - colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; - colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; NSMutableDictionary *recommendedSettings = [[output recommendedVideoSettingsForVideoCodecType:self.codec assetWriterOutputFileType:AVFileTypeQuickTimeMovie] mutableCopy]; + if (usesRecommendedSettings) { + NSNumber *recWidth = recommendedSettings[AVVideoWidthKey]; + NSNumber *recHeight = recommendedSettings[AVVideoHeightKey]; + outputSize = CGSizeMake(recWidth.floatValue, recHeight.floatValue); + + NSMutableDictionary *recommendedCompressionSettings = recommendedSettings[AVVideoCompressionPropertiesKey]; + + NSMutableDictionary *apertureSettings = NSMutableDictionary.dictionary; + apertureSettings[AVVideoCleanApertureWidthKey] = [NSNumber numberWithInteger:outputSize.width]; + apertureSettings[AVVideoCleanApertureHeightKey] = [NSNumber numberWithInteger:outputSize.height]; + apertureSettings[AVVideoCleanApertureHorizontalOffsetKey] = @(0); + apertureSettings[AVVideoCleanApertureVerticalOffsetKey] = @(0); + + NSMutableDictionary *colorSettings = NSMutableDictionary.dictionary; + BOOL supportsWideColor = [recommendedCompressionSettings[AVVideoAllowWideColorKey] boolValue]; + + if (!supportsWideColor) { + //HD + colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_ITU_R_709_2; + colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; + colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; + } else { + //Wide-color + colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_P3_D65; + colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; + colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; + } - NSMutableDictionary *apertureSettings = 
NSMutableDictionary.dictionary;
-    apertureSettings[AVVideoCleanApertureWidthKey] = recommendedSettings[AVVideoWidthKey];
-    apertureSettings[AVVideoCleanApertureHeightKey] = recommendedSettings[AVVideoHeightKey];
-    apertureSettings[AVVideoCleanApertureHorizontalOffsetKey] = @(0);
-    apertureSettings[AVVideoCleanApertureVerticalOffsetKey] = @(0);
-
-    recommendedSettings[AVVideoColorPropertiesKey] = colorSettings;
-    recommendedSettings[AVVideoCleanApertureKey] = apertureSettings;
-
-    recommendedSettings[AVVideoScalingModeKey] = self.scalingMode;
-    recommendedSettings[AVVideoWidthKey] = [NSNumber numberWithInteger:outputSize.width];
-    recommendedSettings[AVVideoHeightKey] = [NSNumber numberWithInteger:outputSize.height];
-
-    NSLog(@"recommmended %@", recommendedSettings);
-    return recommendedSettings;
+        recommendedSettings[AVVideoScalingModeKey] = self.scalingMode;
+        recommendedSettings[AVVideoWidthKey] = [NSNumber numberWithInteger:outputSize.width];
+        recommendedSettings[AVVideoHeightKey] = [NSNumber numberWithInteger:outputSize.height];
+        recommendedSettings[AVVideoColorPropertiesKey] = colorSettings;
+        recommendedSettings[AVVideoCleanApertureKey] = apertureSettings;
+        recommendedSettings[AVVideoCompressionPropertiesKey] = recommendedCompressionSettings;
+        NSLog(@"recommended %@", recommendedSettings);
+        return recommendedSettings;
+    } else {
+        NSMutableDictionary *compressionSettings = NSMutableDictionary.dictionary;
+
+        compressionSettings[AVVideoAverageBitRateKey] = @(bitrate);
+
+        if (self.codec == AVVideoCodecTypeH264) {
+            compressionSettings[AVVideoMaxKeyFrameIntervalDurationKey] = @0.0f;
+            if (self.shouldKeepOnlyKeyFrames) {
+                compressionSettings[AVVideoMaxKeyFrameIntervalKey] = @1;
+            }
+            // only for h264
+            // compressionSettings[AVVideoAverageNonDroppableFrameRateKey] = @30;
+        } else if (self.codec == AVVideoCodecTypeHEVC) {
+            // compressionSettings[AVVideoQualityKey] = @1.0;
+        }
+        if (self.profileLevel) {
+            compressionSettings[AVVideoProfileLevelKey] = self.profileLevel;
+        }
+        // wide color has appeared to cause encoding problems before; watch this
+        compressionSettings[AVVideoAllowWideColorKey] = @(YES);
+        compressionSettings[AVVideoAllowFrameReorderingKey] = @(NO);
+        // [compressionSettings setObject:AVVideoH264EntropyModeCABAC forKey:AVVideoH264EntropyModeKey];
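+        // Editor's sketch, not part of the patch: the wide-color branch above
+        // assembles color properties equivalent to this literal (P3 primaries
+        // with Rec. 709 transfer and matrix, mirroring the constants it uses):
+        // NSDictionary *wideColor = @{ AVVideoColorPrimariesKey : AVVideoColorPrimaries_P3_D65,
+        //                              AVVideoTransferFunctionKey : AVVideoTransferFunction_ITU_R_709_2,
+        //                              AVVideoYCbCrMatrixKey : AVVideoYCbCrMatrix_ITU_R_709_2 };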
+        // got rid of setting the frame rates; not sure whether it helped
+        // compressionSettings[AVVideoExpectedSourceFrameRateKey] = @60;
+
+        return @{
+            AVVideoCodecKey : self.codec,
+            AVVideoScalingModeKey : self.scalingMode,
+            AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width],
+            AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height],
+            AVVideoCompressionPropertiesKey: compressionSettings,
+            AVVideoPixelAspectRatioKey : @{ AVVideoPixelAspectRatioHorizontalSpacingKey : @1,
+                                            AVVideoPixelAspectRatioVerticalSpacingKey : @1 },
+        };
+    }
 }
 
 @end

From bb340d9ff8e36eda34f1fb7164a3afc123e53af1 Mon Sep 17 00:00:00 2001
From: Gabriel Rozenberg
Date: Tue, 13 Nov 2018 16:28:36 -0500
Subject: [PATCH 37/55] highest available format

---
 Library/Sources/SCRecorderTools.h |   4 +
 Library/Sources/SCRecorderTools.m | 246 ++++++++++++++++++------------
 2 files changed, 151 insertions(+), 99 deletions(-)

diff --git a/Library/Sources/SCRecorderTools.h b/Library/Sources/SCRecorderTools.h
index 1e4a26e3..832f50be 100644
--- a/Library/Sources/SCRecorderTools.h
+++ b/Library/Sources/SCRecorderTools.h
@@ -11,6 +11,10 @@
 
 @interface SCRecorderTools : NSObject
 
++ (CGSize)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position
+                                              minFPS:(CGFloat)minFPS
+                                              maxFPS:(CGFloat)maxFPS;
+
 /**
  Returns the best session preset that is compatible with all available
  video devices (front and back camera). It will ensure that buffer output from
diff --git a/Library/Sources/SCRecorderTools.m b/Library/Sources/SCRecorderTools.m
index 27847f1d..9fb9a70e 100644
--- a/Library/Sources/SCRecorderTools.m
+++ b/Library/Sources/SCRecorderTools.m
@@ -13,137 +13,185 @@
 
 @implementation SCRecorderTools
 
++ (CGSize)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position
+                                              minFPS:(CGFloat)minFPS
+                                              maxFPS:(CGFloat)maxFPS {
+    struct deviceSetting {
+        CGSize resolution;
+        CGFloat fpsMax;
+    };
+
+    struct deviceSetting bestDeviceSetting;
+    bestDeviceSetting.resolution = CGSizeZero;
+    bestDeviceSetting.fpsMax = 0;
+
+    CGFloat minRequiredFPS = minFPS;
+    CGFloat maxRequiredFPS = maxFPS;
+
+    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+                                                         discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInMicrophone,
+                                                                                           AVCaptureDeviceTypeBuiltInWideAngleCamera,
+                                                                                           AVCaptureDeviceTypeBuiltInTelephotoCamera,
+                                                                                           AVCaptureDeviceTypeBuiltInDualCamera]
+                                                         mediaType:AVMediaTypeVideo
+                                                         position:position];
+
+    for (AVCaptureDevice *device in discoverySession.devices) {
+        for (AVCaptureDeviceFormat *format in device.formats) {
+
+            CGFloat maxFPS = [SCRecorderTools maxFrameRateForFormat:format minFrameRate:30];
+            CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+            CGSize dSize = CGSizeMake(dimension.width, dimension.height);
+
+            if (CGSizeEqualToSize(CGSizeZero, bestDeviceSetting.resolution)) {
+                bestDeviceSetting.resolution = dSize;
+                bestDeviceSetting.fpsMax = maxFPS;
+            } else if (dSize.width >= bestDeviceSetting.resolution.width &&
+                       dSize.height >= bestDeviceSetting.resolution.height &&
+                       maxFPS >= bestDeviceSetting.fpsMax &&
+                       maxFPS >= minRequiredFPS &&
+                       maxFPS <= maxRequiredFPS) {
+                bestDeviceSetting.resolution = dSize;
+                bestDeviceSetting.fpsMax = maxFPS;
+            }
+        }
+    }
+    return bestDeviceSetting.resolution;
+}
+
+
 + (BOOL)formatInRange:(AVCaptureDeviceFormat*)format frameRate:(CMTimeScale)frameRate {
-    CMVideoDimensions dimensions;
-    dimensions.width = 0;
-    dimensions.height = 0;
+    CMVideoDimensions dimensions;
+    dimensions.width = 0;
+    dimensions.height = 0;
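+// Editor's sketch, not part of the patch: one assumed way to consume the new
+// helper above when configuring a back-camera session for 30-60 fps capture:
+// CGSize best = [SCRecorderTools getHighestAvailableFormatForDevicePosition:AVCaptureDevicePositionBack
+//                                                                    minFPS:30
+//                                                                    maxFPS:60];
+// CMVideoDimensions dim = { (int32_t)best.width, (int32_t)best.height };
+// NSString *preset = [SCRecorderTools captureSessionPresetForDimension:dim];
+
-    return [SCRecorderTools formatInRange:format frameRate:frameRate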
dimensions:dimensions]; + return [SCRecorderTools formatInRange:format frameRate:frameRate dimensions:dimensions]; } + (BOOL)formatInRange:(AVCaptureDeviceFormat*)format frameRate:(CMTimeScale)frameRate dimensions:(CMVideoDimensions)dimensions { - CMVideoDimensions size = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + CMVideoDimensions size = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - if (size.width >= dimensions.width && size.height >= dimensions.height) { - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { - if (range.minFrameDuration.timescale >= frameRate && range.maxFrameDuration.timescale <= frameRate) { + if (size.width >= dimensions.width && size.height >= dimensions.height) { + for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + if (range.minFrameDuration.timescale >= frameRate && range.maxFrameDuration.timescale <= frameRate) { return YES; } - } - } + } + } - return NO; + return NO; } + (CMTimeScale)maxFrameRateForFormat:(AVCaptureDeviceFormat *)format minFrameRate:(CMTimeScale)minFrameRate { - CMTimeScale lowerTimeScale = 0; - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { - CMTimeScale rangeMinDur = range.minFrameDuration.timescale; - if (rangeMinDur >= minFrameRate && (lowerTimeScale == 0 || rangeMinDur < lowerTimeScale)) { - lowerTimeScale = rangeMinDur; - } - } - - return lowerTimeScale; + CMTimeScale lowerTimeScale = 0; + for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + CMTimeScale rangeMinDur = range.minFrameDuration.timescale; + if (rangeMinDur >= minFrameRate && (lowerTimeScale == 0 || rangeMinDur < lowerTimeScale)) { + lowerTimeScale = rangeMinDur; + } + } + + return lowerTimeScale; } + (AVCaptureDevice *)videoDeviceForPosition:(AVCaptureDevicePosition)position { - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - - for (AVCaptureDevice *device in videoDevices) { - if (device.position == (AVCaptureDevicePosition)position) { - return device; - } - } - return nil; + NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + for (AVCaptureDevice *device in videoDevices) { + if (device.position == (AVCaptureDevicePosition)position) { + return device; + } + } + return nil; } + (NSString *)captureSessionPresetForDimension:(CMVideoDimensions)videoDimension { - if (videoDimension.width >= 3840 && videoDimension.height >= 2160) { - return AVCaptureSessionPreset3840x2160; - } - if (videoDimension.width >= 1920 && videoDimension.height >= 1080) { - return AVCaptureSessionPreset1920x1080; - } - if (videoDimension.width >= 1280 && videoDimension.height >= 720) { - return AVCaptureSessionPreset1280x720; - } - if (videoDimension.width >= 960 && videoDimension.height >= 540) { - return AVCaptureSessionPresetiFrame960x540; - } - if (videoDimension.width >= 640 && videoDimension.height >= 480) { - return AVCaptureSessionPreset640x480; - } - if (videoDimension.width >= 352 && videoDimension.height >= 288) { - return AVCaptureSessionPreset352x288; - } - - return AVCaptureSessionPresetLow; + if (videoDimension.width >= 3840 && videoDimension.height >= 2160) { + return AVCaptureSessionPreset3840x2160; + } + if (videoDimension.width >= 1920 && videoDimension.height >= 1080) { + return AVCaptureSessionPreset1920x1080; + } + if (videoDimension.width >= 1280 && videoDimension.height >= 720) { + return AVCaptureSessionPreset1280x720; + } + if (videoDimension.width >= 960 && videoDimension.height >= 540) 
{ + return AVCaptureSessionPresetiFrame960x540; + } + if (videoDimension.width >= 640 && videoDimension.height >= 480) { + return AVCaptureSessionPreset640x480; + } + if (videoDimension.width >= 352 && videoDimension.height >= 288) { + return AVCaptureSessionPreset352x288; + } + + return AVCaptureSessionPresetLow; } + (NSString *)bestCaptureSessionPresetForDevicePosition:(AVCaptureDevicePosition)devicePosition withMaxSize:(CGSize)maxSize { - return [SCRecorderTools bestCaptureSessionPresetForDevice:[SCRecorderTools videoDeviceForPosition:devicePosition] withMaxSize:maxSize]; + return [SCRecorderTools bestCaptureSessionPresetForDevice:[SCRecorderTools videoDeviceForPosition:devicePosition] withMaxSize:maxSize]; } + (NSString *)bestCaptureSessionPresetForDevice:(AVCaptureDevice *)device withMaxSize:(CGSize)maxSize { - CMVideoDimensions highestDeviceDimension; - highestDeviceDimension.width = 0; - highestDeviceDimension.height = 0; + CMVideoDimensions highestDeviceDimension; + highestDeviceDimension.width = 0; + highestDeviceDimension.height = 0; - for (AVCaptureDeviceFormat *format in device.formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + for (AVCaptureDeviceFormat *format in device.formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - if (dimension.width <= (int)maxSize.width && + if (dimension.width <= (int)maxSize.width && dimension.height <= (int)maxSize.height && dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { - highestDeviceDimension = dimension; - } - } + highestDeviceDimension = dimension; + } + } - return [SCRecorderTools captureSessionPresetForDimension:highestDeviceDimension]; + return [SCRecorderTools captureSessionPresetForDimension:highestDeviceDimension]; } + (NSString *)bestCaptureSessionPresetCompatibleWithAllDevices { - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - CMVideoDimensions highestCompatibleDimension = {0,0}; - BOOL lowestSet = NO; + CMVideoDimensions highestCompatibleDimension = {0,0}; + BOOL lowestSet = NO; - for (AVCaptureDevice *device in videoDevices) { - CMVideoDimensions highestDeviceDimension; - highestDeviceDimension.width = 0; - highestDeviceDimension.height = 0; + for (AVCaptureDevice *device in videoDevices) { + CMVideoDimensions highestDeviceDimension; + highestDeviceDimension.width = 0; + highestDeviceDimension.height = 0; - for (AVCaptureDeviceFormat *format in device.formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + for (AVCaptureDeviceFormat *format in device.formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - if (dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { - highestDeviceDimension = dimension; - } - } + if (dimension.width * dimension.height > highestDeviceDimension.width * highestDeviceDimension.height) { + highestDeviceDimension = dimension; + } + } - if (!lowestSet || (highestCompatibleDimension.width * highestCompatibleDimension.height > highestDeviceDimension.width * highestDeviceDimension.height)) { - lowestSet = YES; - highestCompatibleDimension = highestDeviceDimension; - } + if (!lowestSet || (highestCompatibleDimension.width * highestCompatibleDimension.height > 
highestDeviceDimension.width * highestDeviceDimension.height)) { + lowestSet = YES; + highestCompatibleDimension = highestDeviceDimension; + } - } + } - return [SCRecorderTools captureSessionPresetForDimension:highestCompatibleDimension]; + return [SCRecorderTools captureSessionPresetForDimension:highestCompatibleDimension]; } + (NSArray *)assetWriterMetadata { - AVMutableMetadataItem *creationDate = [AVMutableMetadataItem new]; - creationDate.keySpace = AVMetadataKeySpaceCommon; - creationDate.key = AVMetadataCommonKeyCreationDate; - creationDate.value = [[NSDate date] toISO8601]; + AVMutableMetadataItem *creationDate = [AVMutableMetadataItem new]; + creationDate.keySpace = AVMetadataKeySpaceCommon; + creationDate.key = AVMetadataCommonKeyCreationDate; + creationDate.value = [[NSDate date] toISO8601]; - AVMutableMetadataItem *software = [AVMutableMetadataItem new]; - software.keySpace = AVMetadataKeySpaceCommon; - software.key = AVMetadataCommonKeySoftware; - software.value = @"SCRecorder"; + AVMutableMetadataItem *software = [AVMutableMetadataItem new]; + software.keySpace = AVMetadataKeySpaceCommon; + software.key = AVMetadataCommonKeySoftware; + software.value = @"SCRecorder"; - return @[software, creationDate]; + return @[software, creationDate]; } @end @@ -151,24 +199,24 @@ + (NSArray *)assetWriterMetadata { @implementation NSDate (SCRecorderTools) + (NSDateFormatter *)_getFormatter { - static NSDateFormatter *dateFormatter = nil; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - dateFormatter = [[NSDateFormatter alloc] init]; - NSLocale *enUSPOSIXLocale = [[NSLocale alloc] initWithLocaleIdentifier:@"en_US_POSIX"]; - [dateFormatter setLocale:enUSPOSIXLocale]; - [dateFormatter setDateFormat:@"yyyy-MM-dd'T'HH:mm:ssZZZZZ"]; - }); - - return dateFormatter; + static NSDateFormatter *dateFormatter = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + dateFormatter = [[NSDateFormatter alloc] init]; + NSLocale *enUSPOSIXLocale = [[NSLocale alloc] initWithLocaleIdentifier:@"en_US_POSIX"]; + [dateFormatter setLocale:enUSPOSIXLocale]; + [dateFormatter setDateFormat:@"yyyy-MM-dd'T'HH:mm:ssZZZZZ"]; + }); + + return dateFormatter; } - (NSString*)toISO8601 { - return [[NSDate _getFormatter] stringFromDate:self]; + return [[NSDate _getFormatter] stringFromDate:self]; } + (NSDate *)fromISO8601:(NSString *)iso8601 { - return [[NSDate _getFormatter] dateFromString:iso8601]; + return [[NSDate _getFormatter] dateFromString:iso8601]; } @end From 44ab7ab3efd3121398518b47d55642dc78efb843 Mon Sep 17 00:00:00 2001 From: Gabriel Rozenberg Date: Tue, 13 Nov 2018 16:35:52 -0500 Subject: [PATCH 38/55] also returning FPS now --- Library/Sources/SCRecorderTools.h | 11 ++++++++--- Library/Sources/SCRecorderTools.m | 11 +++-------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Library/Sources/SCRecorderTools.h b/Library/Sources/SCRecorderTools.h index 832f50be..f8e66f8d 100644 --- a/Library/Sources/SCRecorderTools.h +++ b/Library/Sources/SCRecorderTools.h @@ -9,11 +9,16 @@ #import #import +struct SCDeviceSetting { + CGSize resolution; + CGFloat fpsMax; +}; + @interface SCRecorderTools : NSObject -+ (CGSize)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position - minFPS:(CGFloat)minFPS - maxFPS:(CGFloat)maxFPS; ++ (struct SCDeviceSetting)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position + minFPS:(CGFloat)minFPS + maxFPS:(CGFloat)maxFPS; /** Returns the best session preset that is compatible with all available 
video diff --git a/Library/Sources/SCRecorderTools.m b/Library/Sources/SCRecorderTools.m index 9fb9a70e..b8bc8d2f 100644 --- a/Library/Sources/SCRecorderTools.m +++ b/Library/Sources/SCRecorderTools.m @@ -13,15 +13,10 @@ @implementation SCRecorderTools -+ (CGSize)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position ++ (struct SCDeviceSetting)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position minFPS:(CGFloat)minFPS maxFPS:(CGFloat)maxFPS { - struct deviceSetting { - CGSize resolution; - CGFloat fpsMax; - }; - - struct deviceSetting bestDeviceSetting; + struct SCDeviceSetting bestDeviceSetting; bestDeviceSetting.resolution = CGSizeZero; bestDeviceSetting.fpsMax = 0; @@ -57,7 +52,7 @@ + (CGSize)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)po } } } - return bestDeviceSetting.resolution; + return bestDeviceSetting; } From a8ed327b8bcd886af6e58a72ffdde15a1abb4dd2 Mon Sep 17 00:00:00 2001 From: Gabriel Rozenberg Date: Tue, 13 Nov 2018 18:26:51 -0500 Subject: [PATCH 39/55] config updates --- Library/Sources/SCVideoConfiguration.m | 27 +++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m index 896622a9..6ce9bb09 100644 --- a/Library/Sources/SCVideoConfiguration.m +++ b/Library/Sources/SCVideoConfiguration.m @@ -152,19 +152,32 @@ - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize compressionSettings[AVVideoProfileLevelKey] = self.profileLevel; } //seems to break shit - compressionSettings[AVVideoAllowWideColorKey] = @(YES); +// compressionSettings[AVVideoAllowWideColorKey] = @(NO); compressionSettings[AVVideoAllowFrameReorderingKey] = @(NO); // [compressionSettings setObject:AVVideoH264EntropyModeCABAC forKey:AVVideoH264EntropyModeKey]; //got rid of setting the frame rates.. 
not sure if it helped or not // compressionSettings[AVVideoExpectedSourceFrameRateKey] = @60; + NSMutableDictionary *colorSettings = NSMutableDictionary.dictionary; + //HD + colorSettings[AVVideoColorPrimariesKey] = AVVideoColorPrimaries_ITU_R_709_2; + colorSettings[AVVideoTransferFunctionKey] = AVVideoTransferFunction_ITU_R_709_2; + colorSettings[AVVideoYCbCrMatrixKey] = AVVideoYCbCrMatrix_ITU_R_709_2; + + NSMutableDictionary *apertureSettings = NSMutableDictionary.dictionary; + apertureSettings[AVVideoCleanApertureWidthKey] = [NSNumber numberWithInteger:outputSize.width]; + apertureSettings[AVVideoCleanApertureHeightKey] = [NSNumber numberWithInteger:outputSize.height]; + apertureSettings[AVVideoCleanApertureHorizontalOffsetKey] = @(0); + apertureSettings[AVVideoCleanApertureVerticalOffsetKey] = @(0); return @{ - AVVideoCodecKey : self.codec, - AVVideoScalingModeKey : self.scalingMode, - AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width], - AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height], - AVVideoCompressionPropertiesKey: compressionSettings, - AVVideoPixelAspectRatioKey : AVVideoPixelAspectRatioHorizontalSpacingKey, + AVVideoCodecKey : self.codec, + AVVideoScalingModeKey : self.scalingMode, + AVVideoWidthKey : [NSNumber numberWithInteger:outputSize.width], + AVVideoHeightKey : [NSNumber numberWithInteger:outputSize.height], + AVVideoCompressionPropertiesKey : compressionSettings, + AVVideoColorPropertiesKey : colorSettings, + AVVideoCleanApertureKey : apertureSettings, + }; } } From ca9e798d57cbf02be34c09fad422ce0e80911c95 Mon Sep 17 00:00:00 2001 From: Gabriel Rozenberg Date: Tue, 13 Nov 2018 22:07:02 -0500 Subject: [PATCH 40/55] highest available 16x9 format --- Library/Sources/SCRecorderTools.h | 3 ++- Library/Sources/SCRecorderTools.m | 12 ++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/Library/Sources/SCRecorderTools.h b/Library/Sources/SCRecorderTools.h index f8e66f8d..aa79e77d 100644 --- a/Library/Sources/SCRecorderTools.h +++ b/Library/Sources/SCRecorderTools.h @@ -18,7 +18,8 @@ struct SCDeviceSetting { + (struct SCDeviceSetting)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position minFPS:(CGFloat)minFPS - maxFPS:(CGFloat)maxFPS; + maxFPS:(CGFloat)maxFPS + is16x9:(BOOL)is16x9; /** Returns the best session preset that is compatible with all available video diff --git a/Library/Sources/SCRecorderTools.m b/Library/Sources/SCRecorderTools.m index b8bc8d2f..0f079dd0 100644 --- a/Library/Sources/SCRecorderTools.m +++ b/Library/Sources/SCRecorderTools.m @@ -14,8 +14,9 @@ @implementation SCRecorderTools + (struct SCDeviceSetting)getHighestAvailableFormatForDevicePosition:(AVCaptureDevicePosition)position - minFPS:(CGFloat)minFPS - maxFPS:(CGFloat)maxFPS { + minFPS:(CGFloat)minFPS + maxFPS:(CGFloat)maxFPS + is16x9:(BOOL)is16x9 { struct SCDeviceSetting bestDeviceSetting; bestDeviceSetting.resolution = CGSizeZero; bestDeviceSetting.fpsMax = 0; @@ -39,6 +40,13 @@ + (struct SCDeviceSetting)getHighestAvailableFormatForDevicePosition:(AVCaptureD CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); CGSize dSize = CGSizeMake(dimension.width, dimension.height); + double aspectRatio = (MAX(dSize.width, dSize.height) / MIN(dSize.width, dSize.height)); + double delta = ABS(aspectRatio - (16.0/9.0)); + BOOL closeEnoughTo16x9 = delta < 0.1; // 1.6777 .. 1.8777 tag:gabe - if this is encompassing too much - maybe 0.08? 
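// A worked check of the tolerance above, for reference: 16.0/9.0 = 1.7778, so
// delta < 0.1 accepts aspect ratios in roughly (1.6778, 1.8778). Exact 16:9
// formats such as 1920x1080 and 1280x720 (ratio 1.7778) pass, while 4:3
// formats such as 640x480 (ratio 1.3333) and 16:10 formats (ratio 1.6) are
// rejected. Tightening the tolerance to 0.08 would narrow the band to
// (1.6978, 1.8578) and would still keep exact 16:9 formats.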
+
+ if (!closeEnoughTo16x9 && is16x9)
+ continue;
+
+ if (CGSizeEqualToSize(CGSizeZero, bestDeviceSetting.resolution)) {
+ bestDeviceSetting.resolution = dSize;
+ bestDeviceSetting.fpsMax = maxFPS;

From 1bccdba9bf27b32b9b063f139e0b76490dad0b0c Mon Sep 17 00:00:00 2001
From: Gabriel Rozenberg
Date: Wed, 14 Nov 2018 11:57:00 -0500
Subject: [PATCH 41/55] added a clause that lets us set our own sizes using
 the recommended settings

---
 Library/Sources/SCVideoConfiguration.h | 6 ++++++
 Library/Sources/SCVideoConfiguration.m | 4 +++-
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h
index 34c89d3c..a47fbd33 100644
--- a/Library/Sources/SCVideoConfiguration.h
+++ b/Library/Sources/SCVideoConfiguration.h
@@ -186,6 +186,12 @@ typedef enum : NSUInteger {
 * */
 @property (assign, nonatomic) BOOL usesRecommendedSettings;

+/*
+ * Only used when usesRecommendedSettings == YES. Ensures the given size is used instead of the recommended one
+ *
+ * */
+@property (assign, nonatomic) BOOL forceSizeWithRecommendedSettings;
+
- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize
 usingOutput:(AVCaptureVideoDataOutput *__nullable)output;
- (NSDictionary *__nonnull)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize
diff --git a/Library/Sources/SCVideoConfiguration.m b/Library/Sources/SCVideoConfiguration.m
index 6ce9bb09..834cf8f8 100644
--- a/Library/Sources/SCVideoConfiguration.m
+++ b/Library/Sources/SCVideoConfiguration.m
@@ -11,6 +11,7 @@
 @implementation SCVideoConfiguration
 @synthesize usesRecommendedSettings;
+@synthesize forceSizeWithRecommendedSettings;

- (id)init {
 self = [super init];
@@ -100,7 +101,8 @@ - (NSDictionary *)createAssetWriterOptionsWithVideoSize:(CGSize)videoSize
 if (usesRecommendedSettings) {
 NSNumber *recWidth = recommendedSettings[AVVideoWidthKey];
 NSNumber *recHeight = recommendedSettings[AVVideoHeightKey];
- outputSize = CGSizeMake(recWidth.floatValue, recHeight.floatValue);
+ if (!forceSizeWithRecommendedSettings)
+ outputSize = CGSizeMake(recWidth.floatValue, recHeight.floatValue);

 NSMutableDictionary *recommendedCompressionSettings = recommendedSettings[AVVideoCompressionPropertiesKey];

From ab2a57b47e3e0af606269cfefacde878f199f083 Mon Sep 17 00:00:00 2001
From: Artem Yakovliev
Date: Wed, 26 Dec 2018 15:50:04 +0200
Subject: [PATCH 42/55] video buffers delay handling

---
 Library/Sources/SCRecordSession.m | 25 +--
 Library/Sources/SCRecorder.h | 5 +
 Library/Sources/SCRecorder.m | 317 ++++++++++++++++++------------
 3 files changed, 206 insertions(+), 141 deletions(-)

diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m
index d3817b40..4f0f485a 100644
--- a/Library/Sources/SCRecordSession.m
+++ b/Library/Sources/SCRecordSession.m
@@ -768,14 +768,15 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion:
 iSelf->_lastTimeAudio = lastTimeAudio;

 if (!iSelf->_currentSegmentHasVideo) {
- iSelf->_currentSegmentDuration = CMTimeSubtract(lastTimeAudio, iSelf->_sessionStartTime);
+ iSelf->_currentSegmentDuration = CMTimeAdd(CMTimeSubtract(lastTimeAudio, iSelf->_sessionStartTime), [self.recorder videoDelay]);
 }

- // NSLog(@"Appending audio at %fs (buffer: %fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(adjustedBuffer)), CMTimeGetSeconds(actualBufferTime));
+ // NSLog(@"Appending audio at %.3fs", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(adjustedBuffer)));

 iSelf->_currentSegmentHasAudio = YES;
 completion(YES);
 } else {
+
NSLog(@"Failed to Append audio at %.3fs", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(adjustedBuffer))); completion(NO); } @@ -786,8 +787,9 @@ - (void)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer completion: - (void)_startSessionIfNeededAtTime:(CMTime)time { if (CMTIME_IS_INVALID(_sessionStartTime)) { - _sessionStartTime = time; - [_assetWriter startSessionAtSourceTime:time]; + CMTime bufferTimestamp = CMTimeAdd(time, [self.recorder videoDelay]); + _sessionStartTime = bufferTimestamp; + [_assetWriter startSessionAtSourceTime:bufferTimestamp]; } } @@ -804,26 +806,15 @@ - (void)appendVideoPixelBuffer:(CVPixelBufferRef)videoPixelBuffer atTime:(CMTime } duration = computedFrameDuration; } - /*{ - CMTime timeVideo = _lastTimeVideo; - CMTime actualBufferDuration = duration; - if (CMTIME_IS_VALID(timeVideo)) { - while (CMTIME_COMPARE_INLINE(CMTimeSubtract(actualBufferTime, timeVideo), >=, CMTimeMultiply(actualBufferDuration, 2))) { - NSLog(@"Missing buffer"); - timeVideo = CMTimeAdd(timeVideo, actualBufferDuration); - } - } - }*/ - + if ([_videoInput isReadyForMoreMediaData]) { if ([_videoPixelBufferAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:bufferTimestamp]) { - _currentSegmentDuration = CMTimeSubtract(CMTimeAdd(bufferTimestamp, duration), _sessionStartTime); + _currentSegmentDuration = CMTimeAdd(CMTimeSubtract(CMTimeAdd(bufferTimestamp, duration), _sessionStartTime),[self.recorder videoDelay]); _lastTimeVideo = actualBufferTime; _currentSegmentHasVideo = YES; completion(YES); } else { - NSLog(@"Failed to append buffer"); completion(NO); } } else { diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h index c81aa3ca..38108cb7 100644 --- a/Library/Sources/SCRecorder.h +++ b/Library/Sources/SCRecorder.h @@ -23,6 +23,11 @@ @interface SCRecorder : NSObject +/** + Time interval between audio and video sample buffer's presentation time + */ +- (CMTime)videoDelay; + /** Access the configuration for the video. 
*/ diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 2b38c36e..b707500d 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -49,9 +49,13 @@ @interface SCRecorder() { CMBlockBufferRef quietBlockBuffer; CMSampleBufferRef quietSampleBuffer; + SCSampleBufferHolder *everyVideoCapturedBuffer; + SCSampleBufferHolder *everyAudioCapturedBuffer; + SCSampleBufferHolder *firstVideoRecBuffer; } @property (nonatomic, strong) SCContext* scContext; - +@property (readonly, nonatomic) BOOL isRecordingActuallyStarted; +@property (assign, nonatomic) BOOL isPausingNow; @end @implementation SCRecorder @@ -509,31 +513,40 @@ - (void)_progressTimerFired:(NSTimer *)progressTimer { } - (void)prerecord { + if (self.isPausingNow) { + return; + } _didCaptureFirstSessionBuffer = NO; _runningTime = kCMTimeZero; hasDidAcquireAudioBuffer = [_delegate respondsToSelector:@selector(recorder:didAcquireAudioBuffer:length:timestamp:)]; } - (void)record { - _didCaptureFirstAudioBuffer = NO; - __weak typeof(self) wSelf = self; - void (^block)(void) = ^{ - typeof(self) internal = wSelf; - internal->_isRecording = YES; - if (internal->_movieOutput != nil && internal.session != nil) { - internal->_movieOutput.maxRecordedDuration = internal.maxRecordDuration; - [self beginRecordSegmentIfNeeded:internal.session]; - if (internal->_movieOutputProgressTimer == nil) { - internal->_movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; - } - } - }; - - if ([SCRecorder isSessionQueue]) { - block(); - } else { - dispatch_sync(_sessionQueue, block); - } + + if (self.isPausingNow) { + return; + } + _didCaptureFirstAudioBuffer = NO; + __weak typeof(self) wSelf = self; + void (^block)(void) = ^{ + typeof(self) internal = wSelf; + internal->_isRecording = YES; + internal->_isRecordingActuallyStarted = NO; + NSLog(@"SCRecord _isRecording == YES"); + if (internal->_movieOutput != nil && internal.session != nil) { + internal->_movieOutput.maxRecordedDuration = internal.maxRecordDuration; + [self beginRecordSegmentIfNeeded:internal.session]; + if (internal->_movieOutputProgressTimer == nil) { + internal->_movieOutputProgressTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 60.0 target:self selector:@selector(_progressTimerFired:) userInfo:nil repeats:YES]; + } + } + }; + + if ([SCRecorder isSessionQueue]) { + block(); + } else { + dispatch_sync(_sessionQueue, block); + } } - (void)pause { @@ -541,48 +554,59 @@ - (void)pause { } - (void)pause:(void(^)(void))completionHandler { - _isRecording = NO; - - __weak typeof(self) wSelf = self; - void (^block)(void) = ^{ - typeof(self) iSelf = wSelf; - SCRecordSession *recordSession = iSelf->_session; - - if (recordSession != nil) { - if (recordSession.recordSegmentReady) { - NSDictionary *info = [iSelf _createSegmentInfo]; - if (recordSession.isUsingMovieFileOutput) { - [iSelf->_movieOutputProgressTimer invalidate]; - iSelf->_movieOutputProgressTimer = nil; - if ([recordSession endSegmentWithInfo:info completionHandler:nil]) { - iSelf->_pauseCompletionHandler = completionHandler; - } else { - dispatch_handler(completionHandler); - } - } else { - [recordSession endSegmentWithInfo:info completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { - id delegate = iSelf.delegate; - if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { - [delegate recorder:self didCompleteSegment:segment 
inSession:recordSession error:error]; - } - if (completionHandler != nil) { - completionHandler(); - } - }]; - } - } else { - dispatch_handler(completionHandler); - } - } else { - dispatch_handler(completionHandler); - } - }; - - if ([SCRecorder isSessionQueue]) { - block(); - } else { - dispatch_async(_sessionQueue, block); - } + __weak typeof(self) wSelf = self; + if (self.isPausingNow) { + return; + } + self.isPausingNow = YES; + float delayInSec = CMTimeGetSeconds([self videoDelay]); + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSec * NSEC_PER_SEC)), _sessionQueue, ^{ + + _isRecording = NO; + [wSelf clearBufferHolder:firstVideoRecBuffer]; + + void (^block)(void) = ^{ + typeof(self) iSelf = wSelf; + SCRecordSession *recordSession = iSelf->_session; + + if (recordSession != nil) { + if (recordSession.recordSegmentReady) { + NSDictionary *info = [iSelf _createSegmentInfo]; + if (recordSession.isUsingMovieFileOutput) { + [iSelf->_movieOutputProgressTimer invalidate]; + iSelf->_movieOutputProgressTimer = nil; + if ([recordSession endSegmentWithInfo:info completionHandler:nil]) { + iSelf->_pauseCompletionHandler = completionHandler; + } else { + dispatch_handler(completionHandler); + } + } else { + [recordSession endSegmentWithInfo:info completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { + id delegate = iSelf.delegate; + if ([delegate respondsToSelector:@selector(recorder:didCompleteSegment:inSession:error:)]) { + [delegate recorder:self didCompleteSegment:segment inSession:recordSession error:error]; + } + if (completionHandler != nil) { + completionHandler(); + } + }]; + } + } else { + dispatch_handler(completionHandler); + } + } else { + dispatch_handler(completionHandler); + } + wSelf.isPausingNow = NO; + }; + + if ([SCRecorder isSessionQueue]) { + block(); + } else { + dispatch_async(_sessionQueue, block); + } + }); + } + (NSError*)createError:(NSString*)errorDescription { @@ -619,6 +643,7 @@ - (void)checkRecordSessionDuration:(SCRecordSession *)recordSession { if (CMTIME_IS_VALID(suggestedMaxRecordDuration)) { if (CMTIME_COMPARE_INLINE(currentRecordDuration, >=, suggestedMaxRecordDuration)) { _isRecording = NO; + [self clearBufferHolder:firstVideoRecBuffer]; dispatch_async(_sessionQueue, ^{ [recordSession endSegmentWithInfo:[self _createSegmentInfo] completionHandler:^(SCRecordSessionSegment *segment, NSError *error) { @@ -730,6 +755,7 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOu - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { _isRecording = NO; + [self clearBufferHolder:firstVideoRecBuffer]; __weak typeof(self) wSelf = self; dispatch_async(_sessionQueue, ^{ @@ -938,70 +964,108 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC } } -- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { - - if (captureOutput == _videoOutput) { - _lastVideoBuffer.sampleBuffer = sampleBuffer; - // NSLog(@"VIDEO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); - - if (_videoConfiguration.shouldIgnore) { - return; - } - - SCImageView *imageView = _SCImageView; - if (imageView != nil) { - CFRetain(sampleBuffer); - dispatch_async(dispatch_get_main_queue(), ^{ - 
[imageView setImageBySampleBuffer:sampleBuffer]; - CFRelease(sampleBuffer); - }); - } - } else if (captureOutput == _audioOutput) { - _lastAudioBuffer.sampleBuffer = sampleBuffer; - // NSLog(@"AUDIO BUFFER: %fs (%fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); +- (void)storeSampleBuffer:(CMSampleBufferRef)sampleBuffer forOutput:(AVCaptureOutput *)captureOutput { + + if (captureOutput == _videoOutput) { + everyVideoCapturedBuffer = [SCSampleBufferHolder new]; + everyVideoCapturedBuffer.sampleBuffer = sampleBuffer; + } + if (captureOutput == _audioOutput) { + everyAudioCapturedBuffer = [SCSampleBufferHolder new]; + everyAudioCapturedBuffer.sampleBuffer = sampleBuffer; + } + if (captureOutput == _videoOutput && _isRecording && !firstVideoRecBuffer) { + firstVideoRecBuffer = [SCSampleBufferHolder new]; + firstVideoRecBuffer.sampleBuffer = sampleBuffer; + } + + if (captureOutput == _videoOutput && _isRecording && !_isRecordingActuallyStarted) { + + CMTime delay = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),CMSampleBufferGetPresentationTimeStamp(firstVideoRecBuffer.sampleBuffer)); + _isRecordingActuallyStarted = CMTimeGetSeconds(delay) >= CMTimeGetSeconds([self videoDelay]); + } +} - if (_audioConfiguration.shouldIgnore) { - return; - } - if (_audioMuting) { - if (quietSampleBuffer == nil) { - CMItemCount numSamples = CMSampleBufferGetNumSamples(sampleBuffer); - size_t sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, 0); - CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); - size_t dataLength = numSamples * sampleSize; - OSStatus status = noErr; - static SInt16* blockOfZeros = nil; - - if (blockOfZeros == nil) - blockOfZeros = malloc(dataLength); - - memset(blockOfZeros, 0x0000, dataLength); - - status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, blockOfZeros, dataLength, kCFAllocatorDefault, nil, 0, dataLength, kCMBlockBufferAssureMemoryNowFlag, &quietBlockBuffer); - if (status) NSLog(@"CMBlockBuffer OSStatus = %i", status); - if (status == noErr) { - status = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault, quietBlockBuffer, YES, nil, nil, format, numSamples, kCMTimeZero, nil, &quietSampleBuffer); - if (status) NSLog(@"CMSampleBuffer OSStatus = %i", status); - } - } - if (quietSampleBuffer) { - CMSampleBufferSetOutputPresentationTimeStamp(quietSampleBuffer, CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); - sampleBuffer = quietSampleBuffer; - _lastAudioBuffer.sampleBuffer = sampleBuffer; - } - } - } +- (void)clearBufferHolder:(SCSampleBufferHolder*)sampleBufferHolder{ + sampleBufferHolder.sampleBuffer = nil; + sampleBufferHolder = nil; +} - if (!_initializeSessionLazily || _isRecording) { - SCRecordSession *recordSession = _session; - if (recordSession != nil) { - if (captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) { - [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection]; - } else if (captureOutput == _audioOutput) { - [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession]; - } - } - } +- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { + + [self storeSampleBuffer:sampleBuffer forOutput:captureOutput]; + + if (captureOutput == _videoOutput) { + _lastVideoBuffer.sampleBuffer = sampleBuffer; + if (_isRecording){ + // NSLog(@"VIDEO 
BUFFER: %.3fs (%.3fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); + } + if (_videoConfiguration.shouldIgnore) { + return; + } + + SCImageView *imageView = _SCImageView; + if (imageView != nil) { + CFRetain(sampleBuffer); + dispatch_async(dispatch_get_main_queue(), ^{ + [imageView setImageBySampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + }); + } + } else if (captureOutput == _audioOutput) { + _lastAudioBuffer.sampleBuffer = sampleBuffer; + if (_isRecording){ + if (CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer)) > 0) { + // NSLog(@"AUDIO BUFFER: %.3fs (%.3fs)", CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer))); + } + + } + if (_audioConfiguration.shouldIgnore) { + return; + } + if (_audioMuting) { + if (quietSampleBuffer == nil) { + CMItemCount numSamples = CMSampleBufferGetNumSamples(sampleBuffer); + size_t sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, 0); + CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); + size_t dataLength = numSamples * sampleSize; + OSStatus status = noErr; + static SInt16* blockOfZeros = nil; + + if (blockOfZeros == nil) + blockOfZeros = malloc(dataLength); + + memset(blockOfZeros, 0x0000, dataLength); + + status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, blockOfZeros, dataLength, kCFAllocatorDefault, nil, 0, dataLength, kCMBlockBufferAssureMemoryNowFlag, &quietBlockBuffer); + if (status) NSLog(@"CMBlockBuffer OSStatus = %i", status); + if (status == noErr) { + status = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault, quietBlockBuffer, YES, nil, nil, format, numSamples, kCMTimeZero, nil, &quietSampleBuffer); + if (status) NSLog(@"CMSampleBuffer OSStatus = %i", status); + } + } + if (quietSampleBuffer) { + CMSampleBufferSetOutputPresentationTimeStamp(quietSampleBuffer, CMSampleBufferGetPresentationTimeStamp(sampleBuffer)); + sampleBuffer = quietSampleBuffer; + _lastAudioBuffer.sampleBuffer = sampleBuffer; + } + } + } + + if (!_initializeSessionLazily || _isRecording) { + + SCRecordSession *recordSession = _session; + if (recordSession != nil) { + + if ((!recordSession.videoInitialized || _isRecordingActuallyStarted) && captureOutput == _videoOutput && _didCaptureFirstAudioBuffer) { + [self _handleVideoSampleBuffer:sampleBuffer withSession:recordSession connection:connection]; + } else if (captureOutput == _audioOutput) { + if (!_isPausingNow) { + [self _handleAudioSampleBuffer:sampleBuffer withSession:recordSession]; + } + } + } + } } - (NSDictionary *)_createSegmentInfo { @@ -1222,6 +1286,11 @@ - (void)previewViewFrameChanged { _previewLayer.frame = _previewView.bounds; } +- (CMTime)videoDelay{ + CMTime videoDelay = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(everyAudioCapturedBuffer.sampleBuffer),CMSampleBufferGetPresentationTimeStamp(everyVideoCapturedBuffer.sampleBuffer)); + return videoDelay; +} + #pragma mark - FOCUS - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates { From edc0c6810e2617df5a08691b47eb4ad8e9303311 Mon Sep 17 00:00:00 2001 From: Artem Yakovliev Date: Thu, 27 Dec 2018 17:50:58 +0200 Subject: [PATCH 43/55] video recording fix with sync. 
Screenshots disappear from timeline #39

---
 Library/Sources/SCRecorder.h | 5 +++++
 Library/Sources/SCRecorder.m | 18 +++++++++++-------
 2 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/Library/Sources/SCRecorder.h b/Library/Sources/SCRecorder.h
index 38108cb7..b5fb8765 100644
--- a/Library/Sources/SCRecorder.h
+++ b/Library/Sources/SCRecorder.h
@@ -424,6 +424,11 @@
 */
- (void)pause:( void(^ __nullable)(void)) completionHandler;

+/**
+ Indicates whether the recorder is currently in the process of pausing
+ */
+@property (assign, nonatomic, readonly) BOOL isPausingNow;
+
 /**
 Capture a photo from the camera
 @param completionHandler called on the main queue with the image taken or an error in case of a problem
diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m
index b707500d..034b5034 100644
--- a/Library/Sources/SCRecorder.m
+++ b/Library/Sources/SCRecorder.m
@@ -563,7 +563,9 @@ - (void)pause:(void(^)(void))completionHandler {
 dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSec * NSEC_PER_SEC)), _sessionQueue, ^{

 _isRecording = NO;
- [wSelf clearBufferHolder:firstVideoRecBuffer];
+ [wSelf clearFirstRecVideoBufferHolder];
+ wSelf.isPausingNow = NO;
+ _isRecordingActuallyStarted = NO;

 void (^block)(void) = ^{
 typeof(self) iSelf = wSelf;
@@ -597,7 +599,6 @@ - (void)pause:(void(^)(void))completionHandler {
 } else {
 dispatch_handler(completionHandler);
 }
- wSelf.isPausingNow = NO;
 };
@@ -643,7 +644,7 @@ - (void)checkRecordSessionDuration:(SCRecordSession *)recordSession {
 if (CMTIME_IS_VALID(suggestedMaxRecordDuration)) {
 if (CMTIME_COMPARE_INLINE(currentRecordDuration, >=, suggestedMaxRecordDuration)) {
 _isRecording = NO;
- [self clearBufferHolder:firstVideoRecBuffer];
+ [self clearFirstRecVideoBufferHolder];

 dispatch_async(_sessionQueue, ^{
 [recordSession endSegmentWithInfo:[self _createSegmentInfo] completionHandler:^(SCRecordSessionSegment *segment, NSError *error) {
@@ -755,7 +756,7 @@ - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOu

 - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
 _isRecording = NO;
- [self clearBufferHolder:firstVideoRecBuffer];
+ [self clearFirstRecVideoBufferHolder];

 __weak typeof(self) wSelf = self;
 dispatch_async(_sessionQueue, ^{
@@ -986,9 +987,9 @@ - (void)storeSampleBuffer:(CMSampleBufferRef)sampleBuffer forOutput:(AVCaptureOu
 }
 }

-- (void)clearBufferHolder:(SCSampleBufferHolder*)sampleBufferHolder{
- sampleBufferHolder.sampleBuffer = nil;
- sampleBufferHolder = nil;
+- (void)clearFirstRecVideoBufferHolder{
+ firstVideoRecBuffer.sampleBuffer = nil;
+ firstVideoRecBuffer = nil;
 }

 - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
@@ -1288,6 +1289,9 @@ - (void)previewViewFrameChanged {

 - (CMTime)videoDelay{
 CMTime videoDelay = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(everyAudioCapturedBuffer.sampleBuffer),CMSampleBufferGetPresentationTimeStamp(everyVideoCapturedBuffer.sampleBuffer));
+ if (CMTimeGetSeconds(videoDelay) < 0) {
+ videoDelay = kCMTimeZero;
+ }
 return videoDelay;
 }

From 7ab3e7f0cd740769a4550158fa24e9b3a4b96bb6 Mon Sep 17 00:00:00 2001
From: "gebe1987@gmail.com"
Date: Tue, 26 Mar 2019 16:38:26 -0400
Subject: [PATCH 44/55] iOS 11 crash fix

---
 Library/Sources/SCAudioConfiguration.m | 25
+++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/Library/Sources/SCAudioConfiguration.m b/Library/Sources/SCAudioConfiguration.m
index aac8c3bb..87ca223c 100644
--- a/Library/Sources/SCAudioConfiguration.m
+++ b/Library/Sources/SCAudioConfiguration.m
@@ -62,14 +62,23 @@ - (NSDictionary *)createAssetWriterOptionsUsingSampleBuffer:(CMSampleBufferRef)s
 if (channels == 0) {
 channels = kSCAudioConfigurationDefaultNumberOfChannels;
 }
-
- return @{
- AVFormatIDKey : @(self.format),
- AVEncoderBitRateKey : @(bitrate),
- AVNumberOfChannelsKey : @(channels),
- AVSampleRateKey : [NSNumber numberWithInt: sampleRate],
- AVEncoderAudioQualityKey : @(AVAudioQualityMax)
- };
+ // Setting AVEncoderAudioQualityKey crashes on iOS 11, so only include it on iOS 12 and later
+ if (@available(iOS 12.0, *)) {
+ return @{
+ AVFormatIDKey : @(self.format),
+ AVEncoderBitRateKey : @(bitrate),
+ AVNumberOfChannelsKey : @(channels),
+ AVSampleRateKey : [NSNumber numberWithInt: sampleRate],
+ AVEncoderAudioQualityKey : @(AVAudioQualityMax)
+ };
+ } else {
+ return @{
+ AVFormatIDKey : @(self.format),
+ AVEncoderBitRateKey : @(bitrate),
+ AVNumberOfChannelsKey : @(channels),
+ AVSampleRateKey : [NSNumber numberWithInt: sampleRate],
+ };
+ }
 }

 @end

From 3ee47030f51b5080e4a2b3f6da2ab8eaaa83198a Mon Sep 17 00:00:00 2001
From: Artem Iakovliev
Date: Wed, 31 Jul 2019 16:15:20 +0300
Subject: [PATCH 45/55] pause/resume functionality added for AssetExportSession

---
 Library/Sources/SCAssetExportSession.h | 15 +++++++++++++++
 Library/Sources/SCAssetExportSession.m | 14 ++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/Library/Sources/SCAssetExportSession.h b/Library/Sources/SCAssetExportSession.h
index 432b82c8..87625e19 100644
--- a/Library/Sources/SCAssetExportSession.h
+++ b/Library/Sources/SCAssetExportSession.h
@@ -68,6 +68,11 @@
 */
 @property (readonly, atomic) BOOL cancelled;

+/**
+ Set to YES after pauseExport is called and back to NO after resumeExport
+ */
+@property (readonly, nonatomic, assign) BOOL paused;
+
 /**
 The timeRange to read from the inputAsset
 */
@@ -103,6 +108,16 @@
 */
- (void)cancelExport;

+/**
+ Pauses the export. Sets the paused property to YES.
+ */
+- (void)pauseExport;
+
+/**
+ Resumes the export. Sets the paused property to NO.
+ */ +- (void)resumeExport; + /** Starts the asynchronous execution of the export session */ diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index a6fd5362..6b41b73a 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -41,6 +41,7 @@ @interface SCAssetExportSession() { @property (nonatomic, assign) BOOL needsLeaveAudio; @property (nonatomic, assign) BOOL needsLeaveVideo; @property (nonatomic, assign) CMTime nextAllowedVideoFrame; +@property (nonatomic, assign) BOOL paused; @end @@ -486,6 +487,19 @@ - (void)cancelExport }); } +- (void)pauseExport { + if (self.paused) { + return; + } + self.paused = YES; + dispatch_suspend(_videoQueue); +} + +- (void)resumeExport{ + self.paused = NO; + dispatch_resume(_videoQueue); +} + - (SCFilter *)_generateRenderingFilterForVideoSize:(CGSize)videoSize { SCFilter *watermarkFilter = [self _buildWatermarkFilterForVideoSize:videoSize]; SCFilter *renderingFilter = nil; From 6006095c73c1e4e10ca19d0d0e7d951da622b816 Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Thu, 1 Aug 2019 17:24:46 +0300 Subject: [PATCH 46/55] pause/resume fix --- Library/Sources/SCAssetExportSession.m | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index 6b41b73a..f9f3f7df 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -487,16 +487,21 @@ - (void)cancelExport }); } -- (void)pauseExport { +- (void)pauseExport{ if (self.paused) { return; } self.paused = YES; + dispatch_suspend(_audioQueue); dispatch_suspend(_videoQueue); } - (void)resumeExport{ + if (!self.paused) { + return; + } self.paused = NO; + dispatch_resume(_audioQueue); dispatch_resume(_videoQueue); } From fde242d55d4b1672af019d157055186dce1eebec Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Tue, 6 Aug 2019 12:19:06 +0300 Subject: [PATCH 47/55] check whether cmtime is numeric --- Library/Sources/SCRecorder.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 034b5034..6e2c36c0 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -1289,7 +1289,7 @@ - (void)previewViewFrameChanged { - (CMTime)videoDelay{ CMTime videoDelay = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(everyAudioCapturedBuffer.sampleBuffer),CMSampleBufferGetPresentationTimeStamp(everyVideoCapturedBuffer.sampleBuffer)); - if (CMTimeGetSeconds(videoDelay) < 0) { + if (CMTimeGetSeconds(videoDelay) < 0 || !CMTIME_IS_NUMERIC(videoDelay)) { videoDelay = kCMTimeZero; } return videoDelay; From 5e29710de113d0341a69302dac22cfc41cbdc45f Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Tue, 12 Nov 2019 18:59:32 +0200 Subject: [PATCH 48/55] fixed missing video buffers in processing queue --- Library/Sources/SCProcessingQueue.m | 40 +++++++++++++++++------------ 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/Library/Sources/SCProcessingQueue.m b/Library/Sources/SCProcessingQueue.m index f9de9552..ce4c6ff2 100644 --- a/Library/Sources/SCProcessingQueue.m +++ b/Library/Sources/SCProcessingQueue.m @@ -78,7 +78,7 @@ - (void)_process:(id (^)(void))processingBlock { } if (shouldStopProcessing) { - [self stopProcessing]; + [self stopProcessingWithQueueCleaning:NO]; } } } @@ -90,11 +90,15 @@ - (void)startProcessingWithBlock:(id (^)(void))processingBlock { } - (void)stopProcessing { + [self 
stopProcessingWithQueueCleaning:YES]; +} + +- (void)stopProcessingWithQueueCleaning:(BOOL)cleanQueue { dispatch_semaphore_wait(_accessQueue, DISPATCH_TIME_FOREVER); _completed = YES; - - [_queue removeAllObjects]; + if (cleanQueue) + [_queue removeAllObjects]; while (dispatch_semaphore_signal(_availableItemsToDequeue) < 0) { @@ -104,26 +108,28 @@ - (void)stopProcessing { } - dispatch_semaphore_signal(_accessQueue); + dispatch_semaphore_signal(_accessQueue); } - (id)dequeue { id obj = nil; + if (_completed && _queue.count <= 0) { + [self stopProcessing]; + return nil; + } - if (!_completed) { - dispatch_semaphore_wait(_availableItemsToDequeue, DISPATCH_TIME_FOREVER); - - dispatch_semaphore_wait(_accessQueue, DISPATCH_TIME_FOREVER); - if (_queue.count > 0) { - obj = _queue.firstObject; - [_queue removeObjectAtIndex:0]; - dispatch_semaphore_signal(_availableItemsToEnqueue); - } else { - dispatch_semaphore_signal(_availableItemsToDequeue); - } - - dispatch_semaphore_signal(_accessQueue); + dispatch_semaphore_wait(_availableItemsToDequeue, DISPATCH_TIME_FOREVER); + + dispatch_semaphore_wait(_accessQueue, DISPATCH_TIME_FOREVER); + if (_queue.count > 0) { + obj = _queue.firstObject; + [_queue removeObjectAtIndex:0]; + dispatch_semaphore_signal(_availableItemsToEnqueue); + } else { + dispatch_semaphore_signal(_availableItemsToDequeue); } + + dispatch_semaphore_signal(_accessQueue); return obj; } From 4fdc2f690e08061e18572d048f7430b926e22175 Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Wed, 4 Mar 2020 18:06:06 +0200 Subject: [PATCH 49/55] merge of segments disabled if only one segment is present --- Library/Sources/SCRecordSession.m | 48 ++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 4f0f485a..a8544a2f 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -680,21 +680,41 @@ - (AVAssetExportSession *)mergeSegmentsUsingPreset:(NSString *)exportSessionPres return nil; } else { - AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset presetName:exportSessionPreset]; - exportSession.outputURL = outputUrl; - exportSession.outputFileType = fileType; - exportSession.shouldOptimizeForNetworkUse = YES; - [exportSession exportAsynchronouslyWithCompletionHandler:^{ - NSError *error = exportSession.error; - - if (completionHandler != nil) { - dispatch_async(dispatch_get_main_queue(), ^{ - completionHandler(outputUrl, error); - }); - } - }]; + + if (self.segments.count == 1) { + + NSError* copyError = nil; + NSURL* srcURL = ((AVURLAsset*)asset).URL; + [[NSFileManager defaultManager] copyItemAtURL:srcURL + toURL:outputUrl + error:©Error]; + NSError* delError = nil; + [[NSFileManager defaultManager] removeItemAtURL:srcURL + error:&delError]; + + dispatch_async(dispatch_get_main_queue(), ^{ + if (completionHandler != nil) + completionHandler(outputUrl, copyError); + }); + return nil; + + } else { + + AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset presetName:exportSessionPreset]; + exportSession.outputURL = outputUrl; + exportSession.outputFileType = fileType; + exportSession.shouldOptimizeForNetworkUse = YES; + [exportSession exportAsynchronouslyWithCompletionHandler:^{ + NSError *error = exportSession.error; - return exportSession; + if (completionHandler != nil) { + dispatch_async(dispatch_get_main_queue(), ^{ + completionHandler(outputUrl, error); + }); + } + }]; + return 
exportSession; + } } } From 0086b44f22a1c3f07925e320f94ef2eab94e799d Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Fri, 18 Sep 2020 18:14:56 +0300 Subject: [PATCH 50/55] memory consumption reduced for creating of last video buffer snapshot --- Library/Sources/SCRecorder.m | 54 ++++++++++++++++++++++++++++++++---- 1 file changed, 49 insertions(+), 5 deletions(-) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index 6e2c36c0..e398b73a 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -423,16 +423,60 @@ - (UIImage *)_imageFromSampleBufferHolder:(SCSampleBufferHolder *)sampleBufferHo CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer); CIImage *ciImage = [CIImage imageWithCVPixelBuffer:buffer]; - CGImageRef cgImage = [_context createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(buffer), CVPixelBufferGetHeight(buffer))]; - - UIImage *image = [UIImage imageWithCGImage:cgImage]; - - CGImageRelease(cgImage); + // [CIContext createCGImage:...] leads to bigger memory consumption + UIImage *image = [self imageFromCIImage:ciImage + scale:1 + orientation:UIImageOrientationUp]; CFRelease(sampleBuffer); return image; } +static const size_t kComponentsPerPixel = 4; +static const size_t kBitsPerComponent = sizeof(unsigned char) * 8; +static void releasePixels(void *info, const void *data, size_t size) +{ + free((void*)data); +} +- (UIImage *)imageFromCIImage:(CIImage *)img + scale:(CGFloat)scale + orientation:(UIImageOrientation)orientation { + + int width = (int)img.extent.size.width; + int height = (int)img.extent.size.height; + + long memsize = sizeof(unsigned char) * width * height * kComponentsPerPixel; + + unsigned char *rawData = malloc(memsize); + + CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer: @NO}]; + + CGColorSpaceRef rgb = CGColorSpaceCreateDeviceRGB(); + + [context render:img toBitmap:rawData rowBytes:width*kComponentsPerPixel bounds:img.extent format:kCIFormatRGBA8 colorSpace:rgb]; + + CGDataProviderRef provider = CGDataProviderCreateWithData(nil, rawData, memsize, releasePixels); + + CGImageRef imageFromContext = CGImageCreate(width, + height, + kBitsPerComponent, + kBitsPerComponent * kComponentsPerPixel, + width*kComponentsPerPixel, + rgb, + kCGBitmapByteOrderDefault | kCGImageAlphaLast, + provider, + NULL, + false, + kCGRenderingIntentDefault); + UIImage *outImage = [UIImage imageWithCGImage:imageFromContext scale:scale orientation:orientation]; + + CGImageRelease(imageFromContext); + CGDataProviderRelease(provider); + CGColorSpaceRelease(rgb); + + return outImage; +} + - (UIImage *)snapshotOfLastVideoBuffer { return [self _imageFromSampleBufferHolder:_lastVideoBuffer]; } From 97a2a7b1444dfbfc5c63b9b719ee260e4cd50a6e Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Mon, 23 Nov 2020 19:21:29 +0200 Subject: [PATCH 51/55] option to render export overlay layer on main thread --- Library/Sources/SCAssetExportSession.m | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m index f9f3f7df..3a489c8c 100644 --- a/Library/Sources/SCAssetExportSession.m +++ b/Library/Sources/SCAssetExportSession.m @@ -186,7 +186,7 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput if ([overlay respondsToSelector:@selector(updateWithVideoTime:)]) { [overlay updateWithVideoTime:timeSeconds]; } - + [overlay.layer renderInContext:ctx]; // [overlay 
layoutIfNeeded]; }; @@ -196,8 +196,6 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput layoutBlock(); } - [overlay.layer renderInContext:ctx]; - CGContextRelease(ctx); }; } From 983f8020a10e6bd960057e4d1483cc6c6ae8ec34 Mon Sep 17 00:00:00 2001 From: Artem Iakovliev Date: Fri, 22 Jan 2021 17:25:21 +0200 Subject: [PATCH 52/55] added delegate method to get audio sample buffers while not recording --- Library/Sources/SCRecorder.m | 48 ++++++++++++++++++++++++++++ Library/Sources/SCRecorderDelegate.h | 6 ++++ 2 files changed, 54 insertions(+) diff --git a/Library/Sources/SCRecorder.m b/Library/Sources/SCRecorder.m index e398b73a..93350eba 100644 --- a/Library/Sources/SCRecorder.m +++ b/Library/Sources/SCRecorder.m @@ -1009,6 +1009,49 @@ - (void)_handleAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SC } } +- (void)_cacheAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer withSession:(SCRecordSession *)recordSession { + + CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer); + + if (_didCaptureFirstSessionBuffer == NO) { +// NSLog(@"**** FIRST SESSION ***"); + _firstSessionTime = sampleTime; + _lastBufferTime = sampleTime; + _didCaptureFirstSessionBuffer = YES; + } + if (_didCaptureFirstAudioBuffer == NO) { +// NSLog(@"**** FIRST BUFFER ***"); + _runningTime = CMTimeSubtract(_lastBufferTime, _firstSessionTime); + _firstBufferTime = sampleTime; + _didCaptureFirstAudioBuffer = YES; + } + _lastBufferTime = CMTimeAdd(sampleTime, sampleDuration); + + id delegate = self.delegate; + CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); + CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer); + size_t sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, 0); + size_t dataLength = sampleCount * sampleSize; + SInt16* data = malloc(dataLength); + OSStatus status = CMBlockBufferCopyDataBytes(blockBuffer, 0, dataLength, data); + CMTime bufferTimestamp = CMTimeSubtract(sampleTime, _firstBufferTime); + + if (CMTimeCompare(_runningTime, kCMTimeZero)) + bufferTimestamp = CMTimeAdd(bufferTimestamp, _runningTime); + +// dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0), ^{ + if (status == kCMBlockBufferNoErr) { + dispatch_async(dispatch_get_main_queue(), ^{ + [delegate recorder:self didReceiveAudioBuffer:data length:sampleCount timestamp:bufferTimestamp]; + }); + } else { + NSLog(@"OSStatus = %i", status); + } + free(data); +// }); +} + - (void)storeSampleBuffer:(CMSampleBufferRef)sampleBuffer forOutput:(AVCaptureOutput *)captureOutput { if (captureOutput == _videoOutput) { @@ -1065,6 +1108,11 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM } } + + if (!_isRecording) { + [self _cacheAudioSampleBuffer:sampleBuffer withSession:_session]; + } + if (_audioConfiguration.shouldIgnore) { return; } diff --git a/Library/Sources/SCRecorderDelegate.h b/Library/Sources/SCRecorderDelegate.h index caa82713..0dc4a3bc 100644 --- a/Library/Sources/SCRecorderDelegate.h +++ b/Library/Sources/SCRecorderDelegate.h @@ -130,4 +130,10 @@ typedef NS_ENUM(NSInteger, SCFlashMode) { - (void)recorder:(SCRecorder *__nonnull)recorder didAcquireAudioBuffer:(SInt16* __nonnull)audioBuffer length:(CMItemCount)length timestamp:(CMTime)time; +/** + Gives an opportunity to the delegate to do some low-level audio munging while not recording. 
+ */
+ - (void)recorder:(SCRecorder *__nonnull)recorder didReceiveAudioBuffer:(SInt16* __nonnull)audioBuffer
+ length:(CMItemCount)length timestamp:(CMTime)time;
+
 @end

From 3e2bfff89ffb159463b579c8db0ad8cd7fda73d3 Mon Sep 17 00:00:00 2001
From: Artem Iakovliev
Date: Fri, 26 Mar 2021 13:42:49 +0200
Subject: [PATCH 53/55] export cancellation fix

---
 Library/Sources/SCAssetExportSession.m | 15 +++++++++++++++
 SCRecorder.podspec | 4 ++--
 2 files changed, 17 insertions(+), 2 deletions(-)

diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m
index 3a489c8c..b60c4140 100644
--- a/Library/Sources/SCAssetExportSession.m
+++ b/Library/Sources/SCAssetExportSession.m
@@ -43,6 +43,10 @@ @interface SCAssetExportSession() {
 @property (nonatomic, assign) CMTime nextAllowedVideoFrame;
 @property (nonatomic, assign) BOOL paused;

+@property (nonatomic, weak) SCProcessingQueue* filterRenderingQueue;
+@property (nonatomic, weak) SCProcessingQueue* videoProcessingQueue;
+@property (nonatomic, weak) SCProcessingQueue* videoReadingQueue;
+
 @end

 @implementation SCAssetExportSession
@@ -224,6 +228,7 @@ - (void)beginReadWriteOnVideo {
 SCProcessingQueue *videoProcessingQueue = nil;
 SCProcessingQueue *filterRenderingQueue = nil;
 SCProcessingQueue *videoReadingQueue = [SCProcessingQueue new];
+ self.videoReadingQueue = videoReadingQueue;

 __weak typeof(self) wSelf = self;

@@ -243,6 +248,7 @@

 if (_videoPixelAdaptor != nil) {
 filterRenderingQueue = [SCProcessingQueue new];
+ self.filterRenderingQueue = filterRenderingQueue;
 filterRenderingQueue.maxQueueSize = 2;
 [filterRenderingQueue startProcessingWithBlock:^id{
 SCIOPixelBuffers *pixelBuffers = nil;
@@ -265,6 +271,7 @@
 }];

 videoProcessingQueue = [SCProcessingQueue new];
+ self.videoProcessingQueue = videoProcessingQueue;
 videoProcessingQueue.maxQueueSize = 2;
 [videoProcessingQueue startProcessingWithBlock:^id{
 SCIOPixelBuffers *videoBuffers = [filterRenderingQueue dequeue];
@@ -467,6 +474,14 @@ - (void)cancelExport
 _cancelled = YES;
 __weak typeof(self) wSelf = self;
 dispatch_sync(_videoQueue, ^{
+
+ [self.filterRenderingQueue stopProcessing];
+ [self.videoProcessingQueue stopProcessing];
+ [self.videoReadingQueue stopProcessing];
+
+ [self markInputComplete:self.audioInput error:nil];
+ [self markInputComplete:self.videoInput error:nil];
+
 typeof(self) iSelf = wSelf;
 if (iSelf->_needsLeaveVideo) {
 iSelf->_needsLeaveVideo = NO;
diff --git a/SCRecorder.podspec b/SCRecorder.podspec
index bcb41d89..dfb04454 100644
--- a/SCRecorder.podspec
+++ b/SCRecorder.podspec
@@ -1,7 +1,7 @@
 Pod::Spec.new do |s|

 s.name = "SCRecorder"
- s.version = "2.7.0"
+ s.version = "2.7.1"
 s.summary = "The camera engine that is complete, for real."
s.description = <<-DESC
@@ -12,7 +12,7 @@ Pod::Spec.new do |s|
 s.license = 'Apache License, Version 2.0'
 s.author = { "Simon CORSIN" => "simon@corsin.me" }
 s.platform = :ios, '8.0'
- s.source = { :git => "https://github.com/rFlex/SCRecorder.git", :tag => "v2.7.0" }
+ s.source = { :git => "https://github.com/rFlex/SCRecorder.git", :tag => "v2.7.1" }
 s.source_files = 'Library/Sources/*.{h,m}'
 s.public_header_files = 'Library/Sources/*.h'
 s.requires_arc = true

From b275ed0b26f6ae38e9a175ebee6b1d302b6e53a6 Mon Sep 17 00:00:00 2001
From: Artem Iakovliev
Date: Tue, 25 May 2021 14:04:57 +0300
Subject: [PATCH 54/55] export cancellation fix

---
 Library/Sources/SCAssetExportSession.m | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m
index b60c4140..24f03993 100644
--- a/Library/Sources/SCAssetExportSession.m
+++ b/Library/Sources/SCAssetExportSession.m
@@ -473,7 +473,7 @@ - (void)cancelExport
 {
 _cancelled = YES;
 __weak typeof(self) wSelf = self;
- dispatch_sync(_videoQueue, ^{
+ dispatch_async(_videoQueue, ^{

 [self.filterRenderingQueue stopProcessing];
 [self.videoProcessingQueue stopProcessing];
 [self.videoReadingQueue stopProcessing];

From f50f9fe0132779d963b27fb791b5a24c038df184 Mon Sep 17 00:00:00 2001
From: Artem Yakovlev
Date: Tue, 25 May 2021 14:29:52 +0300
Subject: [PATCH 55/55] podspec update

---
 SCRecorder.podspec | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/SCRecorder.podspec b/SCRecorder.podspec
index dfb04454..4a8e356a 100644
--- a/SCRecorder.podspec
+++ b/SCRecorder.podspec
@@ -1,7 +1,7 @@
 Pod::Spec.new do |s|

 s.name = "SCRecorder"
- s.version = "2.7.1"
+ s.version = "2.7.2"
 s.summary = "The camera engine that is complete, for real."

 s.description = <<-DESC
@@ -12,7 +12,7 @@ Pod::Spec.new do |s|
 s.license = 'Apache License, Version 2.0'
 s.author = { "Simon CORSIN" => "simon@corsin.me" }
 s.platform = :ios, '8.0'
- s.source = { :git => "https://github.com/rFlex/SCRecorder.git", :tag => "v2.7.1" }
+ s.source = { :git => "https://github.com/rFlex/SCRecorder.git", :tag => "v2.7.2" }
 s.source_files = 'Library/Sources/*.{h,m}'
 s.public_header_files = 'Library/Sources/*.h'
 s.requires_arc = true
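A short usage note on the pause/resume surface added to SCAssetExportSession in patches 45 and 46: pauseExport and resumeExport work by suspending and resuming the session's internal audio and video dispatch queues, and both are guarded so repeated calls are harmless. The sketch below shows one way a client might drive it; it assumes the existing SCAssetExportSession API from this library (outputUrl, error, cancelled, exportAsynchronouslyWithCompletionHandler:) and an export session that was already configured with an input asset and output URL elsewhere, so treat it as an illustration rather than canonical usage.

    // Sketch: pausing and resuming an in-flight export.
    // `exportSession` is assumed to be fully configured before this point.
    SCAssetExportSession *exportSession = /* configured elsewhere */;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.cancelled) {
            NSLog(@"Export cancelled");
        } else if (exportSession.error != nil) {
            NSLog(@"Export failed: %@", exportSession.error);
        } else {
            NSLog(@"Export finished: %@", exportSession.outputUrl);
        }
    }];

    // e.g. when the app resigns active: suspends the internal
    // audio and video queues and flips `paused` to YES.
    [exportSession pauseExport];

    // ...and when it becomes active again: balances the suspend
    // with dispatch_resume and flips `paused` back to NO.
    [exportSession resumeExport];

Because dispatch_suspend must be balanced by dispatch_resume before a queue is released, a paused session should always be resumed (or left running) before it is discarded; the guards added in patch 46 prevent double-suspend and double-resume, but not an unbalanced teardown.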