Fix issue #246 (#247)

Open. Wants to merge 1 commit into base: master.
Library/Sources/SCAssetExportSession.m (114 changes: 53 additions & 61 deletions)

@@ -48,7 +48,7 @@ @implementation SCAssetExportSession

-(instancetype)init {
self = [super init];

if (self) {
_audioQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.AudioQueue", nil);
_videoQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.VideoQueue", nil);
@@ -66,11 +66,11 @@ -(instancetype)init {

- (instancetype)initWithAsset:(AVAsset *)inputAsset {
self = [self init];

if (self) {
self.inputAsset = inputAsset;
}

return self;
}

@@ -82,11 +82,11 @@ - (void)dealloc {

- (AVAssetWriterInput *)addWriter:(NSString *)mediaType withSettings:(NSDictionary *)outputSettings {
AVAssetWriterInput *writer = [AVAssetWriterInput assetWriterInputWithMediaType:mediaType outputSettings:outputSettings];

if ([_writer canAddInput:writer]) {
[_writer addInput:writer];
}

return writer;
}
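Note: `-addWriter:withSettings:` wraps the standard create-and-attach pattern for `AVAssetWriterInput`. One subtlety visible above: the input is returned even when `canAddInput:` fails, so a rejected input only surfaces later as an append error. A minimal caller-side sketch (the settings keys here are illustrative; the real ones are computed from the session's configuration):

```objc
// Hypothetical caller-side sketch, not the session's actual settings.
NSDictionary *videoSettings = @{
    AVVideoCodecKey  : AVVideoCodecH264,
    AVVideoWidthKey  : @1280,
    AVVideoHeightKey : @720
};
AVAssetWriterInput *videoInput = [self addWriter:AVMediaTypeVideo withSettings:videoSettings];
videoInput.expectsMediaDataInRealTime = NO; // offline export: the reader sets the pace
```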

@@ -97,24 +97,20 @@ - (BOOL)encodePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime
- (SCIOPixelBuffers *)createIOPixelBuffers:(CMSampleBufferRef)sampleBuffer {
CVPixelBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

- if (_outputBufferDiffersFromInput) {
- CVPixelBufferRef outputPixelBuffer = nil;
-
- CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer);
-
- if (ret != kCVReturnSuccess) {
- NSLog(@"Unable to allocate pixelBuffer: %d", ret);
- return nil;
- }
-
- SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time];
- CVPixelBufferRelease(outputPixelBuffer);
-
- return pixelBuffers;
- } else {
- return [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:inputPixelBuffer time:time];
- }
+ CVPixelBufferRef outputPixelBuffer = nil;
+
+ CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer);
+
+ if (ret != kCVReturnSuccess) {
+ NSLog(@"Unable to allocate pixelBuffer: %d", ret);
+ return nil;
+ }
+
+ SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time];
+ CVPixelBufferRelease(outputPixelBuffer);
+
+ return pixelBuffers;
}
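Note: this hunk appears to carry the main behavioral change of the PR. `createIOPixelBuffers:` previously aliased the input buffer as the output buffer whenever `_outputBufferDiffersFromInput` was false; now it always vends a distinct output buffer from the adaptor's pool, which is what lets the lock/unlock calls elsewhere become unconditional. A sketch of the pool contract being relied on (`adaptor` is a stand-in for `_videoPixelAdaptor`):

```objc
// Borrow a buffer from the adaptor's pool; the pool only exists after
// -startWriting has run, and kCVReturnSuccess is the only outcome that
// leaves outputPixelBuffer valid.
CVPixelBufferRef outputPixelBuffer = NULL;
CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                  adaptor.pixelBufferPool,
                                                  &outputPixelBuffer);
if (ret == kCVReturnSuccess) {
    // ... render into outputPixelBuffer, hand it to the writer ...
    CVPixelBufferRelease(outputPixelBuffer); // balance the Create call
}
```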

- (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffers *)pixelBuffers {
@@ -139,15 +135,14 @@ - (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffer
[_context.CIContext render:result toCVPixelBuffer:pixelBuffers.outputPixelBuffer bounds:result.extent colorSpace:colorSpace];

CGColorSpaceRelease(colorSpace);

- if (pixelBuffers.inputPixelBuffer != pixelBuffers.outputPixelBuffer) {
- CVPixelBufferUnlockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
- }
- }
-
- outputPixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:pixelBuffers.outputPixelBuffer outputPixelBuffer:pixelBuffers.outputPixelBuffer time:pixelBuffers.time];
+ outputPixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:pixelBuffers.outputPixelBuffer outputPixelBuffer:pixelBuffers.outputPixelBuffer time:pixelBuffers.time];
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
+ CVPixelBufferUnlockBaseAddress(pixelBuffers.outputPixelBuffer, 0);

return outputPixelBuffers;
}
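Note: the unlock calls now pair one-to-one with the locks taken in `beginReadWriteOnVideo` below. Since input and output are always distinct buffers after this change, both are locked before rendering and both are unlocked here, honoring the Core Video rule that every lock is balanced by exactly one unlock with the same flags:

```objc
// The balanced lock/unlock pattern around CPU access to a pixel buffer.
CVPixelBufferLockBaseAddress(pixelBuffer, 0);   // 0 = read/write access
void *base = CVPixelBufferGetBaseAddress(pixelBuffer);
// ... read or write pixels through base ...
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); // flags must match the lock
```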

@@ -168,7 +163,7 @@ static CGContextRef SCCreateContextFromPixelBuffer(CVPixelBufferRef pixelBuffer)

- (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutputPixelBuffer:(CVPixelBufferRef)outputPixelBuffer atTimeInterval:(NSTimeInterval)timeSeconds {
UIView<SCVideoOverlay> *overlay = self.videoConfiguration.overlay;

if (overlay != nil) {
if ([overlay respondsToSelector:@selector(updateWithVideoTime:)]) {
[overlay updateWithVideoTime:timeSeconds];
Expand All @@ -177,9 +172,9 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput
CGContextRef ctx = SCCreateContextFromPixelBuffer(outputPixelBuffer);
overlay.frame = CGRectMake(0, 0, CVPixelBufferGetWidth(outputPixelBuffer), CVPixelBufferGetHeight(outputPixelBuffer));
[overlay layoutIfNeeded];

[overlay.layer renderInContext:ctx];

CGContextRelease(ctx);
};
}
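Note: `SCCreateContextFromPixelBuffer` (collapsed above) presumably wraps the buffer's base address in a bitmap context so the overlay layer can be drawn straight into the output frame. A hedged reconstruction of that standard construction, assuming a 32-bit BGRA buffer whose base address is already locked:

```objc
// Sketch only; the actual helper in this file may differ.
static CGContextRef SCCreateContextFromPixelBuffer(CVPixelBufferRef pixelBuffer) {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                             CVPixelBufferGetWidth(pixelBuffer),
                                             CVPixelBufferGetHeight(pixelBuffer),
                                             8, // bits per component
                                             CVPixelBufferGetBytesPerRow(pixelBuffer),
                                             colorSpace,
                                             kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    return ctx; // caller releases with CGContextRelease, as the diff does
}
```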
@@ -210,57 +205,55 @@ - (void)beginReadWriteOnVideo {
SCProcessingQueue *videoReadingQueue = [SCProcessingQueue new];

__weak typeof(self) wSelf = self;

videoReadingQueue.maxQueueSize = 2;

[videoReadingQueue startProcessingWithBlock:^id{
CMSampleBufferRef sampleBuffer = [wSelf.videoOutput copyNextSampleBuffer];
SCSampleBufferHolder *holder = nil;

if (sampleBuffer != nil) {
holder = [SCSampleBufferHolder sampleBufferHolderWithSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
}

return holder;
}];

if (_videoPixelAdaptor != nil) {
filterRenderingQueue = [SCProcessingQueue new];
filterRenderingQueue.maxQueueSize = 2;
[filterRenderingQueue startProcessingWithBlock:^id{
SCIOPixelBuffers *pixelBuffers = nil;
SCSampleBufferHolder *bufferHolder = [videoReadingQueue dequeue];

if (bufferHolder != nil) {
__strong typeof(self) strongSelf = wSelf;

if (strongSelf != nil) {
pixelBuffers = [strongSelf createIOPixelBuffers:bufferHolder.sampleBuffer];
CVPixelBufferLockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
- if (pixelBuffers.outputPixelBuffer != pixelBuffers.inputPixelBuffer) {
- CVPixelBufferLockBaseAddress(pixelBuffers.outputPixelBuffer, 0);
- }
+ CVPixelBufferLockBaseAddress(pixelBuffers.outputPixelBuffer, 0);
pixelBuffers = [strongSelf renderIOPixelBuffersWithCI:pixelBuffers];
}
}

return pixelBuffers;
}];

videoProcessingQueue = [SCProcessingQueue new];
videoProcessingQueue.maxQueueSize = 2;
[videoProcessingQueue startProcessingWithBlock:^id{
SCIOPixelBuffers *videoBuffers = [filterRenderingQueue dequeue];

if (videoBuffers != nil) {
[wSelf CGRenderWithInputPixelBuffer:videoBuffers.inputPixelBuffer toOutputPixelBuffer:videoBuffers.outputPixelBuffer atTimeInterval:CMTimeGetSeconds(videoBuffers.time)];
}

return videoBuffers;
}];
}

dispatch_group_enter(_dispatchGroup);
_needsLeaveVideo = YES;
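Note: the video path is a three-stage pipeline of `SCProcessingQueue`s (read, CI filter, CG overlay), each bounded at two in-flight items so decoding can run ahead of encoding without unbounded memory growth. The weak-then-strong dance in each block is the usual way to keep the session from retaining itself through its own queues:

```objc
// The retain-cycle-safe shape used by each pipeline stage; produceNextItem
// is a hypothetical stand-in for the stage's real work.
__weak typeof(self) wSelf = self;
[queue startProcessingWithBlock:^id{
    __strong typeof(self) strongSelf = wSelf; // promote once per iteration
    if (strongSelf == nil) {
        return nil; // session gone; the stage simply stops producing
    }
    return [strongSelf produceNextItem];
}];
```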

@@ -302,7 +295,7 @@ - (void)beginReadWriteOnVideo {
shouldReadNextBuffer = NO;
}
}

if (!shouldReadNextBuffer) {
[filterRenderingQueue stopProcessing];
[videoProcessingQueue stopProcessing];
@@ -328,20 +321,20 @@ - (void)beginReadWriteOnAudio {
BOOL shouldReadNextBuffer = YES;
while (strongSelf.audioInput.isReadyForMoreMediaData && shouldReadNextBuffer && !strongSelf.cancelled) {
CMSampleBufferRef audioBuffer = [strongSelf.audioOutput copyNextSampleBuffer];

if (audioBuffer != nil) {
shouldReadNextBuffer = [strongSelf.audioInput appendSampleBuffer:audioBuffer];

CMTime time = CMSampleBufferGetPresentationTimeStamp(audioBuffer);

CFRelease(audioBuffer);

[strongSelf _didAppendToInput:strongSelf.audioInput atTime:time];
} else {
shouldReadNextBuffer = NO;
}
}

if (!shouldReadNextBuffer) {
[strongSelf markInputComplete:strongSelf.audioInput error:nil];
if (strongSelf.needsLeaveAudio) {
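Note: the audio side is the classic AVFoundation pull loop: drain `copyNextSampleBuffer` while the writer input has capacity, balance every copy with `CFRelease`, and mark the input complete when the reader runs dry. Outside this library, the same loop is typically driven like this:

```objc
// Generic AVFoundation sketch, not this library's code.
[audioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
    while (audioInput.isReadyForMoreMediaData) {
        CMSampleBufferRef buffer = [audioOutput copyNextSampleBuffer];
        if (buffer == NULL) {
            [audioInput markAsFinished]; // reader exhausted or failed
            break;
        }
        [audioInput appendSampleBuffer:buffer];
        CFRelease(buffer); // balance the copy
    }
}];
```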
@@ -355,11 +348,11 @@ - (void)beginReadWriteOnAudio {

- (void)_setProgress:(float)progress {
[self willChangeValueForKey:@"progress"];

_progress = progress;

[self didChangeValueForKey:@"progress"];

id<SCAssetExportSessionDelegate> delegate = self.delegate;
if ([delegate respondsToSelector:@selector(assetExportSessionDidProgress:)]) {
[delegate assetExportSessionDidProgress:self];
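Note: `progress` is published with manual KVO, the will/did pair bracketing the ivar write before the delegate callback fires. A client can observe it the usual way (this sketch assumes registration with `NSKeyValueObservingOptionNew`):

```objc
// Registered elsewhere with:
// [exportSession addObserver:self forKeyPath:@"progress"
//                    options:NSKeyValueObservingOptionNew context:NULL];
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                        change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:@"progress"]) {
        float progress = [change[NSKeyValueChangeNewKey] floatValue];
        NSLog(@"Export progress: %.0f%%", progress * 100);
    }
}
```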
@@ -438,7 +431,7 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed {
(id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width],
(id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height]
};

_videoPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
}
}
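Note: the adaptor's width/height attributes size the pool that `createIOPixelBuffers:` draws from, so output frames always match `_outputBufferSize`. Frames produced this way are appended through the adaptor rather than the raw input, roughly:

```objc
// Generic sketch of appending a rendered frame through the adaptor.
if (videoInput.isReadyForMoreMediaData) {
    BOOL ok = [videoPixelAdaptor appendPixelBuffer:outputPixelBuffer
                              withPresentationTime:presentationTime];
    if (!ok) {
        NSLog(@"Append failed: %@", writer.error);
    }
}
```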
@@ -489,7 +482,6 @@ - (SCFilter *)_generateRenderingFilterForVideoSize:(CGSize)videoSize {
return renderingFilter;
}


- (SCFilter *)_buildWatermarkFilterForVideoSize:(CGSize)videoSize {
UIImage *watermarkImage = self.videoConfiguration.watermarkImage;

@@ -640,15 +632,15 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler
_cancelled = NO;
_nextAllowedVideoFrame = kCMTimeZero;
NSError *error = nil;

[[NSFileManager defaultManager] removeItemAtURL:self.outputUrl error:nil];

_writer = [AVAssetWriter assetWriterWithURL:self.outputUrl fileType:self.outputFileType error:&error];
_writer.shouldOptimizeForNetworkUse = _shouldOptimizeForNetworkUse;
_writer.metadata = [SCRecorderTools assetWriterMetadata];

EnsureSuccess(error, completionHandler);

_reader = [AVAssetReader assetReaderWithAsset:self.inputAsset error:&error];
_reader.timeRange = _timeRange;
EnsureSuccess(error, completionHandler);
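Note: `EnsureSuccess` is defined elsewhere in this file; from its call sites it evidently records the error and bails out through the completion handler when `error` is non-nil. A plausible shape, offered purely as an assumption for readers of this diff:

```objc
// Hypothetical reconstruction; the real macro is not part of this diff.
#define EnsureSuccess(error, completionHandler) \
    if (error != nil) { \
        _error = error; \
        if (completionHandler != nil) completionHandler(); \
        return; \
    }
```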
@@ -664,23 +656,23 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler
if (![_reader startReading]) {
EnsureSuccess(_reader.error, completionHandler);
}

if (![_writer startWriting]) {
EnsureSuccess(_writer.error, completionHandler);
}

[_writer startSessionAtSourceTime:kCMTimeZero];

_totalDuration = CMTimeGetSeconds(_inputAsset.duration);

[self beginReadWriteOnAudio];
[self beginReadWriteOnVideo];

dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{
if (_error == nil) {
_error = _writer.error;
}

if (_error == nil && _writer.status != AVAssetWriterStatusCancelled) {
[_writer finishWritingWithCompletionHandler:^{
_error = _writer.error;
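Note: taken together, the export entry point wires an `AVAssetReader` to an `AVAssetWriter`, kicks off the audio and video pipelines, and finishes the writer once the dispatch group drains. Driving it from client code looks roughly like this (a sketch based on the API visible in this diff, assuming the session exposes the `error` it records):

```objc
SCAssetExportSession *exportSession = [[SCAssetExportSession alloc] initWithAsset:asset];
exportSession.outputUrl = [NSURL fileURLWithPath:outputPath]; // hypothetical destination
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.error == nil) {
        NSLog(@"Export completed: %@", exportSession.outputUrl);
    } else {
        NSLog(@"Export failed: %@", exportSession.error);
    }
}];
```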