From fb76afa41af7109cd109f78b27f4fd8f3348f73b Mon Sep 17 00:00:00 2001
From: Guo Dong
Date: Tue, 8 Dec 2015 18:05:04 +0800
Subject: [PATCH] fix issue #246

---
 Library/Sources/SCAssetExportSession.m | 114 ++++++++++++-------
 1 file changed, 53 insertions(+), 61 deletions(-)

diff --git a/Library/Sources/SCAssetExportSession.m b/Library/Sources/SCAssetExportSession.m
index 4f692055..ef925d6b 100644
--- a/Library/Sources/SCAssetExportSession.m
+++ b/Library/Sources/SCAssetExportSession.m
@@ -48,7 +48,7 @@ @implementation SCAssetExportSession
 
 -(instancetype)init {
     self = [super init];
-    
+
     if (self) {
         _audioQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.AudioQueue", nil);
         _videoQueue = dispatch_queue_create("me.corsin.SCAssetExportSession.VideoQueue", nil);
@@ -66,11 +66,11 @@ -(instancetype)init {
 
 - (instancetype)initWithAsset:(AVAsset *)inputAsset {
     self = [self init];
-    
+
     if (self) {
         self.inputAsset = inputAsset;
     }
-    
+
     return self;
 }
 
@@ -82,11 +82,11 @@ - (void)dealloc {
 
 - (AVAssetWriterInput *)addWriter:(NSString *)mediaType withSettings:(NSDictionary *)outputSettings {
     AVAssetWriterInput *writer = [AVAssetWriterInput assetWriterInputWithMediaType:mediaType outputSettings:outputSettings];
-    
+
     if ([_writer canAddInput:writer]) {
         [_writer addInput:writer];
     }
-    
+
     return writer;
 }
 
@@ -97,24 +97,20 @@ - (BOOL)encodePixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime
 - (SCIOPixelBuffers *)createIOPixelBuffers:(CMSampleBufferRef)sampleBuffer {
     CVPixelBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
     CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-    
-    if (_outputBufferDiffersFromInput) {
-        CVPixelBufferRef outputPixelBuffer = nil;
-        
-        CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer);
-        
-        if (ret != kCVReturnSuccess) {
-            NSLog(@"Unable to allocate pixelBuffer: %d", ret);
-            return nil;
-        }
-        
-        SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time];
-        CVPixelBufferRelease(outputPixelBuffer);
-        
-        return pixelBuffers;
-    } else {
-        return [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:inputPixelBuffer time:time];
+
+    CVPixelBufferRef outputPixelBuffer = nil;
+
+    CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, _videoPixelAdaptor.pixelBufferPool, &outputPixelBuffer);
+
+    if (ret != kCVReturnSuccess) {
+        NSLog(@"Unable to allocate pixelBuffer: %d", ret);
+        return nil;
     }
+
+    SCIOPixelBuffers *pixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:inputPixelBuffer outputPixelBuffer:outputPixelBuffer time:time];
+    CVPixelBufferRelease(outputPixelBuffer);
+
+    return pixelBuffers;
 }
 
 - (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffers *)pixelBuffers {
@@ -139,15 +135,14 @@ - (SCIOPixelBuffers *)renderIOPixelBuffersWithCI:(SCIOPixelBuffer
             [_context.CIContext render:result toCVPixelBuffer:pixelBuffers.outputPixelBuffer bounds:result.extent colorSpace:colorSpace];
 
             CGColorSpaceRelease(colorSpace);
-            
-            if (pixelBuffers.inputPixelBuffer != pixelBuffers.outputPixelBuffer) {
-                CVPixelBufferUnlockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
-            }
         }
 
         outputPixelBuffers = [SCIOPixelBuffers IOPixelBuffersWithInputPixelBuffer:pixelBuffers.outputPixelBuffer outputPixelBuffer:pixelBuffers.outputPixelBuffer time:pixelBuffers.time];
     }
-    
+
+    CVPixelBufferUnlockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
+    CVPixelBufferUnlockBaseAddress(pixelBuffers.outputPixelBuffer, 0);
+
     return outputPixelBuffers;
 }
 
@@ -168,7 +163,7 @@ static CGContextRef SCCreateContextFromPixelBuffer(CVPixelBufferRef pixelBuffer)
 
 - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutputPixelBuffer:(CVPixelBufferRef)outputPixelBuffer atTimeInterval:(NSTimeInterval)timeSeconds {
     UIView *overlay = self.videoConfiguration.overlay;
-    
+
     if (overlay != nil) {
         if ([overlay respondsToSelector:@selector(updateWithVideoTime:)]) {
             [overlay updateWithVideoTime:timeSeconds];
@@ -177,9 +172,9 @@ - (void)CGRenderWithInputPixelBuffer:(CVPixelBufferRef)inputPixelBuffer toOutput
         CGContextRef ctx = SCCreateContextFromPixelBuffer(outputPixelBuffer);
         overlay.frame = CGRectMake(0, 0, CVPixelBufferGetWidth(outputPixelBuffer), CVPixelBufferGetHeight(outputPixelBuffer));
         [overlay layoutIfNeeded];
-        
+
         [overlay.layer renderInContext:ctx];
-        
+
         CGContextRelease(ctx);
     };
 }
@@ -210,57 +205,55 @@ - (void)beginReadWriteOnVideo {
     SCProcessingQueue *videoReadingQueue = [SCProcessingQueue new];
 
     __weak typeof(self) wSelf = self;
-    
+
     videoReadingQueue.maxQueueSize = 2;
     [videoReadingQueue startProcessingWithBlock:^id{
         CMSampleBufferRef sampleBuffer = [wSelf.videoOutput copyNextSampleBuffer];
         SCSampleBufferHolder *holder = nil;
-        
+
         if (sampleBuffer != nil) {
             holder = [SCSampleBufferHolder sampleBufferHolderWithSampleBuffer:sampleBuffer];
             CFRelease(sampleBuffer);
         }
-        
+
         return holder;
     }];
-    
+
     if (_videoPixelAdaptor != nil) {
         filterRenderingQueue = [SCProcessingQueue new];
         filterRenderingQueue.maxQueueSize = 2;
         [filterRenderingQueue startProcessingWithBlock:^id{
             SCIOPixelBuffers *pixelBuffers = nil;
             SCSampleBufferHolder *bufferHolder = [videoReadingQueue dequeue];
-            
+
             if (bufferHolder != nil) {
                 __strong typeof(self) strongSelf = wSelf;
                 if (strongSelf != nil) {
                     pixelBuffers = [strongSelf createIOPixelBuffers:bufferHolder.sampleBuffer];
                     CVPixelBufferLockBaseAddress(pixelBuffers.inputPixelBuffer, 0);
-                    if (pixelBuffers.outputPixelBuffer != pixelBuffers.inputPixelBuffer) {
-                        CVPixelBufferLockBaseAddress(pixelBuffers.outputPixelBuffer, 0);
-                    }
+                    CVPixelBufferLockBaseAddress(pixelBuffers.outputPixelBuffer, 0);
                     pixelBuffers = [strongSelf renderIOPixelBuffersWithCI:pixelBuffers];
                 }
             }
             return pixelBuffers;
         }];
-        
+
         videoProcessingQueue = [SCProcessingQueue new];
         videoProcessingQueue.maxQueueSize = 2;
         [videoProcessingQueue startProcessingWithBlock:^id{
             SCIOPixelBuffers *videoBuffers = [filterRenderingQueue dequeue];
-            
+
             if (videoBuffers != nil) {
                 [wSelf CGRenderWithInputPixelBuffer:videoBuffers.inputPixelBuffer toOutputPixelBuffer:videoBuffers.outputPixelBuffer atTimeInterval:CMTimeGetSeconds(videoBuffers.time)];
            }
-            
+
             return videoBuffers;
         }];
     }
-    
+
     dispatch_group_enter(_dispatchGroup);
     _needsLeaveVideo = YES;
@@ -302,7 +295,7 @@ - (void)beginReadWriteOnVideo {
                     shouldReadNextBuffer = NO;
                 }
             }
-            
+
             if (!shouldReadNextBuffer) {
                 [filterRenderingQueue stopProcessing];
                 [videoProcessingQueue stopProcessing];
@@ -328,20 +321,20 @@ - (void)beginReadWriteOnAudio {
             BOOL shouldReadNextBuffer = YES;
             while (strongSelf.audioInput.isReadyForMoreMediaData && shouldReadNextBuffer && !strongSelf.cancelled) {
                 CMSampleBufferRef audioBuffer = [strongSelf.audioOutput copyNextSampleBuffer];
-                
+
                 if (audioBuffer != nil) {
                     shouldReadNextBuffer = [strongSelf.audioInput appendSampleBuffer:audioBuffer];
-                    
+
                     CMTime time = CMSampleBufferGetPresentationTimeStamp(audioBuffer);
-                    
+
                     CFRelease(audioBuffer);
-                    
+
                     [strongSelf _didAppendToInput:strongSelf.audioInput atTime:time];
                 } else {
                     shouldReadNextBuffer = NO;
                 }
             }
-            
+
             if (!shouldReadNextBuffer) {
                 [strongSelf markInputComplete:strongSelf.audioInput error:nil];
                 if (strongSelf.needsLeaveAudio) {
@@ -355,11 +348,11 @@ - (void)beginReadWriteOnAudio {
 
 - (void)_setProgress:(float)progress {
     [self willChangeValueForKey:@"progress"];
-    
+
     _progress = progress;
-    
+
     [self didChangeValueForKey:@"progress"];
-    
+
     id<SCAssetExportSessionDelegate> delegate = self.delegate;
     if ([delegate respondsToSelector:@selector(assetExportSessionDidProgress:)]) {
         [delegate assetExportSessionDidProgress:self];
@@ -438,7 +431,7 @@ - (void)_setupPixelBufferAdaptorIfNeeded:(BOOL)needed {
             (id)kCVPixelBufferWidthKey : [NSNumber numberWithFloat:_outputBufferSize.width],
             (id)kCVPixelBufferHeightKey : [NSNumber numberWithFloat:_outputBufferSize.height]
         };
-        
+
         _videoPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
     }
 }
@@ -489,7 +482,6 @@ - (SCFilter *)_generateRenderingFilterForVideoSize:(CGSize)videoSize {
     return renderingFilter;
 }
 
-
 - (SCFilter *)_buildWatermarkFilterForVideoSize:(CGSize)videoSize {
     UIImage *watermarkImage = self.videoConfiguration.watermarkImage;
 
@@ -640,15 +632,15 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler
     _cancelled = NO;
     _nextAllowedVideoFrame = kCMTimeZero;
     NSError *error = nil;
-    
+
     [[NSFileManager defaultManager] removeItemAtURL:self.outputUrl error:nil];
-    
+
     _writer = [AVAssetWriter assetWriterWithURL:self.outputUrl fileType:self.outputFileType error:&error];
     _writer.shouldOptimizeForNetworkUse = _shouldOptimizeForNetworkUse;
     _writer.metadata = [SCRecorderTools assetWriterMetadata];
 
     EnsureSuccess(error, completionHandler);
-    
+
     _reader = [AVAssetReader assetReaderWithAsset:self.inputAsset error:&error];
     _reader.timeRange = _timeRange;
     EnsureSuccess(error, completionHandler);
@@ -664,23 +656,23 @@ - (void)exportAsynchronouslyWithCompletionHandler:(void (^)())completionHandler
     if (![_reader startReading]) {
         EnsureSuccess(_reader.error, completionHandler);
     }
-    
+
     if (![_writer startWriting]) {
         EnsureSuccess(_writer.error, completionHandler);
     }
-    
+
     [_writer startSessionAtSourceTime:kCMTimeZero];
-    
+
     _totalDuration = CMTimeGetSeconds(_inputAsset.duration);
 
     [self beginReadWriteOnAudio];
     [self beginReadWriteOnVideo];
-    
+
     dispatch_group_notify(_dispatchGroup, dispatch_get_main_queue(), ^{
         if (_error == nil) {
             _error = _writer.error;
         }
-        
+
         if (_error == nil && _writer.status != AVAssetWriterStatusCancelled) {
             [_writer finishWritingWithCompletionHandler:^{
                 _error = _writer.error;
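
What the patch changes, in short: createIOPixelBuffers now always allocates a distinct output buffer from _videoPixelAdaptor's pool (the old fast path returned the input buffer itself as the output when _outputBufferDiffersFromInput was NO), and the lock/unlock calls around filter rendering become unconditional and symmetric instead of being guarded by input/output identity checks. Below is a minimal sketch of the resulting locking discipline, assuming a pixel buffer pool and a rendering callback; SCCopyRenderedFrame and its render block are illustrative stand-ins, not SCRecorder API:

    #import <Foundation/Foundation.h>
    #import <CoreVideo/CoreVideo.h>

    // Sketch of the lock/unlock pattern this patch adopts: each frame gets
    // its own output buffer from the pool, and both buffers are locked and
    // unlocked unconditionally -- no input-equals-output special case.
    static CVPixelBufferRef SCCopyRenderedFrame(CVPixelBufferRef input,
                                                CVPixelBufferPoolRef pool,
                                                void (^render)(CVPixelBufferRef, CVPixelBufferRef)) {
        CVPixelBufferRef output = NULL;
        CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(NULL, pool, &output);
        if (ret != kCVReturnSuccess) {
            NSLog(@"Unable to allocate pixelBuffer: %d", ret);
            return NULL; // caller treats the frame as dropped
        }

        CVPixelBufferLockBaseAddress(input, 0);
        CVPixelBufferLockBaseAddress(output, 0);

        render(input, output);

        // Every lock above is balanced by exactly one unlock below.
        CVPixelBufferUnlockBaseAddress(input, 0);
        CVPixelBufferUnlockBaseAddress(output, 0);

        return output; // returned +1 retained; caller must CVPixelBufferRelease().
    }

Because the input and output buffers are now never the same object, an unlock can no longer be skipped or applied twice the way the old aliasing checks allowed, which is presumably the unbalanced lock/unlock behavior behind issue #246.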