如何修复CVPixelBuffer内存泄漏

时间:2015-02-17 03:20:21

标签: ios objective-c memory-leaks

使用HJImagesToVideo(来自github的源代码)将uiimages转换为mp4,但我资助它可能有内存泄漏。超过200个图像转换,会有内存警告,然后崩溃。源代码在这里:

 

+ (void)writeImageAsMovie:(NSArray )array toPath:(NSString)path size:(CGSize)size fps:(int)fps animateTransitions:(BOOL)shouldAnimateTransitions withCallbackBlock:(SuccessBlock)callbackBlock { NSLog(@"%@", path); NSError *error = nil; AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4 error:&error]; if (error) { if (callbackBlock) { callbackBlock(NO); } return; } NSParameterAssert(videoWriter); NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: [NSNumber numberWithInt:size.width], AVVideoHeightKey: [NSNumber numberWithInt:size.height]}; AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil]; NSParameterAssert(writerInput); NSParameterAssert([videoWriter canAddInput:writerInput]); [videoWriter addInput:writerInput]; //Start a session: [videoWriter startWriting]; [videoWriter startSessionAtSourceTime:kCMTimeZero]; CVPixelBufferRef buffer; CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer); CMTime presentTime = CMTimeMake(0, fps); int i = 0; while (1) { if(writerInput.readyForMoreMediaData) { presentTime = CMTimeMake(i, fps); if (i >= [array count]) { buffer = NULL; } else { buffer = [HJImagesToVideo pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)]; } if (buffer) { //append buffer BOOL appendSuccess = [HJImagesToVideo appendToAdapter:adaptor pixelBuffer:buffer atTime:presentTime withInput:writerInput]; NSAssert(appendSuccess, @"Failed to append"); if (shouldAnimateTransitions && i + 1 < array.count) { //Create time each fade frame is displayed CMTime fadeTime = CMTimeMake(1, fps*TransitionFrameCount); //Add a delay, causing the base image to have more show time before fade begins. for (int b = 0; b < FramesToWaitBeforeTransition; b++) { presentTime = CMTimeAdd(presentTime, fadeTime); } //Adjust fadeFrameCount so that the number and curve of the fade frames and their alpha stay consistant NSInteger framesToFadeCount = TransitionFrameCount - FramesToWaitBeforeTransition; //Apply fade frames for (double j = 1; j < framesToFadeCount; j++) { buffer = [HJImagesToVideo crossFadeImage:[array[i] CGImage] toImage:[array[i + 1] CGImage] atSize:CGSizeMake(480, 320) withAlpha:j/framesToFadeCount]; BOOL appendSuccess = [HJImagesToVideo appendToAdapter:adaptor pixelBuffer:buffer atTime:presentTime withInput:writerInput]; presentTime = CMTimeAdd(presentTime, fadeTime); NSAssert(appendSuccess, @"Failed to append"); } } i++; } else { //Finish the session: [writerInput markAsFinished]; [videoWriter finishWritingWithCompletionHandler:^{ NSLog(@"Successfully closed video writer"); if (videoWriter.status == AVAssetWriterStatusCompleted) { if (callbackBlock) { callbackBlock(YES); } } else { if (callbackBlock) { callbackBlock(NO); } } }]; CVPixelBufferPoolRelease(adaptor.pixelBufferPool); //CVPixelBufferRelease(buffer); NSLog (@"Done"); break; } } } } + (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)imageSize { NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES, (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES}; CVPixelBufferRef pxbuffer = NULL; CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width, imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer); NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); CVPixelBufferLockBaseAddress(pxbuffer, 0); void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); NSParameterAssert(pxdata != NULL); CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width, imageSize.height, 8, 4*imageSize.width, rgbColorSpace, kCGImageAlphaNoneSkipFirst); NSParameterAssert(context); CGContextDrawImage(context, CGRectMake(0 + (imageSize.width-CGImageGetWidth(image))/2, (imageSize.height-CGImageGetHeight(image))/2, CGImageGetWidth(image), CGImageGetHeight(image)), image); CGColorSpaceRelease(rgbColorSpace); CGContextRelease(context); CVPixelBufferUnlockBaseAddress(pxbuffer, 0); return pxbuffer; }

0 个答案:

没有答案