Can a GIF image be set over a video?

Date: 2015-07-06 04:38:36

Tags: ios objective-c avfoundation video-processing

I am trying to merge a video with a GIF image. To do this I use MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; and I set the GIF image on the video layer, but unfortunately it does not animate. So my question is: can this be done at all? Any suggestions are welcome.
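
For reference, here is a minimal sketch of the setup I am describing (renderSize, gifImage, and the layer frames are placeholders, not exact code from my project):

// Layer tree handed to AVVideoCompositionCoreAnimationTool (sketch)
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, renderSize.width, renderSize.height);
videoLayer.frame = parentLayer.frame;
[parentLayer addSublayer:videoLayer];

CALayer *gifLayer = [CALayer layer];
gifLayer.frame = CGRectMake(0, 0, 200, 200);
gifLayer.contents = (__bridge id)gifImage.CGImage; // static contents: only one frame ever shows
[parentLayer addSublayer:gifLayer];

MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                            inLayer:parentLayer];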

Thanks in advance.

1 Answer:

Answer 0 (score: 2)

Apple's support for GIF is fairly limited.

You can use this code to convert a GIF into a video. (With the current code the GIF will be cropped to 480x480. For some resolutions the colors of the output image get distorted, so try to use a fixed frame size that you know works.)

Usage:

#import "SCGIFConverter.h"

// Create an NSURL to a temp file for the output (the file name here is just an example)
NSURL *tempFileURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"gif-output.mov"]];

[SCGIFConverter processGIFData:data toFilePath:tempFileURL completed:^(NSString *outputFilePath, NSError *error)
 {
     //Now you can access your tempFileURL to read the movie
     //outputFilePath can be 'nil' if there was a problem
 }];
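
Once the conversion finishes, the output movie behaves like any ordinary video asset, so you can merge it with your original video the usual way. A minimal sketch of loading it into an AVMutableComposition (the composition setup below is an assumption for illustration, not part of the converter):

// Load the converted movie and append its video track to a composition
AVURLAsset *gifAsset = [AVURLAsset URLAssetWithURL:tempFileURL options:nil];
AVAssetTrack *gifTrack = [[gifAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];

AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *insertError = nil;
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, gifAsset.duration)
                    ofTrack:gifTrack
                     atTime:kCMTimeZero
                      error:&insertError];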

SCGIFConverter.h

FOUNDATION_EXTERN NSString * const kGIF2MP4ConversionErrorDomain;

typedef enum {
    kGIF2MP4ConversionErrorInvalidGIFImage = 0,
    kGIF2MP4ConversionErrorAlreadyProcessing,
    kGIF2MP4ConversionErrorBufferingFailed,
    kGIF2MP4ConversionErrorInvalidResolution,
    kGIF2MP4ConversionErrorTimedOut,
} kGIF2MP4ConversionError;


typedef void (^kGIF2MP4ConversionCompleted) (NSString* outputFilePath, NSError* error);

@interface SCGIFConverter : NSObject

+ (BOOL) processGIFData: (NSData*) data
             toFilePath: (NSURL*) outFilePath
              completed: (kGIF2MP4ConversionCompleted)handler;

@end

SCGIFConverter.m

#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>

#import "SCGIFConverter.h"

#define FPS 30

NSString * const kGIF2MP4ConversionErrorDomain = @"GIF2MP4ConversionError";

@implementation SCGIFConverter

+ (BOOL) processGIFData: (NSData*) data
             toFilePath: (NSURL*) outFilePath
              completed: (kGIF2MP4ConversionCompleted) completionHandler {

    [[NSFileManager defaultManager] removeItemAtURL:outFilePath error:nil];

    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
    CGImageMetadataRef meta = CGImageSourceCopyMetadataAtIndex(source, 0, NULL);
    NSLog(@"%@", meta);
    if( meta ) CFRelease(meta); // 'Copy' functions return retained objects, so release the metadata
    unsigned char *bytes = (unsigned char*)data.bytes;
    NSError* error = nil;

    if( CGImageSourceGetStatus(source) != kCGImageStatusComplete ) {
        error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain
                                    code: kGIF2MP4ConversionErrorInvalidGIFImage
                                userInfo: nil];
        CFRelease(source);
        completionHandler(outFilePath.absoluteString, error);
        return NO;
    }

    // Read the size from the GIF header (bytes 6-9), then override it with the fixed 480x480 frame
    size_t sourceWidth = bytes[6] + (bytes[7]<<8), sourceHeight = bytes[8] + (bytes[9]<<8);
    sourceWidth = 480;
    sourceHeight = 480;
    //size_t sourceFrameCount = CGImageSourceGetCount(source);
    __block size_t currentFrameNumber = 0;
    __block Float64 totalFrameDelay = 0.f;

    AVAssetWriter* videoWriter = [[AVAssetWriter alloc] initWithURL: outFilePath
                                                           fileType: AVFileTypeQuickTimeMovie
                                                              error: &error];
    if( error ) {
        CFRelease(source);
        completionHandler(outFilePath.absoluteString, error);
        return NO;
    }

    if( sourceWidth > 6400 || sourceWidth == 0) {
        CFRelease(source);
        error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain
                                    code: kGIF2MP4ConversionErrorInvalidResolution
                                userInfo: nil];
        completionHandler(outFilePath.absoluteString, error);
        return NO;
    }

    if( sourceHeight > 4800 || sourceHeight == 0 ) {
        CFRelease(source);
        error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain
                                    code: kGIF2MP4ConversionErrorInvalidResolution
                                userInfo: nil];
        completionHandler(outFilePath.absoluteString, error);
        return NO;
    }

    size_t totalFrameCount = CGImageSourceGetCount(source);

    if( totalFrameCount <= 0 ) {
        CFRelease(source);
        error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain
                                    code: kGIF2MP4ConversionErrorInvalidGIFImage
                                userInfo: nil];
        completionHandler(outFilePath.absoluteString, error);
        return NO;
    }

    NSDictionary *videoSettings = @{
                                    AVVideoCodecKey : AVVideoCodecH264,
                                    AVVideoWidthKey : @(sourceWidth),
                                    AVVideoHeightKey : @(sourceHeight)
                                    };

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo
                                                                              outputSettings: videoSettings];
    videoWriterInput.expectsMediaDataInRealTime = YES;

    NSAssert([videoWriter canAddInput: videoWriterInput], @"Video writer can not add video writer input");
    [videoWriter addInput: videoWriterInput];

    NSDictionary* attributes = @{
                                 (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
                                 (NSString*)kCVPixelBufferWidthKey : @(sourceWidth),
                                 (NSString*)kCVPixelBufferHeightKey : @(sourceHeight),
                                 (NSString*)kCVPixelBufferCGImageCompatibilityKey : @YES,
                                 (NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES
                                 };

    AVAssetWriterInputPixelBufferAdaptor* adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: videoWriterInput
                                                                                                                     sourcePixelBufferAttributes: attributes];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime: CMTimeMakeWithSeconds(0, FPS)];

    while(YES) {
        if( videoWriterInput.isReadyForMoreMediaData ) {
#if DEBUG
            //NSLog(@"Drawing frame %lu/%lu", currentFrameNumber, totalFrameCount);
#endif

            NSDictionary* options = @{(NSString*)kCGImageSourceTypeIdentifierHint : (id)kUTTypeGIF};
            CGImageRef imgRef = CGImageSourceCreateImageAtIndex(source, currentFrameNumber, (__bridge CFDictionaryRef)options);
            if( imgRef ) {
                CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(source, currentFrameNumber, NULL);
                CFDictionaryRef gifProperties = properties ? CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary) : NULL;

                if( gifProperties ) {

                    CVPixelBufferRef pxBuffer = [self newBufferFrom: imgRef
                                                withPixelBufferPool: adaptor.pixelBufferPool
                                                      andAttributes: adaptor.sourcePixelBufferAttributes];
                    if( pxBuffer ) {
                        NSNumber* delayTime = (__bridge NSNumber*)CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFDelayTime);
                        if (currentFrameNumber != 0) { // the first frame is presented at t = 0
                            totalFrameDelay += delayTime.floatValue;
                        }
                        CMTime time = CMTimeMakeWithSeconds(totalFrameDelay, FPS);

                        if( ![adaptor appendPixelBuffer: pxBuffer withPresentationTime: time] ) {
                            NSLog(@"Could not save pixel buffer!: %@", videoWriter.error);
                            CFRelease(properties);
                            CGImageRelease(imgRef);
                            CVBufferRelease(pxBuffer);
                            break;
                        }

                        CVBufferRelease(pxBuffer);
                    }
                }

                if( properties ) CFRelease(properties);
                CGImageRelease(imgRef);

                currentFrameNumber++;
            }
            else {
                //was no image returned -> end of file?
                [videoWriterInput markAsFinished];

                void (^videoSaveFinished)(void) = ^{
                    AVAssetWriter * retainedVideoWriter = videoWriter;
                    completionHandler(outFilePath.absoluteString, nil);
                    retainedVideoWriter = nil;
                };

                if( [videoWriter respondsToSelector: @selector(finishWritingWithCompletionHandler:)]) {
                    [videoWriter finishWritingWithCompletionHandler: videoSaveFinished];
                }
                else {
                    [videoWriter finishWriting];
                    videoSaveFinished();
                }
                break;
            }
        }
        else {
            //NSLog(@"Was not ready...");
            [NSThread sleepForTimeInterval: 0.1];
        }
    }

    CFRelease(source);

    return YES;
}


+ (CVPixelBufferRef) newBufferFrom: (CGImageRef) frame
               withPixelBufferPool: (CVPixelBufferPoolRef) pixelBufferPool
                     andAttributes: (NSDictionary*) attributes {
    NSParameterAssert(frame);

    size_t width = 480;//CGImageGetWidth(frame);
    size_t height = 480;//CGImageGetHeight(frame);

    // Compute an aspect-fill size so the frame covers the fixed width x height canvas
    size_t frameHeight = height;
    size_t frameWidth = CGImageGetWidth(frame)*height/CGImageGetHeight(frame);
    if (frameWidth<width) {
        frameWidth = width;
        frameHeight = CGImageGetHeight(frame)*width/CGImageGetWidth(frame);
    }
    // Allow the frame to overflow the canvas by up to 12% before scaling it down
    CGFloat relax = 0.12;
    if (frameWidth>width) {
        CGFloat factor = MAX((CGFloat)width/frameWidth,1-relax);
        frameWidth*=factor;
    }
    if (frameHeight>height) {
        CGFloat factor = MAX((CGFloat)height/frameHeight,1-relax);
        frameHeight*=factor;
    }

    size_t bpc = 8;
    CGColorSpaceRef colorSpace =  CGColorSpaceCreateDeviceRGB();

    CVPixelBufferRef pxBuffer = NULL;
    CVReturn status = kCVReturnSuccess;

    if( pixelBufferPool )
        status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pxBuffer);
    else {
        status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)attributes, &pxBuffer);
    }

    NSAssert(status == kCVReturnSuccess, @"Could not create a pixel buffer");

    CVPixelBufferLockBaseAddress(pxBuffer, 0);
    void *pxData = CVPixelBufferGetBaseAddress(pxBuffer);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxBuffer);


    CGContextRef context = CGBitmapContextCreate(pxData,
                                                 width,
                                                 height,
                                                 bpc,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSAssert(context, @"Could not create a context");

    // Draw the frame centered in the canvas; anything outside is cropped
    CGContextDrawImage(context,
                       CGRectMake(-(frameWidth-(CGFloat)width)/2, -(frameHeight-(CGFloat)height)/2, frameWidth, frameHeight), frame);

    CVPixelBufferUnlockBaseAddress(pxBuffer, 0);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    return pxBuffer;
}

@end