Adding a title to each video

Date: 2015-11-24 12:10:55

Tags: merge avfoundation avmutablecomposition

I am merging multiple videos into a single video using AVMutableComposition, and that part now works correctly. I would like to overlay a different title on each video. Any help would be appreciated. Thanks.

Here is what I have tried so far:

    for (int i = 0; i < [arrSelectedUrls count]; i++)
    {
        // Get the asset for this clip's URL.
        AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];

        BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;

        // Let AVFoundation pick a unique track ID; passing the loop index here
        // (as the original code did) risks colliding or invalid track IDs.
        AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

        audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        if (hasAudio)
        {
            [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
        }

        float sect = CMTimeGetSeconds(currentAsset.duration);
        NSString *strSect = [NSString stringWithFormat:@"%f", sect]; // (currently unused)
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];
        [arrDuration addObject:@"0.5"];

        AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];

        AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
        BOOL isCurrentAssetPortrait = NO;
        CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

        // Derive the capture orientation from the track's preferred transform.
        if (currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  { currentAssetOrientation = UIImageOrientationRight; isCurrentAssetPortrait = YES; }
        if (currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  { currentAssetOrientation = UIImageOrientationLeft;  isCurrentAssetPortrait = YES; }
        if (currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   { currentAssetOrientation = UIImageOrientationUp; }
        if (currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) { currentAssetOrientation = UIImageOrientationDown; }

        CGFloat FirstAssetScaleToFitRatio = 320.0 / 320.0;
        if (isCurrentAssetPortrait) {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
        } else {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
        }

        duration = CMTimeAdd(duration, currentAsset.duration);

        // Hide this clip's track once its segment ends.
        [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
        [arrayInstruction addObject:currentAssetLayerInstruction];

        NSLog(@"%lld", duration.value / duration.timescale);

        CATextLayer *titleLayer = [CATextLayer layer];
        if (i == 0) {
            titleLayer.string = @"www.miivdo.com";
        }
        if (i == 1) {
            titleLayer.string = @"www.mail.com";
        }

        CGSize videoSize = [currentAssetTrack naturalSize];
        titleLayer.fontSize = videoSize.height / 14;
        titleLayer.shadowOpacity = 0.5;
        titleLayer.alignmentMode = kCAAlignmentRight;
        titleLayer.bounds = CGRectMake(0, 0, 320, 50); // You may need to adjust this for proper display

        // NOTE: these layers are re-created on every iteration, so only the last
        // clip's parentLayer/videoLayer/titleLayer survive the loop.
        parentLayer = [CALayer layer];
        videoLayer  = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        videoLayer.frame  = CGRectMake(0, 0, videoSize.width, videoSize.height);
        [parentLayer addSublayer:videoLayer];
        [parentLayer addSublayer:titleLayer];
    }

    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
    MainInstruction.layerInstructions = arrayInstruction;
    MainCompositionInst = [AVMutableVideoComposition videoComposition];

    // The animation tool only receives the layers from the final loop iteration,
    // which is why a single title ends up over the whole merged video.
    MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
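
A likely reason the per-clip titles never show up: parentLayer and videoLayer are re-created on every loop iteration, and videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:inLayer: only ever sees the layers from the last pass. A common alternative, shown below as a rough sketch (the titles array is hypothetical), is to build the layer tree once and control when each title is visible with a timed opacity animation:

    // Sketch: one layer tree, one timed CATextLayer per clip (hypothetical `titles` array).
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer  = [CALayer layer];
    parentLayer.frame = videoLayer.frame = CGRectMake(0, 0, 320, 320);
    [parentLayer addSublayer:videoLayer];

    CGFloat start = 0.0;
    for (int i = 0; i < [arrSelectedUrls count]; i++) {
        AVAsset *asset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];
        CGFloat clipLength = CMTimeGetSeconds(asset.duration);

        CATextLayer *titleLayer = [CATextLayer layer];
        titleLayer.string = [titles objectAtIndex:i]; // hypothetical per-clip title strings
        titleLayer.frame = CGRectMake(0, 0, 320, 50);
        titleLayer.opacity = 0.0; // hidden except during its own clip

        // Hold opacity at 1.0 for exactly this clip's time range.
        CAKeyframeAnimation *show = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
        show.values = @[@1.0, @1.0];
        show.beginTime = (start > 0.0) ? start : AVCoreAnimationBeginTimeAtZero; // Core Animation remaps a beginTime of 0
        show.duration = clipLength;
        show.removedOnCompletion = NO; // required when rendering through the animation tool
        [titleLayer addAnimation:show forKey:@"showTitle"];

        [parentLayer addSublayer:titleLayer];
        start += clipLength;
    }
    // Then hand parentLayer/videoLayer to the animation tool exactly once, as above.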

1 Answer:

Answer 0 (score: 0)

You can try this:
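
The idea is a two-pass approach: MergeAndSave exports every clip on its own, burning that clip's title in via a CATextLayer, and collects the exported file paths in UrlArray. Once the last export completes it calls mergeAllVideoClipscompletionCallback, which is presumably where MergeAndExport gets triggered to concatenate the pre-titled files into the final movie. Because each clip is rendered separately, each one can carry a different title.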

- (void)MergeAndSave
{
    for (int i = 0; i < [arrSelectedUrls count]; i++)
    {

    NSURL *url;
    CALayer * parentLayer;
    CALayer * videoLayer;
    AVSynchronizedLayer *animationLayer = [AVSynchronizedLayer layer];
    UIImage *image1 = [UIImage imageNamed:@"Fire1.jpeg"];
    UIImage *image2 = [UIImage imageNamed:@"Fire2.jpeg"];
    UIImage *image3 = [UIImage imageNamed:@"Fire3.jpeg"];
    UIImage *image4 = [UIImage imageNamed:@"Fire4.jpeg"];
    //int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
    NSMutableArray *starImageArray = [NSMutableArray arrayWithObjects:(id)image1.CGImage,(id)image2.CGImage,(id)image3.CGImage,(id)image4.CGImage, nil];
    NSMutableArray *arrDuration = [[NSMutableArray alloc] init];
    NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

    AVMutableVideoCompositionInstruction * MainInstruction =
    [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    AVMutableCompositionTrack *audioTrack;

    audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                             preferredTrackID:kCMPersistentTrackID_Invalid];


    CMTime duration = kCMTimeZero;

    AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]]; // get the asset for this iteration's URL
    BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
    // Let AVFoundation pick a unique track ID rather than passing the loop index,
    // which risks colliding track IDs.
    AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

    if (hasAudio)
    {
        // Insert this clip's audio into the audio track created above. The
        // original code then added a second audio track and re-inserted the
        // composition's own audio into it, which duplicates the sound.
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
    }

    float sect = CMTimeGetSeconds(currentAsset.duration);
    NSString *strSect = [NSString stringWithFormat:@"%f", sect]; // (currently unused)
    [arrDuration addObject:@"0.5"];
    [arrDuration addObject:@"0.5"];
    [arrDuration addObject:@"0.5"];
    [arrDuration addObject:@"0.5"];



    AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];

    AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation currentAssetOrientation  = UIImageOrientationUp;
    BOOL  isCurrentAssetPortrait  = NO;
    CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

    if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  {currentAssetOrientation =  UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   {currentAssetOrientation =  UIImageOrientationUp;}
    if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}

    CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
    if(isCurrentAssetPortrait){
        FirstAssetScaleToFitRatio = 320.0/320.0;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
    }else{
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
    }

    duration=CMTimeAdd(duration, currentAsset.duration);

    [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
    [arrayInstruction addObject:currentAssetLayerInstruction];

    NSLog(@"%lld", duration.value/duration.timescale);

    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = [NSString stringWithFormat:@"Final%@",[Titlearry objectAtIndex:i]];

    CGSize videoSize = [currentAssetTrack naturalSize];
    titleLayer.fontSize = videoSize.height / 14;

     titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentRight;
   titleLayer.bounds = CGRectMake(0, 0, 320, 500); //You may need to adjust this for proper display

    parentLayer = [CALayer layer];
    videoLayer  = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
   [parentLayer addSublayer:videoLayer];
   [parentLayer addSublayer:titleLayer];




    // New addition from Ritesh: key times for the image-flip animation.
    double time1 = 0.01;
    // in ms, (0.2*1000)/1000 == 200/1000 == 0.2
    // CMTime time2 = CMTimeMake(time1*1000, 1000);
    NSMutableArray *keyTimesArray = [[NSMutableArray alloc] init]; // (currently unused; setKeyTimes below is commented out)
    for (int z = 1; z < 4; z++)
    {
        [keyTimesArray addObject:[NSNumber numberWithFloat:(time1 + (float)z / 30)]];
    }





    animationLayer.opacity = 1.0;
   // animationLayer.backgroundColor = [UIColor yellowColor].CGColor;
    [animationLayer setFrame:CGRectMake(0, 0, 320, 50)];
    [parentLayer addSublayer:animationLayer];


    // Flip through the Fire images by animating the layer's contents.
    CAKeyframeAnimation *changeImageAnimation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
    changeImageAnimation.calculationMode = kCAAnimationDiscrete;

    [animationLayer setContents:[starImageArray lastObject]];

    changeImageAnimation.duration = 10.0f;
    changeImageAnimation.repeatCount = 30;
    changeImageAnimation.values = [NSArray arrayWithArray:starImageArray];
    // changeImageAnimation.removedOnCompletion = YES;
    // [changeImageAnimation setKeyTimes:arrDuration];
    [changeImageAnimation setBeginTime:1.0];
    [changeImageAnimation setRemovedOnCompletion:NO]; // keep the animation alive for offline rendering
    [changeImageAnimation setDelegate:self]; // (the original set the delegate twice)
    [animationLayer addAnimation:changeImageAnimation forKey:@"contents"];



    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
    MainInstruction.layerInstructions = arrayInstruction;
   AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

    MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1,30);
    MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);

    // NSString *myPathDocs =  [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge%@.mov",[Titlearry objectAtIndex:i]]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
     url = [NSURL fileURLWithPath:outputFilePath];
    [UrlArray addObject:outputFilePath];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL=url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^
     {
         switch (exporter.status)
         {
             case AVAssetExportSessionStatusCompleted:
             {




                 // NOTE: exports run concurrently, so the last index finishing is
                 // not a guarantee that the earlier exports are done; see the
                 // dispatch_group sketch after this method for a safer signal.
                 if (i == [arrSelectedUrls count] - 1)
                 {
                     [self mergeAllVideoClipscompletionCallback];
                 }

             }
                 break;
             case AVAssetExportSessionStatusFailed:
                 NSLog(@"Failed:%@", exporter.error.description);
                 break;
             case AVAssetExportSessionStatusCancelled:
                 NSLog(@"Canceled:%@", exporter.error);
                 break;
             case AVAssetExportSessionStatusExporting:
                 NSLog(@"Exporting!");
                 break;
             case AVAssetExportSessionStatusWaiting:
                 NSLog(@"Waiting");
                 break;
             default:
                 break;
         }
     }];
    }

    // [self performSelector:@selector(MergeAndExport) withObject:nil afterDelay:3.0];
    // [self MergeAndExport];
}
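
A safer way to know when every per-clip export has finished is to track the sessions with a dispatch group instead of keying off the last loop index. This is only a sketch, under the assumption that mergeAllVideoClipscompletionCallback is what kicks off the final merge:

    // Sketch: signal completion only after ALL AVAssetExportSessions finish.
    dispatch_group_t exportGroup = dispatch_group_create();
    for (int i = 0; i < [arrSelectedUrls count]; i++)
    {
        dispatch_group_enter(exportGroup);
        // ... build mixComposition / MainCompositionInst for clip i as above ...
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
        // ... configure outputURL / outputFileType / videoComposition as above ...
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            // Leave whether the export completed, failed, or was cancelled.
            dispatch_group_leave(exportGroup);
        }];
    }
    dispatch_group_notify(exportGroup, dispatch_get_main_queue(), ^{
        [self mergeAllVideoClipscompletionCallback]; // every clip has been written
    });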


- (void)MergeAndExport
{

CALayer * parentLayer;
CALayer * videoLayer;

//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;

audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                         preferredTrackID:kCMPersistentTrackID_Invalid];


CMTime duration = kCMTimeZero;
for(int i=0;i< [arrSelectedUrls count];i++)
{
    // Load the pre-titled clip exported by MergeAndSave, not the raw source URL.
    // AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];
    NSURL *url = [NSURL fileURLWithPath:[UrlArray objectAtIndex:i]];
    AVURLAsset *currentAsset = [AVURLAsset URLAssetWithURL:url options:nil];

    BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
    AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Insert the clip's video once; the original code inserted this time range
    // twice, which would duplicate the clip in the composition.
    [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

    if (hasAudio)
    {
        // Same fix as in MergeAndSave: a single insert into the shared audio track.
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
    }



    AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
    AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation currentAssetOrientation  = UIImageOrientationUp;
    BOOL  isCurrentAssetPortrait  = NO;
    CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

    if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  {currentAssetOrientation =  UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
    if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   {currentAssetOrientation =  UIImageOrientationUp;}
    if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}

    CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
    if(isCurrentAssetPortrait){
        FirstAssetScaleToFitRatio = 320.0/320.0;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
    }else{
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
    }

    duration=CMTimeAdd(duration, currentAsset.duration);

    [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
    [arrayInstruction addObject:currentAssetLayerInstruction];

    NSLog(@"%lld", duration.value/duration.timescale);

    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = @"www.miivdo.com";
    //titleLayer.backgroundColor = (__bridge CGColorRef)([UIColor redColor]);
    CGSize videoSize = [currentAssetTrack naturalSize];
    titleLayer.fontSize = videoSize.height / 14;
    // titleLayer.foregroundColor = (__bridge CGColorRef)([UIColor redColor]);
    titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentRight;
    titleLayer.bounds = CGRectMake(0, 0, 320, 50); //You may need to adjust this for proper display

    parentLayer = [CALayer layer];
    videoLayer  = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:titleLayer];


}




MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);


// NSString *myPathDocs =  [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge.mov"]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];


NSURL *url = [NSURL fileURLWithPath:outputFilePath];


AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
 {
     switch (exporter.status)
     {
         case AVAssetExportSessionStatusCompleted:
         {

             NSURL *outputURL = exporter.outputURL;

             ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
             if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {

                 [self writeExportedVideoToAssetsLibrary:outputURL];

                 //
             }

         }
             break;
         case AVAssetExportSessionStatusFailed:
             NSLog(@"Failed:%@", exporter.error.description);
             break;
         case AVAssetExportSessionStatusCancelled:
             NSLog(@"Canceled:%@", exporter.error);
             break;
         case AVAssetExportSessionStatusExporting:
             NSLog(@"Exporting!");
             break;
         case AVAssetExportSessionStatusWaiting:
             NSLog(@"Waiting");
             break;
         default:
             break;
     }
 }];




}
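
writeExportedVideoToAssetsLibrary: is not shown in the answer. A minimal sketch, assuming it simply saves the exported file with the ALAssetsLibrary API the completion handler already uses (deprecated since iOS 9; PHPhotoLibrary is the modern replacement), might look like this:

    // Hypothetical implementation of the helper referenced above.
    - (void)writeExportedVideoToAssetsLibrary:(NSURL *)outputURL
    {
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                    completionBlock:^(NSURL *assetURL, NSError *error) {
            if (error) {
                NSLog(@"Saving to the photo library failed: %@", error);
            } else {
                NSLog(@"Saved merged video at %@", assetURL);
            }
        }];
    }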