使用触摸iOS在视频上移动叠加层

时间:2016-03-25 08:45:09

标签: ios avfoundation overlays

我一直致力于视频编辑,我已成功完成以下事项: 1.合并多个视频。 2.调整视频速度。 3.剪辑视频。

现在我需要在视频上显示图像,然后将该图像移到该视频上。我有两个问题:

问题1: 我已经成功完成了叠加部分,即在视频上添加图像。但我该如何移动此图像呢?

问题2: 此外,我无法直接预览合成后的视频。只有先把视频保存到某个路径,再用该路径播放,我才能查看效果。

以下是我用来完成叠加任务的代码:

    // Build an editable composition containing the source asset's video track.
composition_ = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *videoTrack =
    [composition_ addMutableTrackWithMediaType:AVMediaTypeVideo
                              preferredTrackID:kCMPersistentTrackID_Invalid];

AVAssetTrack *sourceVideoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

// Copy the entire source video into the composition track.
// NOTE(review): the NSError out-parameter is ignored, as in the original.
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.asset.duration)
                    ofTrack:sourceVideoTrack
                     atTime:kCMTimeZero
                      error:nil];

// One instruction spanning the full duration, with a single layer instruction
// that keeps the source's preferred transform and fades the track out at the end.
AVMutableVideoCompositionInstruction *mainInstruction =
    [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);

AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:sourceVideoTrack.preferredTransform atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0 atTime:self.asset.duration];
mainInstruction.layerInstructions = @[layerInstruction];

// assumes a portrait asset: width/height are swapped unconditionally — TODO confirm
CGSize videoSize = CGSizeMake(sourceVideoTrack.naturalSize.height, sourceVideoTrack.naturalSize.width);

videoComposition_ = [AVMutableVideoComposition videoComposition];
videoComposition_.renderSize = CGSizeMake(videoSize.width, videoSize.height);
videoComposition_.instructions = @[mainInstruction];
videoComposition_.frameDuration = CMTimeMake(1, 30); // 30 fps

// --- overlay image starts here ---

// Image layer drawn on top of the video: centered origin, one-eighth size,
// half-transparent, on a red background.
CALayer *imageLayer = [CALayer layer];
imageLayer.contents = (__bridge id _Nullable)([[UIImage imageNamed:@"overlay_icon.png"] CGImage]);
imageLayer.frame = CGRectMake(videoSize.width / 2, videoSize.height / 2, videoSize.width / 8, videoSize.height / 8);
imageLayer.opacity = 0.5;
imageLayer.masksToBounds = YES;
imageLayer.backgroundColor = [[UIColor redColor] CGColor];

// Layer tree handed to the export tool: video frames render into
// videoFrameLayer, the image rides on top inside containerLayer.
CALayer *containerLayer = [CALayer layer];
CALayer *videoFrameLayer = [CALayer layer];
containerLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoFrameLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoFrameLayer.backgroundColor = [[UIColor redColor] CGColor];
[containerLayer addSublayer:videoFrameLayer];
[containerLayer addSublayer:imageLayer];

videoComposition_.animationTool =
    [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoFrameLayer
                                                                                                 inLayer:containerLayer];

2 个答案:

答案 0（得分：1）

index  location     value
  1    New York      2.0
  2    Los Angeles   1.5
  3    Chicago       -1.3
  4    New York      2.1

AnimationTool Part

-(void)prepareOverlayVideo{
    // Build composition_ / videoComposition_ for the current self.asset:
    // a single full-length video track, orientation detection via the
    // preferred transform, and render settings sized to the
    // (orientation-corrected) natural size.

    composition_ = [[AVMutableComposition alloc] init];

    AVMutableCompositionTrack *videoTrack =
        [composition_ addMutableTrackWithMediaType:AVMediaTypeVideo
                                  preferredTrackID:kCMPersistentTrackID_Invalid];

    // Insert the entire source video track at time zero (error ignored, as before).
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.asset.duration)
                        ofTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:nil];

    assetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);

    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:assetTrack];

    // Decode the preferred transform to decide whether the asset is portrait.
    // Only the four exact rotation matrices below are handled; anything else
    // falls through as orientation "up" / landscape. The branches are mutually
    // exclusive, so an else-if chain is equivalent to the original's four ifs.
    UIImageOrientation assetOrientation = UIImageOrientationUp;
    BOOL isPortrait = NO;
    CGAffineTransform t = assetTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        assetOrientation = UIImageOrientationRight;  // 90° rotation — portrait
        isPortrait = YES;
    } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        assetOrientation = UIImageOrientationLeft;   // 270° rotation — portrait
        isPortrait = YES;
    } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
        assetOrientation = UIImageOrientationUp;     // identity
    } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        assetOrientation = UIImageOrientationDown;   // 180° rotation
    }
    // NOTE(review): assetOrientation is computed but never read afterwards,
    // matching the original code.
    (void)assetOrientation;

    // Keep the source orientation and fade the track out at the very end.
    [layerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
    [layerInstruction setOpacity:0.0 atTime:self.asset.duration];

    mainInstruction.layerInstructions = @[layerInstruction];

    // Portrait assets report their naturalSize in landscape terms, so swap
    // the axes when the transform indicated portrait.
    if (isPortrait) {
        naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
    } else {
        naturalSize = assetTrack.naturalSize;
    }

    videoComposition_ = [AVMutableVideoComposition videoComposition];
    videoComposition_.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
    videoComposition_.instructions = @[mainInstruction];
    videoComposition_.frameDuration = CMTimeMake(1, 30); // 30 fps
}

//注意这个方法.....这是成功的部分

     // Map the on-screen overlay position into the video's pixel coordinate
     // space so the exported CALayer lands where the user dragged the image.
     CGRect newRect = [self.view convertRect:overlayView.frame toView:videoRectView];
    CGFloat current_x = newRect.origin.x;
    CGFloat current_y;
    // NOTE(review): the +50 correction only applies after a resize of a
    // "large" overlay (width > 60) — presumably compensating for a handle
    // or toolbar offset; confirm against the view-layout code.
    if(isSizeChanged && overlayView.frame.size.width>60)
        current_y = newRect.origin.y+50;
    else
        current_y = newRect.origin.y;
    overlayView.frame = newRect;
    imageView_.frame = newRect;

    // Scale the converted point from the player's visible videoRect into the
    // asset's natural-size coordinates (mapSize:withSize:forPoint: is defined
    // elsewhere in this class).
    CGPoint overlayPoint = [self mapSize:assetTrack.naturalSize withSize:playerLayer.videoRect.size forPoint:CGPointMake(current_x, current_y)];

   //invert Y-axis: UIKit's origin is top-left while the export layer tree's
   //origin is bottom-left; the extra -50 mirrors the offset applied above.
    CGFloat finalOverlayPoint_y = CGRectGetMaxY(CGRectMake(0, 0, naturalSize.width, naturalSize.height)) - overlayPoint.y-50;
    CGPoint newOverlayPoint = CGPointMake(overlayPoint.x, finalOverlayPoint_y);

    // Position both the interactive view's layer and the export image layer
    // at the mapped point, using the precomputed final overlay size.
    overlayView.layer.frame = CGRectMake(overlayPoint.x, newOverlayPoint.y,finalOverlayRect.size.width, finalOverlayRect.size.height);
    imageView_.layer.frame = CGRectMake(overlayPoint.x, newOverlayPoint.y,finalOverlayRect.size.width, finalOverlayRect.size.height);

    // Round the image layer into a circle when the chosen shape requires it.
    if(isShapeACircle)
        imageView_.layer.cornerRadius = finalOverlayRect.size.height/2;
    else
        imageView_.layer.cornerRadius = 0.f;
    imageView_.layer.borderWidth = 5.f;

    overlayView.layer.contentsScale = [UIScreen mainScreen].scale;
    [overlayView.layer setMasksToBounds:YES];

    //add animation: reveal the overlay at _overlayTime during export.
    //NOTE(review): fromValue and toValue are both 1.0, so the layer jumps
    //from the base opacity (0.0, set below) to 1.0 for the animation's
    //duration rather than fading — confirm this is the intended effect.
    //Also, removedOnCompletion:YES combined with kCAFillModeForwards is
    //contradictory; AVFoundation export animations normally need
    //removedOnCompletion set to NO — verify against the export results.
    CABasicAnimation *animation1 = [CABasicAnimation animationWithKeyPath:@"opacity"];
    [animation1 setDuration:0.5 ];
    [animation1 setFromValue:[NSNumber numberWithFloat:1.0]];
    [animation1 setToValue:[NSNumber numberWithFloat:1.0]];
    //[animation1 setBeginTime:[self.timeLabel.text floatValue]];
    [animation1 setBeginTime:_overlayTime];
    [animation1 setRemovedOnCompletion:YES];
    [animation1 setFillMode:kCAFillModeForwards];
    [imageView_.layer addAnimation:animation1 forKey:@"animateOpacity"];

    // Keep the overlay hidden until the animation fires at _overlayTime.
    imageView_.layer.opacity = 0.0;
    // Layer tree for the export tool: video frames render into videoLayer,
    // and imageView_'s layer rides on top inside parentLayer.
    CALayer *parentLayer  = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];

    parentLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);
    parentLayer.backgroundColor = [[UIColor redColor]CGColor];
    videoLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);

    //CALayer *watermarkLayer = [self setWaterMarkInLayer:videoLayer];

    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:imageView_.layer];
    //[parentLayer addSublayer:watermarkLayer];



    videoComposition_.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

@pigeon_39 这是叠加层的代码。我知道回复得有点晚,但我中途休息了一段时间,对这个问题已经没有多少记忆了。如果现在仍有帮助,你可以参考这个回答;遇到任何问题,我们可以在这里继续讨论,不过请先自己做一些研究再来问我。注意,这并不是完整的代码——为了给出可用的示例,我做了大量删改,我的实际代码涉及很多条件分支。祝你好运,伙计 :-)

答案 1（得分：0）

我在网上到处搜索,但我似乎找不到解决方案。即使是关于“视频编辑”的苹果文档也没有提到这一点。

最后我提出了自己的逻辑。这就是我要做的事情:

1.我将捕获视频并移动到下一页,上面有图像。 2.在图像上添加手势,将其移动到视频的任何位置。 3.然后添加上面的代码以将该图像合并到视频上。

我认为这是完美的解决方案。我会在2天内发布代码,即我写的时候。但逻辑很简单。