是什么导致AVMutableComposition大幅增加视频的大小? - iOS,Swift,AVFoundation

时间:2017-08-10 09:52:51

标签: ios swift video swift3 avmutablecomposition

假设我们有两个视频资源(AVAsset对象),我们称之为“空白”(blank)和“主要”(main):“主要”是长度不定的随机视频,比方说2-5分钟;“空白”始终是一个4秒的视频。我们希望按以下顺序合并视频:

空白 - 主要 - 空白

    // Builds a [blank][main][blank] sequence on a single composition video
    // track, adds the main clip's audio, and exports the result as MP4.
    //
    // NOTE(review): attaching `videoComposition` below makes the export
    // session re-render and re-encode every frame at the preset's bitrate
    // instead of passing the original samples through — presumably this is
    // what inflates the output size and processing time; confirm by
    // exporting once without the video composition attached.

    // Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.

    let mixComposition = AVMutableComposition()

    // Clips are appended in display order: blank, main, blank.
    let assets = [blank, main, blank]
    var totalTime : CMTime = CMTimeMake(0, 0)   // running end time of the clips inserted so far
    var atTimeM: CMTime = CMTimeMake(0, 0)      // insertion point for the current clip

    Utils.log([blank.duration, main.duration])

    // VIDEO TRACK — all three clips are inserted back-to-back into ONE mutable video track.
    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    for (index,asset) in assets.enumerated() {

        do {

            if index == 0 {
                atTimeM = kCMTimeZero
            } else {
                atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
            }

            // Insert the asset's full first video track at the running offset.
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)

        } catch let error as NSError {
            // Insertion failures are only logged; the merge continues.
            Utils.log("error: \(error)")
        }

        totalTime = CMTimeAdd(totalTime, asset.duration)
    }

    // AUDIO TRACK — only the main clip's audio, offset by the leading blank's duration.
    let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, main.duration), of: main.tracks(withMediaType: AVMediaTypeAudio)[0], at: blank.duration)
    } catch _ {
        completionHandler(nil, ErrorType(rawValue: "Unable to add audio in composition."))
        return
    }

    let outputURL = mainVideoObject.getDirectoryURL()?.appendingPathComponent("video-with-blank.mp4")

    // 1280x720 preset; with a video composition attached the frames are
    // re-encoded at this preset's bitrate (see NOTE at top).
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else {
        completionHandler(nil, ErrorType(rawValue: "Unable to create export session."))
        return
    }

    // Single instruction spanning the whole timeline: blank + main + blank.
    let mainInstruction = AVMutableVideoCompositionInstruction()

    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(blank.duration, CMTimeAdd(main.duration, blank.duration)))

    // Fixing orientation
    // All three layer instructions target the SAME composition track; each
    // applies its source clip's preferredTransform at that clip's start time.
    let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let firstAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    firstLayerInstruction.setTransform(firstAssetTrack.preferredTransform, at: kCMTimeZero)
    firstLayerInstruction.setOpacity(0.0, at: blank.duration)  // hide once the main clip starts

    let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let secondAssetTrack = main.tracks(withMediaType: AVMediaTypeVideo)[0]
    var isSecondAssetPortrait = false
    let secondTransform = secondAssetTrack.preferredTransform
    // Detects only the two ±90° rotation matrices; any other transform is
    // treated as landscape.
    if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
        isSecondAssetPortrait = true
    }
    if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
        isSecondAssetPortrait = true
    }
    secondLayerInstruction.setTransform(secondAssetTrack.preferredTransform, at: blank.duration)
    secondLayerInstruction.setOpacity(0.0, at: CMTimeAdd(blank.duration, main.duration))  // hide for the trailing blank

    let thirdLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let thirdAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    thirdLayerInstruction.setTransform(thirdAssetTrack.preferredTransform, at: CMTimeAdd(blank.duration, main.duration))

    mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction, thirdLayerInstruction]

    // Render size follows the main clip, swapping width/height for portrait.
    var naturalSize = CGSize()
    if(isSecondAssetPortrait) {
        naturalSize = CGSize(width: secondAssetTrack.naturalSize.height, height: secondAssetTrack.naturalSize.width)
    } else {
        naturalSize = secondAssetTrack.naturalSize
    }

    let renderWidth = naturalSize.width
    let renderHeight = naturalSize.height

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30)  // 30 fps output
    mainCompositionInst.renderSize = CGSize(width: renderWidth, height: renderHeight)

    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.videoComposition = mainCompositionInst
    //exporter.shouldOptimizeForNetworkUse = true

    exporter.exportAsynchronously {
        if exporter.status == .completed {
            // NOTE(review): force-unwraps outputURL; if getDirectoryURL()
            // returned nil above this crashes — verify against the caller.
            completionHandler(AVAsset(url: outputURL!), nil)
        } else {
            completionHandler(nil, ErrorType(rawValue: "Unable to export video."))
            if let error = exporter.error {
                Utils.log("Unable to export video. \(error)")
            }
        }
    }

假设原始录制的视频在720p质量下时长5分钟,占用大约200MB的空间,那么在主视频的开始和结束处添加4秒的空白视频不应该大幅改变文件大小,并且处理应该非常快速地完成。

然而,结果是视频的大小是原始视频的2到2.5倍(所以400 - 500 MB)并且处理时间太长。

请指教,

谢谢

1 个答案:

答案 0 :(得分:1)

在这里,我准备了一个自定义类,您可以向其中传递视频名称(这些视频需要保存在应用捆绑包中)。运行应用程序后,它将根据您的要求生成新的视频文件,并将其放入应用程序的文档目录路径中。

使用Swift 4我准备了这个演示

//
//  ViewController.swift
//  SOVideoMergingDemo
//
//  Created by iOS Test User on 03/01/18.
//  Copyright © 2018 Test User. Ltd. All rights reserved.
//

import UIKit
import AVFoundation
import MediaPlayer
import Photos
import AssetsLibrary
import AVKit


class ViewController : UIViewController {

    //--------------------------------------------------
    //MARK:
    //MARK: - Properties
    //--------------------------------------------------

    var videoUrls : [URL]     = []
    var arrVideoAsset : [AVAsset] = []      // assets merged in order [video1, video2, video1]
    let video1 = "1"                        // bundled resource name ("1.mp4")
    let video2 = "2"                        // bundled resource name ("2.mp4")
    let outPutVideo = "MergedVideo.mp4"     // output file name in the documents directory

    let semaphore = DispatchSemaphore(value: 1)


    //--------------------------------------------------
    //MARK:
    //MARK: - Custom Methods
    //--------------------------------------------------

    /// Returns the file URL of an mp4 bundled with the app.
    /// The force-unwrap is intentional: a missing bundled resource is a
    /// packaging error that should crash in development.
    func getVideoURL(forVideo : String) -> URL {
        let videoPath = Bundle.main.path(forResource: forVideo, ofType: "mp4")
        return URL(fileURLWithPath: videoPath!)
    }

    //--------------------------------------------------

    /// Merges the three given assets back-to-back (asset0 → asset1 → asset2)
    /// and exports the result as an MP4 into the documents directory.
    /// Expects `arrVideoAsset` to contain exactly three playable assets.
    func mergeVideos(arrVideoAsset : [AVAsset]) {

        let mixComposition = AVMutableComposition()

        // One composition video track per source clip.
        let firstTrack  = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let thirdTrack  = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Cumulative start times on the composition timeline.
        let startOfSecond = arrVideoAsset[0].duration
        let startOfThird  = CMTimeAdd(arrVideoAsset[0].duration, arrVideoAsset[1].duration)
        let totalDuration = CMTimeAdd(startOfThird, arrVideoAsset[2].duration)

        do {
            try firstTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[0].duration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }

        do {
            try secondTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[1].duration), of: arrVideoAsset[1].tracks(withMediaType: .video)[0], at: startOfSecond)
        } catch _ {
            print("Failed to load second track")
        }

        do {
            // FIX: the third clip must start after the first TWO clips.
            // Previously it was inserted at arrVideoAsset[1].duration, which
            // overlapped the second clip on the composition timeline.
            try thirdTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[2].duration), of: arrVideoAsset[2].tracks(withMediaType: .video)[0], at: startOfThird)
        } catch _ {
            // FIX: message previously said "second track".
            print("Failed to load third track")
        }

        // Single instruction spanning the whole merged timeline.
        let compositionInstruction = AVMutableVideoCompositionInstruction()
        compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration)

        // Layer instructions: each track is hidden (opacity 0) at the moment
        // its clip ends so the next track becomes visible.
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack!, asset: arrVideoAsset[0])
        firstInstruction.setOpacity(0.0, at: startOfSecond)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack!, asset: arrVideoAsset[1])
        // FIX: hide the second clip at its end on the COMPOSITION timeline
        // (duration of clips 1+2), not at arrVideoAsset[1].duration.
        secondInstruction.setOpacity(0.0, at: startOfThird)
        let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack!, asset: arrVideoAsset[2])

        compositionInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]

        // By changing these dimensions the caller can affect the size of the
        // merged video: use the smaller of the first two tracks' dimensions.
        let renderWidth  = min((firstTrack?.naturalSize.width)!,  (secondTrack?.naturalSize.width)!)
        let renderHeight = min((firstTrack?.naturalSize.height)!, (secondTrack?.naturalSize.height)!)

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [compositionInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)   // 30 fps output
        mainComposition.renderSize = CGSize(width: renderWidth, height: renderHeight)

        let outputURL = URL(fileURLWithPath: getDocumentDirectoryPath() + "/" + outPutVideo)
        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = outputURL
        exporter?.outputFileType = AVFileType.mp4
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = mainComposition
        print(self.getDocumentDirectoryPath())

        exporter?.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                if exporter?.status == AVAssetExportSessionStatus.completed {
                    // FIX: the exporter already wrote the file to outputURL;
                    // re-reading the whole file into memory and writing it
                    // back to the same path (as before) was redundant.
                    print("Merged video saved at \(outputURL.path)")

                    //Uncomment This If you want to save video in Photos Library
//                    PHPhotoLibrary.shared().performChanges({
//                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (exporter?.outputURL)!)
//                    }, completionHandler: { (success, error) in
//                        if success {
//                            let fetchOptions = PHFetchOptions()
//                            fetchOptions.sortDescriptors = [NSSortDescriptor.init(key:"creationDate", ascending: false)]
//                            _ = PHAsset.fetchAssets(with: .video, options:fetchOptions).firstObject
//                        } else {
//                            print("Error in Saving File in Photo Libaray -> \(String(describing: error?.localizedDescription))")
//                        }
//                    })
                } else {
                    print("Error -> \(String(describing: exporter?.error?.localizedDescription))")
                }
            }
        })

    }

    //--------------------------------------------------

    /// Builds a layer instruction for `track` that applies the source
    /// asset's preferredTransform (orientation fix) from time zero.
    /// (The previous identity-scale concatenation was a no-op and was removed.)
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        instruction.setTransform(assetTrack.preferredTransform, at: kCMTimeZero)
        return instruction
    }

    //--------------------------------------------------

    /// Returns the app's documents directory path.
    func getDocumentDirectoryPath() -> String {
        let arrPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        return arrPaths[0]
    }

    //--------------------------------------------------
    //MARK:
    //MARK: - View Life Cycle Methods
    //--------------------------------------------------

    override func viewDidLoad() {
        super.viewDidLoad()

        // Prepare video assets in merge order: video1, video2, video1.
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video1)))
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video2)))
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video1)))

        // Merge these videos.
        mergeVideos(arrVideoAsset: arrVideoAsset)
    }
}