Swift AVFoundation: reading and analyzing a file in real time

Posted: 2017-04-04 14:00:38

Tags: ios avfoundation avasset

I'm unable to use AVFoundation to read a file from disk and perform rendering and analysis on it in real time. I have a processing pipeline that I know runs in real time, because it analyzes video frames in real time when fed from a live camera session. However, that is no longer the case when I read the frames from a file as shown below. Can anyone point out where I might be going wrong?
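
For context, the camera pipeline referred to above is not included in the question. A minimal sketch of what such a camera source might look like, delivering CMSampleBuffers to the same delegate protocol that the file-based test bench below uses (CameraSource and the "camera.frames" queue label are illustrative names, not code from the question; written against the same Swift 3 era APIs):

import AVFoundation

// Illustrative only: AVCaptureVideoDataOutput pushes CMSampleBuffers to the
// same per-frame delegate that the file-based test bench uses.
class CameraSource: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    let session = AVCaptureSession()
    let output = AVCaptureVideoDataOutput()
    var delegate: VideoStreamTestBenchDelegate?

    func start() throws {
        guard let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else { return }
        session.addInput(try AVCaptureDeviceInput(device: device))

        output.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_32BGRA))]
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.frames"))
        session.addOutput(output)
        session.startRunning()
    }

    // The capture session paces frame delivery by itself; a file reader has to
    // pace frames manually, which is what playAtActualSpeed does below.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        delegate?.frameBuffer(buffer: sampleBuffer)
    }
}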

import AVFoundation

// Receives each decoded video frame from the test bench.
protocol VideoStreamTestBenchDelegate {
    func frameBuffer(buffer:CMSampleBuffer)
}

class VideoStreamTestBench {

let asset:AVAsset
let assetReader:AVAssetReader
let playAtActualSpeed:Bool
let loop:Bool
var videoEncodingIsFinished = false
var previousFrameTime = kCMTimeZero
var previousActualFrameTime = CFAbsoluteTimeGetCurrent()

var numberOfFramesCaptured = 0
var totalFrameTimeDuringCapture:Double = 0.0

var delegate:VideoStreamTestBenchDelegate?

public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws {
    let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]
    let inputAsset = AVURLAsset(url:url, options:inputOptions)
    try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop)
}


public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws {
    self.asset = asset
    self.playAtActualSpeed = playAtActualSpeed
    self.loop = loop

    assetReader = try AVAssetReader(asset:self.asset)

    let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_32BGRA))]
    let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings)
    readerVideoTrackOutput.alwaysCopiesSampleData = false
    assetReader.add(readerVideoTrackOutput)
    // TODO: Audio here
}


// Loads the asset's tracks, then pulls decoded frames in a loop on a
// background queue until the reader finishes or is cancelled.
public func start() {
    asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{
        DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: {
            guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return }

            guard self.assetReader.startReading() else {
                print("Couldn't start reading")
                return
            }

            var readerVideoTrackOutput:AVAssetReaderOutput? = nil

            for output in self.assetReader.outputs {
                if (output.mediaType == AVMediaTypeVideo) {
                    readerVideoTrackOutput = output
                }
            }

            while (self.assetReader.status == .reading) {
                self.readNextVideoFrame(from:readerVideoTrackOutput!)
            }

            if (self.assetReader.status == .completed) {
                self.assetReader.cancelReading()

                if (self.loop) {
                    // TODO: Restart movie processing
                } else {
                    self.endProcessing()
                }
            }
        })
    })
}


public func cancel() {
    assetReader.cancelReading()
    self.endProcessing()
}

func endProcessing() {

}

// Copies the next sample buffer from the reader and, if playAtActualSpeed is
// set, sleeps so frames are delivered no faster than their presentation timestamps.
func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) {
    if ((assetReader.status == .reading) && !videoEncodingIsFinished) {
        if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
            if (playAtActualSpeed) {
                // Do this outside of the video processing queue to not slow that down while waiting
                let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime)
                let currentActualTime = CFAbsoluteTimeGetCurrent()

                let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame)
                let actualTimeDifference = currentActualTime - previousActualFrameTime

                if (frameTimeDifference > actualTimeDifference) {
                    usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference))))
                }

                previousFrameTime = currentSampleTime
                previousActualFrameTime = CFAbsoluteTimeGetCurrent()
            }

            DispatchQueue.global().sync {
                self.delegate?.frameBuffer(buffer: sampleBuffer)
                CMSampleBufferInvalidate(sampleBuffer)
            }

        } else {
            if (!loop) {
                videoEncodingIsFinished = true
                if (videoEncodingIsFinished) {
                    self.endProcessing()
                }
            }
        }
    }
}

}
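
For reference, the question does not show how the test bench is driven. A hypothetical call site using only the types defined above (AnalysisController and startAnalyzing are made-up names, not the asker's code):

// Hypothetical call site; the real wiring is not shown in the question.
class AnalysisController: VideoStreamTestBenchDelegate {

    var testBench: VideoStreamTestBench?

    func startAnalyzing(url: URL) {
        do {
            let bench = try VideoStreamTestBench(url: url, playAtActualSpeed: true)
            bench.delegate = self
            bench.start()
            testBench = bench
        } catch {
            print("Could not create asset reader: \(error)")
        }
    }

    // Called synchronously on the reader's background queue for every decoded frame.
    func frameBuffer(buffer: CMSampleBuffer) {
        // Per-frame rendering/analysis would go here.
    }
}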

// This is the delegate:

       public func bufferReader(_ reader: BufferReader!, didGetNextVideoSample bufferRef: CMSampleBuffer!) {

//        let posePoints:[Any] = self.visageBackend.posePoints(with: bufferRef)
//        var regions:[Any]? = nil
//        
//        if (posePoints.count > 0) {
//            regions = (self.luaBackend?.regions(forPosePoints: posePoints))!
//        }
//        
//        // extract
//        if(regions != nil) {
//            let rois:[Any] = (self.luaBackend?.extractedRegionInfos(for: bufferRef, andRegions: regions))!
//            print(rois)
//        }
//        
//        self.dLibRenderEngine.render(with: bufferRef, andPoints: posePoints, andRegions: regions)
        self.backgroundRenderQueue.async { [weak self] in

            if self?.previewLayer?.isReadyForMoreMediaData == true {
                self?.previewLayer?.enqueue(bufferRef!)
            }
        }
    }
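
Note that the handler above is a BufferReader-style callback rather than the frameBuffer(buffer:) method required by VideoStreamTestBenchDelegate, so the two presumably have to be bridged somewhere. A hypothetical bridge, assumed to live in the same class that owns previewLayer and backgroundRenderQueue:

    // Hypothetical bridge from the test bench's delegate protocol to the
    // bufferReader-style handler above; passing nil compiles because the
    // reader parameter is an implicitly unwrapped optional.
    public func frameBuffer(buffer: CMSampleBuffer) {
        self.bufferReader(nil, didGetNextVideoSample: buffer)
    }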

0 Answers:

No answers yet.