CustomVideoCompositorClass-绘制CGContext错误(CGImage,CGRect)

时间:2020-07-30 14:55:17

标签: swift avfoundation cgimage

我已按照this的说明在视频(大)上实现视频(小)。

获取资产,创建作品...

...
let videoComposition = AVMutableVideoComposition()
videoComposition.customVideoCompositorClass = CustomComposition.self
...

主要问题是何时需要CGContext绘制cgimage:

class CustomComposition: NSObject, AVVideoCompositing {
    
    var requiredPixelBufferAttributesForRenderContext: [String : Any] = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
        kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
        kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
    ]
    var sourcePixelBufferAttributes: [String : Any]? = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
        kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
        kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
    ]
    
    override init() {
        super.init()
    }
    
    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        
        guard let destination: CVPixelBuffer = request.renderContext.newPixelBuffer() else { return request.finish(with: NSError(domain: "", code: 3, userInfo: nil)) }
        
        if request.sourceTrackIDs.count == 2 {
            guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
            guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
            
            CVPixelBufferLockBaseAddress(front, .readOnly)
            CVPixelBufferLockBaseAddress(back, .readOnly)
            CVPixelBufferLockBaseAddress(destination, [])
            
            renderFrontBuffer(front, back: back, to: destination)
            
            CVPixelBufferUnlockBaseAddress(destination, [])
            CVPixelBufferUnlockBaseAddress(back, .readOnly)
            CVPixelBufferUnlockBaseAddress(front, .readOnly)
        }
        
        request.finish(withComposedVideoFrame: destination)
        
        CVBufferRemoveAllAttachments(destination)
    }
    
    func renderFrontBuffer(_ front: CVPixelBuffer, back: CVPixelBuffer, to destination: CVPixelBuffer) {
        
        var gc: CGContext? = nil
        
        guard let frontImage: CGImage = createSourceImage(from: front) else { return }
        guard let backImage: CGImage = createSourceImage(from: back) else { return }
        
        let width: Int = CVPixelBufferGetWidth(destination) //Int(Config.renderSize.width)
        let height: Int = CVPixelBufferGetHeight(destination)
        
        let frame = CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height))
        
        let colorSpace = backImage.colorSpace!
        
        gc = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)

        // MARK: - Place Back (big)
        //gc?.draw(backImage, in: frame) // <- Problem: EXC_BAD_ACCESS
        
        gc?.beginPath()
        gc?.addRect(CGRect.init(x: 24, y: 64, width: 67, height: 120))
        gc?.setFillColor(UIColor.yellow.cgColor)
        gc?.fillPath()
        
        // MARK: - Place Front (small)
        //gc?.draw(frontImage, in: frame) // <- Problem: EXC_BAD_ACCESS
    }
    
    func createSourceImage(from buffer: CVPixelBuffer) -> CGImage? {
        
        let width: Int = CVPixelBufferGetWidth(buffer)
        let height: Int = CVPixelBufferGetHeight(buffer)
        let stride: Int = CVPixelBufferGetBytesPerRow(buffer)
        
        var data = CVPixelBufferGetBaseAddress(buffer)
        
        let rgb = CGColorSpaceCreateDeviceRGB()
        
        let provider = CGDataProvider(dataInfo: nil, data: &data, size: height * stride) { (_, _, _) in }
        
        var image: CGImage? = nil
        
        let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
        if let provider = provider {
            image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
        }
        
        return image
    }
    
    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { }
    
}

enter image description here

不调用.draw时,渲染类似的内容并同时播放两个视频的音频(没关系),否则调用时立即出现EXC_BAD_ACCESS错误。 我已经尝试使用Zombie Object进行调试,但是什么也没有。

我也尝试保存cgimage的png,并且错误相同

...
let uimg = UIImage(cgImage: frontImage)
let data = uimg.pngData() <- EXC_BAD_ACCESS
...

我的猜测是,我在createSourceImage中缺少了CGDataProvider,或者缺少了requiredPixelBufferAttributesForRenderContextsourcePixelBufferAttributes数组。

或者,如果有人对如何实现这一目标(视频到视频)有其他想法,

1 个答案:

答案 0 :(得分:1)

看看这个,你可以试试看!

https://i.stack.imgur.com/jFJil.jpg

class MyCustomComposition: NSObject, AVVideoCompositing {
   
  
   var sourcePixelBufferAttributes: [String : Any]? {
       get {
           return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
       }
   }
   
   var requiredPixelBufferAttributesForRenderContext: [String : Any] {
       get {
           return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
       }
   }
   
   func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
   }
   
   func startRequest(_ asyncVideoCompositionRequest: AVAsynchronousVideoCompositionRequest) {
       
       let request = asyncVideoCompositionRequest
       
       //Destination Buffer
       let destination = request.renderContext.newPixelBuffer()
       
       //Grab Frames
       //My Video track id is 1 , 2 for back and front videos. Please Check your 'trackID' using 'request.sourceTrackIDs' then use it
       if request.sourceTrackIDs.count != 2 { return }
       guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
       guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
       
       CVPixelBufferLockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
       CVPixelBufferLockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
       CVPixelBufferLockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
       
       renderFromBuffer(destination: destination!, front: front, back: back)
       
       CVPixelBufferUnlockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
       CVPixelBufferUnlockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
       CVPixelBufferUnlockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
       
       request.finish(withComposedVideoFrame: destination!)
       
   }
   
   
   private func renderFromBuffer(destination: CVPixelBuffer, front: CVPixelBuffer, back: CVPixelBuffer) {
       
       let width = CVPixelBufferGetWidth(destination)
       let height = CVPixelBufferGetHeight(destination)
       
       let newContext = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
       
       let frontImage = createSourceImageFromReferance(buffer: front)
       let backImage = createSourceImageFromReferance(buffer: back)
       
       
       //DRAW 'BACK' IMAGE
       newContext?.saveGState()
       newContext?.translateBy(x: 0, y: CGFloat(height))
       newContext?.scaleBy(x: 1.0, y: -1.0)
       
       let backImagerect = CGRect(x: 0, y: 0, width: width, height: height)
      
       newContext?.draw(backImage, in: backImagerect)
       
       newContext?.scaleBy(x: 1.0, y: -1.0)
       newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
       newContext?.restoreGState()
       
       //DRAW 'FRONT' IMAGE AT CENTER
       newContext?.saveGState()
       newContext?.translateBy(x: 0, y: CGFloat(height))
       newContext?.scaleBy(x: 1.0, y: -1.0)
       
       let frontImageSize = CGSize(width: frontImage.width , height: frontImage.height)
       let centerPoint = CGPoint(x: CGFloat(width / 2) - (frontImageSize.width / 2), y: CGFloat(height / 2) - (frontImageSize.height / 2))
       let frontImagerect = CGRect(origin: centerPoint, size: frontImageSize)
       
       //Clip Front Video using BezierPath
       let bezierPath = UIBezierPath(roundedRect: frontImagerect, cornerRadius: 50)
       newContext?.addPath(bezierPath.cgPath)
       newContext?.clip()
       
       newContext?.draw(frontImage, in: frontImagerect)
       
       newContext?.scaleBy(x: 1.0, y: -1.0)
       newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
       newContext?.restoreGState()

       newContext?.flush()
   }
   
   private func createSourceImageFromReferance(buffer: CVPixelBuffer) -> CGImage {
       
       let width = CVPixelBufferGetWidth(buffer)
       let height = CVPixelBufferGetHeight(buffer)
       let stride = CVPixelBufferGetBytesPerRow(buffer)
       let data = CVPixelBufferGetBaseAddress(buffer)
       let rgb = CGColorSpaceCreateDeviceRGB()
       
       let releaseMaskImagePixelData: CGDataProviderReleaseDataCallback = { (info: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) -> () in
           // https://developer.apple.com/reference/coregraphics/cgdataproviderreleasedatacallback
       
           return
       }
       
       let provider:CGDataProvider? = CGDataProvider(dataInfo: nil, data: data!, size: height * stride, releaseData: releaseMaskImagePixelData)
       
       let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
       
       var image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider!, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
       
       var rect = CGRect(x: 0, y: 0,  width: Int(width), height: Int(height))
       
       let newContext = CGContext(data: nil, width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
       
       newContext?.saveGState()
       newContext?.translateBy(x: 0, y: CGFloat(height))
       newContext?.scaleBy(x: 1.0, y: -1.0)
       
       newContext?.draw(image!, in: rect)
       newContext?.restoreGState()
       
       let im = (newContext!.makeImage())!
       newContext?.flush()
       
       return im
   }
   
}

为上述课程创建构图,如下所示:

let mainInstructionCompostion = AVMutableVideoComposition()
mainInstructionCompostion.frameDuration = CMTime(value: 1, timescale: 30)
mainInstructionCompostion.renderSize = videoInfo.videoSize
mainInstructionCompostion.instructions = [mainInstruction]
mainInstructionCompostion.customVideoCompositorClass = MyCustomComposition.self