Swift / iOS 11 出错:没有视频输出,并出现 "[MC] 从公共有效用户设置读取" 日志

时间:2017-06-09 01:40:13

标签: ios swift avfoundation ios11 xcode9-beta

在iOS 11上使用Xcode 9 Beta:

我已经关注了如何从AVCaptureSession中提取帧的walkthrough,但是无法显示捕获。虽然我已将相机权限包含在info.plist文件中,但应用程序似乎在打开后停止,我收到以下错误:

  

[App Name]没有为frZQaeyWLUvLjeuEK43hmg提供沙箱访问权限且未正确授权

     

[MC] systemgroup.com.apple.configurationprofiles路径的系统组容器是/private/var/containers/Shared/SystemGroup/systemgroup.com.apple.configurationprofiles

     

[MC]从公共有效用户设置中读取。

以下是FrameExtractor.swift的代码供参考:

import UIKit
import AVFoundation

/// Receives each captured camera frame after conversion to `UIImage`.
/// Class-constrained so `FrameExtractor` can hold its delegate weakly
/// and avoid a retain cycle with the view controller.
protocol FrameExtractorDelegate: class {
    // Called once per frame; FrameExtractor dispatches this on the main queue.
    func captured(image: UIImage)
}

/// Captures frames from the camera via an `AVCaptureSession` and forwards
/// each one to its delegate as a `UIImage` on the main queue.
class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Camera to use; mirroring is enabled below only for the front position.
    private let position = AVCaptureDevice.Position.front
    private let quality = AVCaptureSession.Preset.medium

    /// Set by checkPermission()/requestPermission(); gates configureSession().
    private var permissionGranted = false
    /// Serial queue that owns all session configuration and start-up work.
    private let sessionQueue = DispatchQueue(label: "session queue")
    private let captureSession = AVCaptureSession()
    /// Reused for every frame — creating a CIContext per frame is expensive.
    private let context = CIContext()

    /// Receives one UIImage per captured frame, always on the main queue.
    weak var delegate: FrameExtractorDelegate?

    override init() {
        super.init()
        checkPermission()
        // Configure and start off the main thread: startRunning() blocks.
        // [weak self] (not unowned): the extractor could be deallocated
        // before this async block runs, and unowned would then crash.
        sessionQueue.async { [weak self] in
            self?.configureSession()
            self?.captureSession.startRunning()
        }
    }

    // MARK: AVSession configuration

    /// Reads the current camera authorization; prompts the user if undetermined.
    private func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) {
        case .authorized:
            permissionGranted = true
        case .notDetermined:
            requestPermission()
        default:
            // .denied / .restricted — leave permissionGranted false.
            permissionGranted = false
        }
    }

    /// Suspends the session queue until the user answers the system prompt,
    /// so configureSession() cannot run before the answer is known.
    private func requestPermission() {
        sessionQueue.suspend()
        AVCaptureDevice.requestAccess(for: AVMediaType.video) { [weak self] granted in
            self?.permissionGranted = granted
            self?.sessionQueue.resume()
        }
    }

    /// Wires camera input -> video-data output on the session queue.
    /// Silently returns (no capture) if permission is missing or any step fails.
    private func configureSession() {
        guard permissionGranted else { return }
        captureSession.sessionPreset = quality
        guard let captureDevice = selectCaptureDevice() else { return }
        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        guard captureSession.canAddInput(captureDeviceInput) else { return }
        captureSession.addInput(captureDeviceInput)
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer"))
        guard captureSession.canAddOutput(videoOutput) else { return }
        captureSession.addOutput(videoOutput)
        guard let connection = videoOutput.connection(with: AVMediaType.video) else { return }
        guard connection.isVideoOrientationSupported else { return }
        guard connection.isVideoMirroringSupported else { return }
        connection.videoOrientation = .portrait
        connection.isVideoMirrored = position == .front
    }

    /// Returns the system default video device, or nil if none is available.
    /// NOTE(review): this ignores `position`; to honor the front-camera
    /// preference, use AVCaptureDevice.default(.builtInWideAngleCamera,
    /// for: .video, position: position) — confirm desired behavior.
    private func selectCaptureDevice() -> AVCaptureDevice? {
        return AVCaptureDevice.default(for: AVMediaType.video)
    }

    // MARK: Sample buffer to UIImage conversion

    /// Converts a raw sample buffer to a UIImage via CoreImage; nil when the
    /// buffer carries no pixel data or CGImage creation fails.
    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    /// iOS 11 / Swift 4 delegate requirement. The previous selector,
    /// captureOutput(_:didOutputSampleBuffer:from:), no longer matches the
    /// protocol under the iOS 11 SDK, so it was never invoked and no frames
    /// were delivered — this rename is the actual fix for the bug reported
    /// above. (The per-frame debug print was removed: it fired ~30×/s.)
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let uiImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
        // Delegate consumers touch UI, so always hop to the main queue.
        DispatchQueue.main.async { [weak self] in
            self?.delegate?.captured(image: uiImage)
        }
    }
}

对于ViewController.swift:

import UIKit

/// Displays the camera feed by rendering each frame delivered by
/// `FrameExtractor` into an image view.
class ViewController: UIViewController, FrameExtractorDelegate {

    @IBOutlet var imageView: UIImageView!

    /// Owns the capture pipeline; created in viewDidLoad.
    var frameExtractor: FrameExtractor!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Create the extractor, then register for frames. Frames captured
        // before the delegate is set are simply dropped (delegate is nil).
        frameExtractor = FrameExtractor()
        frameExtractor.delegate = self
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: FrameExtractorDelegate

    /// Called on the main queue with each converted frame.
    /// (Fix: the original transcript had a stray backtick after the class's
    /// closing brace, which would not compile.)
    func captured(image: UIImage) {
        imageView.image = image
    }
}

1 个答案:

答案 0 :(得分:0)

问题在于captureOutput中的不同函数调用。这是iOS 11中针对AVCaptureVideoDataOutputSampleBufferDelegate中的captureOutput的新函数调用:

// iOS 11 / Swift 4 form of the delegate callback. The selector is
// captureOutput(_:didOutput:from:) — the older
// captureOutput(_:didOutputSampleBuffer:from:) no longer matches the
// AVCaptureVideoDataOutputSampleBufferDelegate requirement, so the old
// method compiles but is silently never called.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let uiImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
    // Hand the converted frame to the delegate on the main queue (UI work).
    DispatchQueue.main.async { [unowned self] in
        self.delegate?.captured(image: uiImage)
    }
}

注意新旧方法名之间的变化:新签名使用 "didOutput sampleBuffer:",而旧签名使用 "didOutputSampleBuffer sampleBuffer:"。