Capturing video from the iOS camera

Date: 2016-02-13 19:04:45

Tags: ios swift xcode7

I need to capture video with the camera, but it throws the following error:


Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureDevice init:] - Cannot instantiate AVCaptureDevice directly.'

The code is as follows:

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

// Video output
private var myVideoOutPut:AVCaptureMovieFileOutput!

// Start & stop buttons
private var myButtonStart : UIButton!
private var myButtonStop : UIButton!

override func viewDidLoad() {
    super.viewDidLoad()

    // Create the capture session
    let mySession : AVCaptureSession = AVCaptureSession()

    // Capture device
    var myDevice : AVCaptureDevice = AVCaptureDevice()

    // Configure the still image output
    let myImageOutput : AVCaptureStillImageOutput = AVCaptureStillImageOutput()


    // Get the list of capture devices
    let devices = AVCaptureDevice.devices()

    // Get the microphone
    // Add the microphone as an input to the session
    do{
        let audioCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
        let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
        if (mySession.canAddInput(audioInput)){
            mySession.addInput(audioInput)
        }
    }catch let error as NSError{
        print(error)
    }

    // Store the back camera in myDevice
    for device in devices {
        if (device.position == AVCaptureDevicePosition.Back){
            myDevice = device as! AVCaptureDevice
        }
    }

    // Get the back camera
    do{
        let videoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        // Add the video input to the session
        if (mySession.canAddInput(videoInput)){
            mySession.addInput(videoInput)
        }
    }catch let error as NSError{
        print(error)
    }


    // Add the still image output to the session
    mySession.addOutput(myImageOutput)

    // Output for saving the movie file
    myVideoOutPut = AVCaptureMovieFileOutput()

    // Add the video output to the session
    mySession.addOutput(myVideoOutPut)

    // Create a preview layer to display the camera image
    let myVideoLayer:AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: mySession)
    myVideoLayer.frame = self.view.bounds
    myVideoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

    // Add the preview layer to the view
    self.view.layer.addSublayer(myVideoLayer)

    // Start the session
    mySession.startRunning()

    // Create the UI buttons
    myButtonStart = UIButton(frame: CGRectMake(0,0,120,50))
    myButtonStop = UIButton(frame: CGRectMake(0,0,120,50))

    myButtonStart.backgroundColor = UIColor.redColor()
    myButtonStop.backgroundColor = UIColor.grayColor()

    myButtonStart.layer.masksToBounds = true
    myButtonStop.layer.masksToBounds = true

    myButtonStart.setTitle("撮影", forState: .Normal)
    myButtonStop.setTitle("停止", forState: .Normal)

    myButtonStart.layer.cornerRadius = 20
    myButtonStop.layer.cornerRadius = 20


    myButtonStart.layer.position = CGPoint(x: self.view.bounds.width/2 - 70, y: self.view.bounds.height-50)
    myButtonStop.layer.position = CGPoint(x: self.view.bounds.width/2 - 70, y: self.view.bounds.height-50)

    myButtonStart.addTarget(self, action: "onClickMyButton:", forControlEvents: .TouchUpInside)
    myButtonStop.addTarget(self, action: "onClickMyButton:", forControlEvents: .TouchUpInside)

    // Add the UI buttons to the view
    self.view.addSubview(myButtonStart)
    self.view.addSubview(myButtonStop)
}

// Button event handler
internal func onClickMyButton(sender:UIButton){
    // Start recording
    if sender == myButtonStart {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)

        // Directory
        let documentsDirectory = paths[0] as! String

        // File name
        let filePath:String? = "\(documentsDirectory)/test.mp4"

        //URL
        let fileURL:NSURL = NSURL(fileURLWithPath: filePath!)

        // Start recording to the output file
        myVideoOutPut.startRecordingToOutputFileURL(fileURL, recordingDelegate: self)

    } else if sender == myButtonStop {
        myVideoOutPut.stopRecording()

    }
}

// Method called after video capture has finished
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    print("didFinishRecordingToOutputFileAtURL")
}

// Method called when video capture starts
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    print("didStartRecordingToOutputFileAtURL")
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}


/*
// MARK: - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/

}

1 Answer:

Answer 0 (score: 0)

You cannot instantiate AVCaptureDevice directly. Just declare myDevice as an optional:

var myDevice : AVCaptureDevice?

You already assign it a reference in this line of code:

myDevice = device as! AVCaptureDevice
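
Putting the answer together, here is a minimal sketch of how the back-camera setup could look (using the same Swift 2 / Xcode 7 APIs as the question, and assuming mySession is the AVCaptureSession already created in viewDidLoad): declare myDevice as an optional, let the device loop assign it, and unwrap it before building the input.

// Declare the device as an optional instead of calling AVCaptureDevice()
var myDevice : AVCaptureDevice?

// Let the device loop assign the back camera
for device in AVCaptureDevice.devices() {
    if let captureDevice = device as? AVCaptureDevice
        where captureDevice.position == AVCaptureDevicePosition.Back {
        myDevice = captureDevice
    }
}

// Unwrap before building the input
if let backCamera = myDevice {
    do {
        let videoInput = try AVCaptureDeviceInput(device: backCamera)
        if mySession.canAddInput(videoInput) {
            mySession.addInput(videoInput)
        }
    } catch let error as NSError {
        print(error)
    }
}

With this pattern the crashing line var myDevice : AVCaptureDevice = AVCaptureDevice() disappears entirely: the device is only ever obtained from devices() or defaultDeviceWithMediaType(_:), never constructed directly.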