Adding an AVCaptureVideoPreviewLayer as a subview

Date: 2015-05-18 19:15:16

Tags: ios swift uiview avfoundation

Hello, happy Monday!

I'm trying to build a custom camera similar to Instagram's.

First, I add a UIView in the Storyboard, center it both vertically and horizontally, and give it a width and height of 200, all via storyboard constraints.
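
In code, those storyboard constraints would look roughly like this (purely illustrative; I actually set them in Interface Builder, so this snippet is hypothetical):

    // Rough code equivalent of the storyboard setup described above (illustrative only).
    view.addConstraint(NSLayoutConstraint(item: cameraView, attribute: .CenterX, relatedBy: .Equal,
        toItem: view, attribute: .CenterX, multiplier: 1, constant: 0))
    view.addConstraint(NSLayoutConstraint(item: cameraView, attribute: .CenterY, relatedBy: .Equal,
        toItem: view, attribute: .CenterY, multiplier: 1, constant: 0))
    cameraView.addConstraint(NSLayoutConstraint(item: cameraView, attribute: .Width, relatedBy: .Equal,
        toItem: nil, attribute: .NotAnAttribute, multiplier: 1, constant: 200))
    cameraView.addConstraint(NSLayoutConstraint(item: cameraView, attribute: .Height, relatedBy: .Equal,
        toItem: nil, attribute: .NotAnAttribute, multiplier: 1, constant: 200))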

Then, in my code, I add an AVCaptureVideoPreviewLayer to my UIView (which I call cameraView) and set the AVCaptureVideoPreviewLayer's frame to my UIView's frame.

Problem: it looks as if the UIView's width and height constraints are being ignored. The camera preview should sit exactly in the center of the view, but it doesn't. What am I doing wrong here? I suspect it has something to do with the preview layer. Or should I perhaps be using a UIImageView instead of a UIView?

Any input would be greatly appreciated! Thanks!

Here is a screenshot showing the difference... Note: highlight the image to see the edges of the screenshot, sorry.

[screenshot]

My code:

extension String {
    func stripCharactersInSet(chars: [Character]) -> String {
        return String(filter(self) { find(chars, $0) == nil })
    }
}
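
// Example of the helper above, with made-up input:
//   "Joe's Bar, NYC".stripCharactersInSet(["'", ",", " "])   // -> "JoesBarNYC"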

class MyCameraViewController: UIViewController {

let session = AVCaptureSession()
var captureDevice: AVCaptureDevice?
var previewLayer: AVCaptureVideoPreviewLayer?
var stillImageOutput = AVCaptureStillImageOutput()
var imageData: NSData!

@IBOutlet weak var capturePhotoButton: UIButton!
@IBOutlet weak var flashButton: UIButton!
@IBOutlet weak var cameraView: UIView!

override func prefersStatusBarHidden() -> Bool {
    return true
}

override func viewDidLoad() {
    super.viewDidLoad()
    //flashButton.hidden = true

    if(session.canSetSessionPreset(AVCaptureSessionPresetHigh)) {
        session.sessionPreset = AVCaptureSessionPresetHigh
    } else {
        println("Cannot Set session Preset to AVCaptureSessionPresetPhoto")
    }

    let devices = AVCaptureDevice.devices()
    for device in devices {
        if(device.hasMediaType(AVMediaTypeVideo)){
            if(device.position == AVCaptureDevicePosition.Front){
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()

                }
            }
        }
    }
}

func setCaptureDevice() {
    let devices = AVCaptureDevice.devices()
    for device in devices {
        if(device.hasMediaType(AVMediaTypeVideo)){
            if(device.position == AVCaptureDevicePosition.Back){
                captureDevice = device as? AVCaptureDevice

            }
        }
    }

}

@IBAction func flashButtonPressed(sender: UIButton) {
    if captureDevice!.hasFlash {
        if captureDevice!.isFlashModeSupported(AVCaptureFlashMode.On) {
            if (captureDevice!.lockForConfiguration(nil)) {
                if (captureDevice!.flashActive) {
                    captureDevice!.flashMode = AVCaptureFlashMode.Off
                    flashButton.setTitle("Flash Off", forState: UIControlState.Normal)
                } else {
                    captureDevice!.flashMode = AVCaptureFlashMode.On
                    flashButton.setTitle("Flash On", forState: UIControlState.Normal)
                }
            }
            captureDevice!.unlockForConfiguration()
        }
    }
}

@IBAction func switchCamera(sender: UIButton) {
    // Pair with the commitConfiguration() call below when swapping inputs.
    session.beginConfiguration()
    let currentCameraInput: AVCaptureInput = session.inputs[0] as! AVCaptureInput
    session.removeInput(currentCameraInput)

    let newCamera: AVCaptureDevice?
    if(captureDevice!.position == AVCaptureDevicePosition.Back){
        println("Setting new camera with Front")
        flashButton.hidden = true
        newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Front)
    } else {
        println("Setting new camera with Back")
        flashButton.hidden = false
        newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Back)
    }

    let newVideoInput = AVCaptureDeviceInput(device: newCamera!, error: nil)
    if(newVideoInput != nil) {
        session.addInput(newVideoInput)
    } else {
        println("Error creating capture device input")
    }
    captureDevice! = newCamera!
    session.commitConfiguration()

}

func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice {
    let devices = AVCaptureDevice.devices()
    for device in devices {
            if(device.position == position){
                return device as! AVCaptureDevice
            }
    }
    return AVCaptureDevice()
}

func beginSession() {
    if(captureDevice!.isFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) {
        captureDevice?.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
    }

    var err : NSError? = nil
    //Add Input, which is my captureDevice
    session.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: session)



    //self.view.layer.addSublayer(previewLayer)
    cameraView.layer.addSublayer(previewLayer)

    //previewLayer?.frame = self.view.layer.frame
    previewLayer?.frame = cameraView.layer.frame

    session.startRunning()
}

struct imageViewStruct {
    static var image: UIImage?
}

@IBAction func shotPress(sender: UIButton) {
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    if session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
    }
    var videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)

    if videoConnection != nil {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection)
            { (imageDataSampleBuffer, error) -> Void in
                self.imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                var dataProvider = CGDataProviderCreateWithCFData(self.imageData)
                var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
                var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Up)

                imageViewStruct.image = image

                //self.uploadPhoto(image!)

                self.presentPhotoEditViewController(image!)
        }
    }

}

func setupPhotoEditor() {
    //Remove existing camera stuff
    previewLayer?.removeFromSuperlayer()
}

func presentPhotoEditViewController(imageToSend: UIImage) {
    let vc = self.storyboard?.instantiateViewControllerWithIdentifier("photoEditor") as! PhotoEditViewController
    self.presentViewController(vc, animated: true, completion: nil)
}

func uploadPhoto(image: UIImage) {
    let imageData = UIImagePNGRepresentation(image)
    let imageFile = PFFile(name:"image.png", data:imageData)

    var userPhoto = PFObject(className:getStringForVenue())
    userPhoto["imageFile"] = imageFile
    userPhoto.saveInBackground()
}

func getStringForVenue() -> String {

    let initialVenueString = LocViewController.variables.selectedVenue
    let chars: [Character] = ["'",",",":"," "]

    println(initialVenueString.stripCharactersInSet(chars))

    return initialVenueString.stripCharactersInSet(chars)
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

}

1 Answer:

Answer 0 (score: 0):

Your beginSession() method should include some setup on the AVCaptureVideoPreviewLayer: setVideoGravity and setFrame.

<强> OBJ-C

[previewLayer setVideoGravity: AVLayerVideoGravityResizeAspectFill];
[previewLayer setFrame:self.cameraView.layer.bounds];

This makes the video fill the frame of the view layer you assign it to while preserving its aspect ratio.
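
A Swift version of the same idea (a rough sketch reusing the cameraView and previewLayer properties from the question; the viewDidLayoutSubviews override is an extra suggestion, not part of the answer above):

    func beginSession() {
        // ... device/input setup as in the question ...
        let preview = AVCaptureVideoPreviewLayer(session: session)
        preview.videoGravity = AVLayerVideoGravityResizeAspectFill
        // Use bounds, not frame: a sublayer's frame is interpreted in cameraView's own
        // coordinate space, whereas cameraView.layer.frame is in the superview's space.
        preview.frame = cameraView.layer.bounds
        cameraView.layer.addSublayer(preview)
        previewLayer = preview
        session.startRunning()
    }

    // Extra suggestion: re-apply the frame once Auto Layout has actually sized cameraView.
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer?.frame = cameraView.layer.bounds
    }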