在iOS的同一视图中,上半屏显示WebView、下半屏打开摄像头

时间:2014-08-08 07:24:04

标签: ios webview split camera

我的要求是:我想在iPhone屏幕的上半部分显示一个WebView,同时在下半部分进行视频录制。这是否可行?如果可行,请说明如何实现。我已经为这个问题苦苦挣扎了3天。以下是我目前捕捉视频的代码:

#import "RecordVideoViewController.h"

// Private class extension — no additional private API declared here.
@interface RecordVideoViewController ()

@end

@implementation RecordVideoViewController

// Performs one-time setup after the view hierarchy is loaded.
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Initialize the flag ivar (declared in the header) to its starting state.
    x = 1;
}

// Standard memory-pressure hook; nothing extra to release in this controller.
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}

// IBAction wired to the record button; launches the system camera UI,
// using this controller both as presenter and as picker delegate.
- (IBAction)recordAndPlay:(id)sender {
    [self startCameraControllerFromViewController:self
                                    usingDelegate:self];
}
/// Presents the system camera UI configured for movie capture.
/// @param controller The view controller that presents the picker. Must not be nil.
/// @param delegate   Receives UIImagePickerController delegate callbacks. Must not be nil.
/// @return YES if the picker was presented; NO if the camera is unavailable
///         or either argument is nil.
- (BOOL)startCameraControllerFromViewController:(UIViewController *)controller
                                  usingDelegate:(id)delegate {
    // 1 - Validations: a camera must exist and both arguments must be non-nil.
    //     (Test `!available` rather than `== NO`; BOOL comparisons to YES/NO are fragile.)
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]
        || delegate == nil
        || controller == nil) {
        return NO;
    }

    // 2 - Configure an image picker restricted to movie capture.
    UIImagePickerController *cameraUI = [[UIImagePickerController alloc] init];
    cameraUI.sourceType = UIImagePickerControllerSourceTypeCamera;
    // Offer only movie capture (modern array literal instead of initWithObjects:).
    cameraUI.mediaTypes = @[ (NSString *)kUTTypeMovie ];
    // Hides the controls for moving & scaling pictures, or for
    // trimming movies. To instead show the controls, use YES.
    cameraUI.allowsEditing = NO;
    cameraUI.delegate = delegate;

    // 3 - Present the picker modally from the supplied controller.
    [controller presentViewController:cameraUI animated:YES completion:nil];
    return YES;
}

1 个答案:

答案 0(得分:1)

自己解决了。看看代码

//  ViewController.m
//  AppleVideoCapture
//  Copyright (c) 2014 NetProphets. All rights reserved.

#import "ViewController.h"

// Private class extension: the capture pipeline owned by this controller.
@interface ViewController (){
    AVCaptureSession * session;        // coordinates camera/mic inputs and the movie output
    AVCaptureMovieFileOutput * output; // writes the recorded movie to a file URL
}

@end

@implementation ViewController

#pragma mark - Lifecycle

// Builds the capture pipeline (front camera + mic -> movie file output) and
// shows a live preview in the BOTTOM half of the view, leaving the top half
// free for other content such as a web view.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // 1. Set up an AV session; the medium preset keeps file sizes reasonable.
    session = [[AVCaptureSession alloc] init];
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium]) {
        session.sessionPreset = AVCaptureSessionPresetMedium;
    }

    // 2. Attach the front-facing camera, if the hardware has one.
    AVCaptureDevice *device = [self frontCamera];
    if (device == nil) {
        NSLog(@"No front-facing camera available");
    } else {
        NSError *videoError = nil;
        AVCaptureDeviceInput *videoInput =
            [AVCaptureDeviceInput deviceInputWithDevice:device error:&videoError];
        // Check the returned object, not the error pointer — the error is only
        // meaningful when the call actually failed (Cocoa convention).
        if (videoInput == nil) {
            NSLog(@"Error with video capture...%@", [videoError description]);
        } else if ([session canAddInput:videoInput]) {
            [session addInput:videoInput];
        } else {
            NSLog(@"Error adding video input to session");
        }
    }

    // 3. Attach the default microphone.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *audioError = nil;
    AVCaptureDeviceInput *audioInput =
        [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&audioError];
    if (audioInput == nil) {
        NSLog(@"Error with audio input...%@", [audioError description]);
    } else if ([session canAddInput:audioInput]) {
        [session addInput:audioInput];
    } else {
        NSLog(@"Error adding audio to session");
    }

    // 4. Add the live preview layer over the lower half of this view.
    //    The original code used a negative-height hack
    //    (CGRectMake(0, 530, 320, -250)) with hard-coded screen metrics;
    //    an upright rect derived from the view's bounds is well-defined and
    //    adapts to any screen size.
    AVCaptureVideoPreviewLayer *previewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    CGRect bounds = self.view.bounds;
    previewLayer.frame = CGRectMake(0.0f,
                                    CGRectGetMidY(bounds),
                                    CGRectGetWidth(bounds),
                                    CGRectGetHeight(bounds) / 2.0f);
    [[self.view layer] addSublayer:previewLayer];

    // 5. Configure the movie output and start the session.
    output = [[AVCaptureMovieFileOutput alloc] init];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
    [session startRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark - Recording actions

// Starts recording to <NSTemporaryDirectory()>/output.mov, deleting any
// stale file first (AVCaptureMovieFileOutput refuses to overwrite).
- (IBAction)recordVideo:(id)sender {
    NSLog(@"Record video called");

    // stringByAppendingPathComponent handles the path separator correctly,
    // unlike the previous raw @"%@%@" concatenation.
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
    NSURL *outputUrl = [NSURL fileURLWithPath:path];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:path]) {
        NSError *removeError = nil;
        if (![fileManager removeItemAtPath:path error:&removeError]) {
            NSLog(@"File removal at temporary directory failed..%@", [removeError description]);
        }
    }

    [output startRecordingToOutputFileURL:outputUrl recordingDelegate:self];
}

- (IBAction)stopRecording:(id)sender {
    NSLog(@"Stop Recording called");
    [output stopRecording];
}

#pragma mark - Helpers

// Returns the front-facing video capture device, or nil if none exists.
- (AVCaptureDevice *)frontCamera {
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    return nil;
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

// Recording finished: copy the movie into the Saved Photos album.
// NOTE(review): ALAssetsLibrary is deprecated since iOS 9 — migrate to PHPhotoLibrary.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    NSLog(@"Recording Finished");

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
        // Renamed the block parameter so it no longer shadows this method's
        // `error` parameter.
        [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                    completionBlock:^(NSURL *assetURL, NSError *saveError)
         {
             if (saveError)
             {
                 NSLog(@"Error saving file to photos album");
             }
         }];
    }
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    NSLog(@"Recording Started");
}

@end