屏幕上的AVCaptureVideoDataOutput和图像显示大小

时间:2015-11-03 15:58:18

标签: ios objective-c iphone avfoundation avcapture

我使用AVCaptureVideoDataOutput从相机中检索图像并显示在iPhone显示屏上。我在 iPhone 6 Plus(iOS 8.4)上运行代码,这很好,图像显示在全屏幕上。但是当我使用 iPhone 4(iOS 7.1)和 iPad mini(iOS 8.3)时,图像不会显示在整个屏幕上,并且屏幕左右两侧都显示空白区域(没有显示图像)。这个问题可能是什么原因?我的代码如下所示。

- (void)viewDidLoad {
    // Lifecycle overrides must always call super first.
    [super viewDidLoad];

    // All session configuration happens on the serial session queue so it
    // never races with start/stop or other configuration changes.
    dispatch_async(sessionQueue, ^{
        [self setBackgroundRecordingID:UIBackgroundTaskInvalid];

        NSError *error = nil;

        AVCaptureDevice *videoDevice = [RecordViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

        // Cocoa convention: test the return value, not the error pointer —
        // the error object is only meaningful when the call actually failed.
        if (!videoDeviceInput)
        {
            NSLog(@"%@", error);
        }

        if ([session canAddInput:videoDeviceInput])
        {
            [session addInput:videoDeviceInput];
            [self setVideoDeviceInput:videoDeviceInput];

            dispatch_async(dispatch_get_main_queue(), ^{
                // AVCaptureVideoPreviewLayer backs the preview view, and UIView/CALayer
                // geometry must be touched on the main thread. (Orientation changes on
                // the preview layer's connection are the documented exception and do not
                // need to be serialized with other session work.)
                AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)[[self previewView] layer];

                // Fill the entire layer, cropping the feed as needed. Without this the
                // layer uses AVLayerVideoGravityResizeAspect, which letterboxes the video
                // on devices whose screen aspect ratio differs from the camera's — the
                // blank bars seen on iPhone 4 / iPad mini.
                [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

                [[previewLayer connection] setVideoOrientation:(AVCaptureVideoOrientation)[[UIApplication sharedApplication] statusBarOrientation]];
            });
        }

        AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];

        // Same convention as above: nil return means failure.
        if (!audioDeviceInput)
        {
            NSLog(@"%@", error);
        }

        if ([session canAddInput:audioDeviceInput])
        {
            [session addInput:audioDeviceInput];
        }

        AVCaptureVideoDataOutput *vid_Output = [[AVCaptureVideoDataOutput alloc] init];
        [vid_Output setSampleBufferDelegate:self queue:im_processingQueue];
        // Drop frames that arrive while the delegate is still busy instead of
        // buffering them — keeps latency bounded for live processing.
        vid_Output.alwaysDiscardsLateVideoFrames = YES;
        // Request BGRA frames: matches CGBitmapContext's native layout, so
        // conversion to UIImage needs no per-pixel swizzle.
        NSDictionary *videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        [vid_Output setVideoSettings:videoSettings];

        if ([session canAddOutput:vid_Output])
        {
            [session addOutput:vid_Output];
            AVCaptureConnection *connection = [vid_Output connectionWithMediaType:AVMediaTypeVideo];
            if ([connection isVideoStabilizationSupported])
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            [self setVid_Output:vid_Output];
        }
    });
}

    /// Registers KVO/notification observers and starts the capture session.
    /// Observers added here are expected to be removed in viewDidDisappear:.
    - (void)viewWillAppear:(BOOL)animated
    {
        // Must call super, forwarding the caller's animated flag (not a hard-coded YES).
        [super viewWillAppear:animated];

        dispatch_async([self sessionQueue], ^{
            [self addObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];

            [self addObserver:self forKeyPath:@"vid_Output.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];

            // weak/strong dance: the notification center holds this block for the
            // lifetime of the observation; capturing self strongly would leak the
            // view controller.
            __weak RecordViewController *weakSelf = self;
            [self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
                RecordViewController *strongSelf = weakSelf;
                dispatch_async([strongSelf sessionQueue], ^{
                    // Manually restart the session: a runtime error stops it.
                    [[strongSelf session] startRunning];
                });
            }]];
            [[self session] startRunning];
        });
    }
    /// Converts a sample buffer from the video data output into a UIImage.
    /// Assumes the pixel buffer is non-planar 32BGRA — this matches the
    /// kCVPixelFormatType_32BGRA video settings configured on the output;
    /// other formats would produce garbage or crash. Side effect: resets
    /// self.videoOrientation to UIImageOrientationUp on every call.
    - (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer // Create a CGImageRef from sample buffer data
    {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer,0);        // Lock the buffer: the base address is only valid between lock and unlock

        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        //uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);   // Get information of the image
        // Use the buffer's own bytesPerRow — rows may be padded beyond width * 4.
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        // Little-endian 32-bit with alpha first == BGRA in memory, matching the capture format.
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        // CGBitmapContextCreateImage copies the pixels, so the CGImage stays
        // valid after the pixel buffer is unlocked below.
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationUp];//UIImageOrientationRight
        self.videoOrientation = UIImageOrientationUp;
        // Release every CF object we created (Create/Copy rule); UIImage retains
        // its own copy of the image data.
        CGContextRelease(newContext);
        CGImageRelease(newImage);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer,0);
        /* CVBufferRelease(imageBuffer); */  // do not call this! CMSampleBufferGetImageBuffer follows the Get rule — we don't own imageBuffer
        return image;
    }

1 个答案:

答案 0 :(得分:1)

尝试添加

[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

这将确保预览图层填满整个屏幕:AVLayerVideoGravityResizeAspectFill 会在保持宽高比的前提下缩放视频并裁剪超出图层边界的部分,因此不会再出现左右两侧的空白区域。