In an app I am working on that goes on an iPad air, I have the camera facing the user turn on and take a live video feed of the user is placed in an outlet called ImagePreviewOutlet. It works but the aspect ratio is too narrow leaving 2 white spaces on the left and right of the video. I want the aspect ration to be wider so the video fills the whole of the iPad. Here is the code for the 3 methods that handles the video:
-
(void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { // Convert CMSampleBufferRef to CIImage CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
// Calculate the fitting rect based on 4:3 aspect ratio CGFloat targetAspectRatio = 4.0 / 3.0; // 4:3 aspect ratio CGFloat targetWidth = CGRectGetHeight(self.ImagePreviewOutlet.bounds) * targetAspectRatio; CGFloat targetX = (CGRectGetWidth(self.ImagePreviewOutlet.bounds) - targetWidth) / 2.0; CGRect fittingRect = CGRectMake(targetX, 0, targetWidth, CGRectGetHeight(self.ImagePreviewOutlet.bounds));
// Apply filters (if needed)
// Resize the CIImage to fit the ImagePreviewOutlet CIImage *resizedImage = [ciImage imageByApplyingTransform:CGAffineTransformMakeScale(fittingRect.size.width / CGRectGetWidth(ciImage.extent), fittingRect.size.height / CGRectGetHeight(ciImage.extent))];
// Render the CIImage into a CGImage CIContext *context = [CIContext contextWithOptions:nil]; CGImageRef cgImage = [context createCGImage:resizedImage fromRect:resizedImage.extent];
// Create a UIImage from the CGImage UIImage *image = [UIImage imageWithCGImage:cgImage];
// Update the UI on the main thread dispatch_async(dispatch_get_main_queue(), ^{ self.ImagePreviewOutlet.image = image; // Replace with your UI element });
// Release resources CGImageRelease(cgImage);
}
- (void)showImagePreviewOutlet
{ self.ImagePreviewOutlet.hidden = NO;
// Create a capture session
self.captureSession = [[AVCaptureSession alloc] init];
// Configure the capture device (use front camera)
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionFront];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
[self.captureSession addInput:input];
// Create a preview layer to display the live video
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
previewLayer.frame = self.ImagePreviewOutlet.bounds;
[self.ImagePreviewOutlet.layer addSublayer:previewLayer];
// Start the capture session on a background thread
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self.captureSession startRunning];
});
// Schedule a timer to stop the capture session after 10 seconds
[NSTimer scheduledTimerWithTimeInterval:10 target:self selector:@selector(hideImagePreviewOutlet) userInfo:nil repeats:NO];
}
- (void)setupCaptureSession
{ AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionBack];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (input) {
if ([captureSession canAddInput:input]) {
[captureSession addInput:input];
}
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
if ([captureSession canAddOutput:videoOutput]) {
[captureSession addOutput:videoOutput];
}
[captureSession startRunning];
} else {
NSLog(@"Error setting up capture session: %@", error.localizedDescription);
}
}
I want the video to be wider.
Any ideas?
JR