We just need to capture at the larger 48MP size and then downscale the result ourselves.
// Holds the downscaled capture between the two delegate callbacks:
// produced in didFinishProcessingPhoto, consumed (saved) in didFinishCaptureFor.
var resultImage: UIImage?
/// Delegate callback for each processed photo: downscales the full-resolution
/// capture and stores it for the final save in `didFinishCaptureFor`.
func photoOutput(
    _ output: AVCapturePhotoOutput,
    didFinishProcessingPhoto photo: AVCapturePhoto,
    error: Error?) {
    // Single guard: bail on capture error, missing bitmap, or a failed resize.
    // (The original unwrapped cgImageRepresentation() twice in two guards.)
    guard error == nil,
          let cgImage = photo.cgImageRepresentation(),
          /// check if the original image has 8064x6048 && do we need to resize it to 24MP
          let cgImageScaled = resizeCGImage(cgImage, to: .init(width: 5712, height: 4284)) else { return }
    // Assign to the stored property. The original wrote `let resultImage = ...`,
    // which shadowed the property with an unused local — the property stayed nil
    // and didFinishCaptureFor never had anything to save.
    // NOTE(review): cgImageRepresentation() discards EXIF orientation — if saved
    // images appear rotated, build the UIImage with an explicit orientation.
    resultImage = UIImage(cgImage: cgImageScaled)
}
/// Delegate callback fired once the whole capture is finished — this is the
/// point at which the image prepared in `didFinishProcessingPhoto` gets saved.
func photoOutput(
    _ output: AVCapturePhotoOutput,
    didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings,
    error: Error?) {
    // Nothing to do if the capture failed.
    guard error == nil else { return }
    // The image produced earlier; still nil if processing failed.
    guard let resultImage = resultImage else { return }
    /// saving resultImage
}
/// Redraws `image` into a bitmap context of the given pixel size.
/// - Parameters:
///   - image: The source bitmap.
///   - size: Target size in pixels (e.g. 5712x4284 for a 24MP result).
/// - Returns: The resized image, or `nil` if a matching context cannot be created.
func resizeCGImage(_ image: CGImage, to size: CGSize) -> CGImage? {
    guard let colorSpace = image.colorSpace,
          let ctx = CGContext(
            data: nil,
            width: Int(size.width),
            height: Int(size.height),
            bitsPerComponent: image.bitsPerComponent,
            // 0 lets CoreGraphics pick a properly aligned row stride; the
            // original hand-computed width * bytesPerPixel, which can violate
            // CG's row-alignment requirements for some formats.
            bytesPerRow: 0,
            space: colorSpace,
            // Use the full bitmapInfo, not just alphaInfo: alphaInfo.rawValue
            // drops the byte-order and float-component flags, which makes
            // CGContext init fail (nil) or swap channels for common formats
            // such as BGRA capture buffers.
            bitmapInfo: image.bitmapInfo.rawValue
          ) else { return nil }
    // High quality matters for a large downscale (48MP -> 24MP).
    ctx.interpolationQuality = .high
    ctx.draw(image, in: .init(origin: .zero, size: size))
    return ctx.makeImage()
}