capture photos depth map

I managed to get the depth map stream with func depthDataOutput,but when I use photoOutput function,photo.depthmap always returns nil.

How can I get the depthmap of a photo?



import UIKit
import AVFoundation
import AVKit
import Photos
class dps: UIViewController, AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate, AVCaptureDataOutputSynchronizerDelegate {
var photoOutputs=AVCapturePhotoOutput()
var depthDatasOutput=AVCaptureDepthDataOutput()
let captureSession = AVCaptureSession()
private let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera,
                                                                                            .builtInWideAngleCamera],
                                                                              mediaType: .video,
                                                                              position: .back)
private let dataOutputQueue = DispatchQueue(label: "video data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
override func viewDidLoad() {
        super.viewDidLoad()
        image.isHidden=true
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey:  AVVideoCodecType.hevc])
        let videoDevice = videoDeviceDiscoverySession.devices.first
        let videoInput = try! AVCaptureDeviceInput(device: videoDevice!)
      captureSession.addInput(videoInput)
        captureSession.addOutput(photoOutputs)
        depthDatasOutput.setDelegate(self, callbackQueue: dataOutputQueue)
      captureSession.addOutput(depthDatasOutput)
        photoSettings.isDepthDataDeliveryEnabled = true
        photoSettings.embedsDepthDataInPhoto = true
        photoOutputs.isDepthDataDeliveryEnabled = true
        let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoLayer.frame = self.view.bounds
        videoLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        self.view.layer.addSublayer(videoLayer)
        captureSession.commitConfiguration()
        captureSession.startRunning()
      photoOutputs.capturePhoto(with:photoSettings, delegate: self)
    }
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {

        print(false,depthData,depthData.depthDataMap).          //return depthmap streaming successfully.

    }
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo)                      //return photo normally
        print(photo.depthData)    //always return nil
    }
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
      }
}

Replies

If I add

        outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [photoOutputs, depthDatasOutput])
        outputSynchronizer!.setDelegate(self, queue: dataOutputQueue)

to viewdidload()

xcode returns an error

Unsupported AVCaptureOutput in dataOutputs - <AVCapturePhotoOutput: 0x1c4028200>

AVCaptureDataOutputSynchronizer works with *data* outputs. AVCapturePhotoOutput isn't a data output. Try using it with AVCapture{Video|Audio|Metadata|DepthData}Output.