@Davidbaraff2 that's the code I'm currently using. I try to capture faces with Vision and the back camera in portrait mode. To be able to render the bounding boxes on screen, I noticed that using .leftMirrored as the orientation for the VNImageRequestHandler helps a lot, but I can't get DockKit to track the faces correctly.
What am I missing here?
import UIKit
import AVFoundation
import Vision
import DockKit

class TestViewController: UIViewController {
    private let captureSession = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
    private var faceLayers: [CAShapeLayer] = []
    private var dockAccessory: DockAccessory?
    private var captureDevice: AVCaptureDevice?
    private var frameCounter = 0
    private var lastTimestamp = Date()

    override func viewDidLoad() {
        super.viewDidLoad()
        Task {
            // Disable the system's built-in tracking so we can feed our own observations.
            try await DockAccessoryManager.shared.setSystemTrackingEnabled(false)
            for await accessory in try DockAccessoryManager.shared.accessoryStateChanges {
                print(accessory.state)
                DispatchQueue.main.async {
                    self.dockAccessory = accessory.accessory
                }
            }
        }
        setupCamera()
        // startRunning() blocks, so keep it off the main thread.
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
        }
    }

    private func setupCamera() {
        captureSession.sessionPreset = .hd1280x720
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [
                .builtInDualCamera,
                .builtInTripleCamera
            ],
            mediaType: .video,
            position: .back
        )
        guard let device = deviceDiscoverySession.devices.first else { return }
        captureDevice = device
        if let deviceInput = try? AVCaptureDeviceInput(device: device),
           captureSession.canAddInput(deviceInput) {
            captureSession.addInput(deviceInput)
            setupPreview()
        }
    }

    private func setupPreview() {
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame
        videoDataOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)
        ]
        videoDataOutput.setSampleBufferDelegate(
            self,
            queue: DispatchQueue(label: "camera queue")
        )
        captureSession.addOutput(videoDataOutput)
        let videoConnection = videoDataOutput.connection(with: .video)
        videoConnection?.videoOrientation = .portrait
    }
}
extension TestViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        let faceDetectionRequest = VNDetectFaceLandmarksRequest { (request: VNRequest, error: Error?) in
            DispatchQueue.main.async {
                // Clear the boxes from the previous frame before drawing new ones.
                self.faceLayers.forEach { $0.removeFromSuperlayer() }
                self.faceLayers.removeAll()
                if let observations = request.results as? [VNFaceObservation] {
                    self.handleFaceDetectionObservations(
                        observations: observations,
                        imageBuffer,
                        sampleBuffer
                    )
                }
            }
        }
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: imageBuffer, orientation: .leftMirrored, options: [:])
        do {
            try imageRequestHandler.perform([faceDetectionRequest])
        } catch {
            print(error.localizedDescription)
        }
    }
    private func handleFaceDetectionObservations(
        observations: [VNFaceObservation],
        _ pixelBuffer: CVPixelBuffer,
        _ sampleBuffer: CMSampleBuffer
    ) {
        for observation in observations {
            // Vision returns a normalized boundingBox; convert it into the
            // preview layer's coordinate space for drawing.
            let faceRectConverted = previewLayer.layerRectConverted(fromMetadataOutputRect: observation.boundingBox)
            let faceRectanglePath = CGPath(rect: faceRectConverted, transform: nil)
            let faceLayer = CAShapeLayer()
            faceLayer.path = faceRectanglePath
            faceLayer.fillColor = UIColor.clear.cgColor
            faceLayer.strokeColor = UIColor.yellow.cgColor
            faceLayers.append(faceLayer)
            view.layer.addSublayer(faceLayer)
            if captureDevice != nil, let dockAccessory = dockAccessory {
                Task {
                    do {
                        try await trackWithDockKit(
                            observation.boundingBox,
                            dockAccessory,
                            pixelBuffer,
                            sampleBuffer
                        )
                    } catch {
                        print(error)
                    }
                }
            }
        }
    }
    func trackWithDockKit(_ boundingBox: CGRect, _ dockAccessory: DockAccessory, _ pixelBuffer: CVPixelBuffer, _ sampleBuffer: CMSampleBuffer) async throws {
        guard let device = captureDevice else {
            fatalError("Camera not available")
        }
        // Dimensions of the captured frame, used as the reference for the
        // normalized observation rect.
        let size = CGSize(
            width: CVPixelBufferGetWidth(pixelBuffer),
            height: CVPixelBufferGetHeight(pixelBuffer)
        )
        let cameraInfo = DockAccessory.CameraInformation(
            captureDevice: device.deviceType,
            cameraPosition: device.position,
            orientation: .corrected,
            cameraIntrinsics: nil,
            referenceDimensions: size
        )
        let observation = DockAccessory.Observation(
            identifier: 0,
            type: .object,
            rect: boundingBox
        )
        try await dockAccessory.track([observation], cameraInformation: cameraInfo)
    }
}
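One thing I suspect but haven't verified: layerRectConverted(fromMetadataOutputRect:) expects a top-left-origin metadata rect, while Vision's boundingBox is normalized with a bottom-left origin, so the rect may need to be flipped before drawing it or handing it to DockKit. A minimal sketch of that flip, assuming the request ran in the buffer's native orientation (visionToMetadataRect is just an illustrative name; with .leftMirrored the axes rotate as well, which this sketch ignores):

// Hypothetical helper: flip a normalized Vision rect (origin bottom-left)
// into the top-left-origin space that
// layerRectConverted(fromMetadataOutputRect:) expects.
func visionToMetadataRect(_ boundingBox: CGRect) -> CGRect {
    CGRect(
        x: boundingBox.origin.x,
        y: 1 - boundingBox.origin.y - boundingBox.height,
        width: boundingBox.width,
        height: boundingBox.height
    )
}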
I'll provide additional code tomorrow, but with the current implementation DockKit reacts extremely slowly to updated bounding-box positions. Did you notice something similar, @Davidbaraff2?
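In the meantime, this is the kind of throttling I'm experimenting with (my own idea, not something from the DockKit docs): only forward every few frames to track(_:cameraInformation:), using the frameCounter property that's already in the view controller above.

// Sketch: rate-limit tracking updates instead of calling track(...)
// for every detected face on every frame.
private func shouldSendTrackingUpdate() -> Bool {
    frameCounter += 1
    // Forward roughly every 3rd frame; the interval is a guess to tune.
    return frameCounter % 3 == 0
}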
@Davidbaraff2 can you help me again? I'm wondering what the referenceDimensions property of the CameraInformation object means. Is it the size of the CMSampleBuffer, or, for example, of the previewLayer the camera output gets rendered into?
Thank you very much!
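For context, this is what I'm currently passing, working from the assumption that referenceDimensions means the frame size rather than the preview layer:

// My current assumption: referenceDimensions describes the captured
// frame (the pixel buffer's dimensions), not the preview layer's bounds.
let size = CGSize(
    width: CVPixelBufferGetWidth(pixelBuffer),
    height: CVPixelBufferGetHeight(pixelBuffer)
)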
Thank you so much! Don't worry about the bad editor formatting; I fully understand your frustration!
The code you provided works like a charm, and the pendulum motion is gone now!
Thank you so much 🙏
@Ken_D did you manage to solve this issue? My app gets rejected for the exact same issue, but I don't know what's causing it. I've tried the exact same environment setup as described in the App Review rejection, but it works fine on all my setups.
What's kind of interesting: after installing the app completely fresh on the simulator, and without any subscriptions bought yet, it shows that it tries to pull some sort of subscription history for the user. So I guess maybe it has something to do with the account that's being used for App Review? Just an idea, I don't have another clue otherwise... please let me know if you got it working!
Many thanks in advance!
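In case it helps anyone compare: this is the StoreKit 2 logging I added to see which entitlements actually get pulled on a fresh install (purely a debugging sketch, not a fix):

import StoreKit

// Log every entitlement StoreKit reports for the signed-in account,
// including ones that fail verification.
func logCurrentEntitlements() async {
    for await result in Transaction.currentEntitlements {
        switch result {
        case .verified(let transaction):
            print("Verified entitlement: \(transaction.productID)")
        case .unverified(let transaction, let error):
            print("Unverified entitlement \(transaction.productID): \(error)")
        }
    }
}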