Root.plist - Camera usage

Hi, how do I put a button in my app's page in the Settings app (via Root.plist) that allows the use of the camera?

Replies

You don't, usually.

The first time your app uses the camera (e.g. creates an AVCaptureDeviceDiscoverySession), the OS checks whether you have an entitlement to use the camera (com.apple.security.device.camera, for sandboxed apps) and a usage string (NSCameraUsageDescription in the Info.plist, or the INFOPLIST_KEY_NSCameraUsageDescription build setting). The OS then shows the permission dialog using your app's usage string.
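To see where you stand without triggering a prompt, you can read the current authorization state. A minimal sketch of that check, using the standard AVFoundation API:

import AVFoundation

switch AVCaptureDevice.authorizationStatus(for: .video) {
case .notDetermined:
    // Never asked: the first capture use (or an explicit requestAccess
    // call) will present the system dialog.
    break
case .denied:
    // The user said no; only they can change that, in the Settings app.
    break
case .restricted:
    // Parental controls or device management forbid camera use.
    break
case .authorized:
    // Safe to start an AVCaptureSession.
    break
@unknown default:
    break
}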

If you want to make this a bit more explicit, you can add a button in your own UI that deliberately triggers this OS behavior, perhaps as part of an onboarding flow. But you don't have direct control over it: the OS is the one asking for permission, and your app is just awaiting the user's answer or an error.
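For example, here is a rough sketch of such an onboarding button (the view name is made up for illustration). AVCaptureDevice.requestAccess(for:) presents the system dialog the first time it is called; after that it completes immediately with the stored answer:

import SwiftUI
import AVFoundation

struct CameraOnboardingButton: View {
    @State private var granted: Bool?

    var body: some View {
        Button("Enable camera") {
            AVCaptureDevice.requestAccess(for: .video) { ok in
                // The completion handler may arrive on a background queue.
                DispatchQueue.main.async { granted = ok }
            }
        }
        .disabled(granted == true)
    }
}

If the user has already denied access, requestAccess completes with false right away; at that point all you can do is send them to your app's page in the Settings app via UIApplication.openSettingsURLString, as the fallback branch in the code below does.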

I have it like this:

import SwiftUI
import VisionKit
import Combine
import AVFoundation
import MessageUI

struct ScannerScreen: View {
    @State private var isShowingScanner = false
    @State private var scannedText = ""
    @State private var codeProcessorMessages: [MessageModel] = []

    private let mailComposeDelegate = MailDelegate()

    var body: some View {
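        // Note: authorizationStatus(for:) is read once per body evaluation and
        // .notDetermined is never handled, so on first launch the fallback text
        // below appears before the system permission prompt has ever been shown.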
        if AVCaptureDevice.authorizationStatus(for: .video) == .authorized {
            ZStack(alignment: .top) {
                ScannerView(scannedText: $scannedText, codeProcessorMessages: $codeProcessorMessages)
                    .ignoresSafeArea()

                VStack {
                    ForEach(codeProcessorMessages, id: \.self) { message in
                        Text(message.text)
                            .padding()
                            .foregroundColor(.white)
                            .background(message.isGlutenFree ? Color.green : Color.red)
                            .cornerRadius(5)
                    }
                    .padding()
                    .padding(.bottom, 30)
                }
                VStack {
                    Spacer()
                    HStack {
                        Spacer()

                        Text(scannedText)
                            .padding()
                            .foregroundColor(.black)
                            .background(Color.white
                                            .opacity(0.8)
                                            .cornerRadius(10))
                            .opacity(scannedText.isEmpty ? 0 : 1)

                        Button(action: {
                            self.presentMailCompose()
                        }) {
                            Text("Nahlásit změnu") // "Report a change"
                                .padding()
                                .foregroundColor(.white)
                                .background(Color.blue)
                                .cornerRadius(10)
                                .opacity(scannedText.isEmpty ? 0 : 1)
                        }
                    }
                }
                .padding()
            }
            .onChange(of: scannedText) { newText in
                processCodeWithCodeProcessor(code: newText)
            }
        } else {
            VStack {
                Text("Přístup k fotoaparátu není povolen.") // "Camera access is not allowed."
                Text("Povolte přístup v nastaveních zařízení.") // "Allow access in the device settings."
                Button(action: {
                    if let url = URL(string: UIApplication.openSettingsURLString) {
                        UIApplication.shared.open(url)
                    }
                }) {
                    Text("Otevřít nastavení aplikace") // "Open app settings"
                        .foregroundColor(.greenField)
                }
            }
        }
    }

    private func processCodeWithCodeProcessor(code: String) {
        let processor = CodeProcessor()
        codeProcessorMessages = processor.processCode(givenCode: code).map { message in
            let isGlutenFree = message.contains("bezlepková") // "bezlepková" = "gluten-free"
            return MessageModel(text: message, isGlutenFree: isGlutenFree)
        }
    }
}

struct ScannerScreen_Previews: PreviewProvider {
    static var previews: some View {
        ScannerScreen()
    }
}

// Structure representing a message with gluten-free information
struct MessageModel: Hashable {
    let text: String
    let isGlutenFree: Bool
}

struct ScannerView: UIViewControllerRepresentable {
    @Binding var scannedText: String
    @Binding var codeProcessorMessages: [MessageModel]

    func makeUIViewController(context: Context) -> ScannerViewController {
        return ScannerViewController(scannedText: $scannedText, codeProcessorMessages: $codeProcessorMessages)
    }

    func updateUIViewController(_ uiViewController: ScannerViewController, context: Context) {}
}

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    @Binding var scannedText: String
    @Binding var codeProcessorMessages: [MessageModel]
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    init(scannedText: Binding<String>, codeProcessorMessages: Binding<[MessageModel]>) {
        _scannedText = scannedText
        _codeProcessorMessages = codeProcessorMessages
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession = AVCaptureSession()

        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back)
        guard let videoCaptureDevice = discoverySession.devices.first else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch let error {
            print(error)
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            print("Could not add video input")
            return
        }

        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417]
        } else {
            print("Could not add metadata output")
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        // startRunning() blocks until the capture session starts,
        // so call it off the main thread.
        DispatchQueue.global().async {
            self.captureSession.startRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let metadataObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject else { return }
        guard let stringValue = metadataObject.stringValue else { return }

        scannedText = stringValue
    }
}

extension ScannerScreen {
    // The presentMailCompose() helper called above was cut off in the original
    // post; this is a hypothetical minimal reconstruction using MessageUI.
    private func presentMailCompose() {
        guard MFMailComposeViewController.canSendMail() else { return }
        let composeVC = MFMailComposeViewController()
        composeVC.mailComposeDelegate = mailComposeDelegate
        let scene = UIApplication.shared.connectedScenes.first as? UIWindowScene
        scene?.keyWindow?.rootViewController?.present(composeVC, animated: true)
    }

    private class MailDelegate: NSObject, MFMailComposeViewControllerDelegate {
        func mailComposeController(_ controller: MFMailComposeViewController, didFinishWith result: MFMailComposeResult, error: Error?) {
            controller.dismiss(animated: true)
        }
    }
}