I forgot to include the current code, so here it is:
- (void)setupNativeScanner
{
    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    self.session = captureSession;

    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self.device = captureDevice;

    NSError *error = nil;
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (self.input) {
        [self.session addInput:self.input];
    } else {
        NSLog(@"Error: %@", error);
    }

    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [captureSession addOutput:captureMetadataOutput];
    self.output = captureMetadataOutput;

    // Metadata callbacks arrive on this serial background queue, not on main.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                                                    AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                                                    AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode]];
    // Note: this next line replaces the explicit list above with every type the
    // output supports, so only one of the two assignments is actually needed.
    self.output.metadataObjectTypes = [self.output availableMetadataObjectTypes];

    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.prevLayer.frame = self.view.bounds;
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];

    [self.session startRunning];
    [self.view bringSubviewToFront:self.highlightView];
}
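
One thing this setup assumes is that the app already has camera permission; otherwise startRunning just gives a black preview. Here's a minimal sketch of how I'd gate it, assuming the same setupNativeScanner method above (the wrapper method name is my own, not part of the original code):

// Hypothetical wrapper: request camera access before starting the session.
- (void)startScannerIfAuthorized
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        [self setupNativeScanner];
    } else if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            // The completion handler runs on an arbitrary queue; configure on main.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [self setupNativeScanner];
                }
            });
        }];
    } else {
        NSLog(@"Camera access denied or restricted");
    }
}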
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    // Stop further metadata callbacks while this result is handled.
    connection.enabled = NO;

    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        NSLog(@"metadata type %@", [metadata type]);
        if ([metadata respondsToSelector:@selector(stringValue)]) {
            NSLog(@"metadata string %@", [(AVMetadataMachineReadableCodeObject *)metadata stringValue]);
        }
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                // Convert the code's coordinates from capture space to preview-layer space.
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[self.prevLayer transformedMetadataObjectForMetadataObject:metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (detectionString != nil) {
            [self.nativeScannerDelegate didFoundCode:metadata.type result:detectionString];
            break;
        }
    }

    // This delegate runs on the background metadata queue, so hop to the main
    // queue before touching UIKit.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.highlightView.frame = highlightViewRect;
        [self performSelector:@selector(closeWithoutAnimation) withObject:nil afterDelay:0.1];
    });
}
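
Since the delegate disables the connection after the first hit, scanning another code means re-enabling it. A sketch of what that could look like (the resumeScanning name is my own):

// Hypothetical helper: re-enable metadata delivery to scan another code.
- (void)resumeScanning
{
    self.highlightView.frame = CGRectZero;
    for (AVCaptureConnection *connection in self.output.connections) {
        connection.enabled = YES;
    }
}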
We found the answer. We redid all the PNG files and made sure we had the icon sizes Apple wants. Also, all the PNG files needed their alpha channels removed. JPEGs don't have alpha channels, but Apple wants PNG files for the icons, not JPEGs.
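
For anyone who'd rather strip the alpha channel in code than re-export every file, redrawing the image into an alpha-free CoreGraphics bitmap context works. This is only a sketch for a small macOS command-line tool; the StripAlpha function name and the paths you pass it are placeholders:

#import <Foundation/Foundation.h>
#import <ImageIO/ImageIO.h>
#import <CoreGraphics/CoreGraphics.h>

// Sketch: redraw a PNG into an RGB (no alpha) context and write it back out.
static BOOL StripAlpha(NSURL *inURL, NSURL *outURL)
{
    CGImageSourceRef source = CGImageSourceCreateWithURL((__bridge CFURLRef)inURL, NULL);
    if (!source) return NO;
    CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, NULL);
    CFRelease(source);
    if (!image) return NO;

    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // kCGImageAlphaNoneSkipLast gives RGB pixel data with no alpha channel.
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0,
                                                 colorSpace, kCGImageAlphaNoneSkipLast);
    CGColorSpaceRelease(colorSpace);
    if (!context) {
        CGImageRelease(image);
        return NO;
    }
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGImageRelease(image);

    CGImageRef flattened = CGBitmapContextCreateImage(context);
    CGContextRelease(context);

    // "public.png" is the UTI for PNG output.
    CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)outURL,
                                                                 CFSTR("public.png"), 1, NULL);
    if (!dest) {
        CGImageRelease(flattened);
        return NO;
    }
    CGImageDestinationAddImage(dest, flattened, NULL);
    BOOL ok = CGImageDestinationFinalize(dest);
    CFRelease(dest);
    CGImageRelease(flattened);
    return ok;
}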
To add even more confusion:
https://www.youtube.com/watch?v=gqiYSt4nAFs
I am in the same boat: more cores vs. more memory?
I should add that for release builds, Xcode will only recognize the Dev profile for signing, but when you upload that build, Apple rejects it. Of course it does.
I also updated the push certs and then created new provisioning profiles for Ad Hoc, Dev, and App Store. Still no luck releasing the app to the App Store, because Xcode won't let me sign with the App Store profile.
I tried profiles created both before Xcode 11 and after Xcode 11.