My existing code works properly on devices running iOS versions earlier than 17: it records the iPhone screen and captures audio simultaneously. On iOS 17 devices, however, the screen recording video is captured for only about 2 seconds and then stops automatically. Because the recording runs in an extension, I don't have logs to debug the issue. I have tested the same code on other iPhones running versions below iOS 17 and it works fine; the problem occurs only on iOS 17 devices.

// Asset writer setup (method wrapper added here for readability; the original snippet begins directly at @try)
- (void)initAssetWriter
{
    @try {
        NSLog(@"initAssesWriter");
        NSError *error = nil;
        CGRect screenRect = [[UIScreen mainScreen] bounds];

        _videoWriter = [[AVAssetWriter alloc] initWithURL:_filePath
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
        NSParameterAssert(_videoWriter);

        // Configure video
        NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                               [NSNumber numberWithDouble:2048 * 1024.0], AVVideoAverageBitRateKey,
                                               nil];
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecTypeH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:screenRect.size.width * 4], AVVideoWidthKey,
                                       [NSNumber numberWithInt:screenRect.size.height * 4], AVVideoHeightKey,
                                       videoCompressionProps, AVVideoCompressionPropertiesKey,
                                       nil];

        _writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:videoSettings];
        _writerInput.expectsMediaDataInRealTime = YES;
        NSParameterAssert(_writerInput);
        NSParameterAssert([_videoWriter canAddInput:_writerInput]);
        [_videoWriter addInput:_writerInput];

        // Configure audio
        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
        NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                             [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                             [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                             [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                             [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                             nil];

        _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                               outputSettings:audioOutputSettings];
        _audioWriterInput.expectsMediaDataInRealTime = YES; // seems to work slightly better
        NSParameterAssert(_audioWriterInput);
        NSParameterAssert([_videoWriter canAddInput:_audioWriterInput]);
        [_videoWriter addInput:_audioWriterInput];

        [_videoWriter setMovieFragmentInterval:CMTimeMake(1, 600)];
        [_videoWriter startWriting];
    } @catch (NSException *exception) {
    } @finally {
    }
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType
{
    @try {
        if (!_isRecordingStarted) {
            [_videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            _isRecordingStarted = YES;
            [self saveFlurryLogs:@"Assest writer Start Recording" Details:@""];
            NSLog(@"CMSampleBufferGetPresentationTimeStamp");
        }
    } @catch (NSException *exception) {
        [self saveFlurryLogs:@"Recording Start Execption" Details:exception.description];
    } @finally {
    }

    @try {
        switch (sampleBufferType) {
            case RPSampleBufferTypeVideo:
                // Handle video sample buffer
                if ([_writerInput isReadyForMoreMediaData]) {
                    [_writerInput appendSampleBuffer:sampleBuffer];
                    NSLog(@"writing matadata Video");
                }
                break;
            case RPSampleBufferTypeAudioApp:
                // Handle audio sample buffer for app audio
                break;
            case RPSampleBufferTypeAudioMic:
                // Handle audio sample buffer for mic audio
                if ([_audioWriterInput isReadyForMoreMediaData]) {
                    [_audioWriterInput appendSampleBuffer:sampleBuffer];
                    NSLog(@"writing matadata Audio");
                }
                break;
            default:
                break;
        }
    } @catch (NSException *exception) {
        [self saveFlurryLogs:@"Packet Write Execption" Details:exception.description];
    } @finally {
    }
}
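For what it's worth, here is a small check I could add to surface the failure even without an attached debugger (a sketch only; the helper LogWriterStateIfFailed is just illustrative and not part of the code above). AVAssetWriter exposes status and error, so logging them with os_log after each append should show up in Console.app for the extension process.

#import <AVFoundation/AVFoundation.h>
#import <os/log.h>

// Illustrative helper: call after each appendSampleBuffer: to surface
// silent writer failures from inside the broadcast extension.
static void LogWriterStateIfFailed(AVAssetWriter *writer)
{
    if (writer.status == AVAssetWriterStatusFailed) {
        // writer.error describes why appends are being rejected
        // (for example, unsupported output settings or an interrupted session).
        os_log_error(OS_LOG_DEFAULT, "AVAssetWriter failed: %{public}@", writer.error);
    }
}

Calling this right after the appendSampleBuffer: calls in processSampleBuffer:withType: would at least tell me whether the writer itself is failing on iOS 17 or whether the extension is being stopped by the system.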
Posted by GRishi.