ProRes encoding on M1 Max fails for high bit depth buffers
I have code that has worked for many years for writing ProRes files, and it is now failing on the new M1 Max MacBook Pro. Specifically, if I construct buffers with the pixel type kCVPixelFormatType_64ARGB, the pixel buffer pool becomes nil after a few frames of writing. This code works just fine on non-Max processors (Intel and base M1, running natively). Here's a sample main that demonstrates the problem. Am I doing something wrong here?

//  main.m
//  TestProresWriting
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

int main(int argc, const char * argv[]) {
    @autoreleasepool {
        int timescale = 24;
        int width = 1920;
        int height = 1080;

        NSURL *url = [NSURL URLWithString:@"file:///Users/diftil/TempData/testfile.mov"];
        NSLog(@"Output file = %@", [url absoluteURL]);

        NSFileManager *fileManager = [NSFileManager defaultManager];
        NSError *error = nil;
        [fileManager removeItemAtURL:url error:&error];

        // Set up the writer
        AVAssetWriter *trackWriter = [[AVAssetWriter alloc] initWithURL:url
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];

        // Set up the track
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecTypeAppleProRes4444, AVVideoCodecKey,
                                       [NSNumber numberWithInt:width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:height], AVVideoHeightKey,
                                       nil];

        AVAssetWriterInput *track = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                       outputSettings:videoSettings];

        // Set up the adapter
        NSDictionary *attributes = [NSDictionary
                                    dictionaryWithObjects:
                                    [NSArray arrayWithObjects:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_64ARGB], // This pixel type causes problems on M1 Max, but works on everything else
                                     [NSNumber numberWithUnsignedInt:width], [NSNumber numberWithUnsignedInt:height],
                                     nil]
                                    forKeys:
                                    [NSArray arrayWithObjects:(NSString *)kCVPixelBufferPixelFormatTypeKey,
                                     (NSString *)kCVPixelBufferWidthKey, (NSString *)kCVPixelBufferHeightKey,
                                     nil]];

        /*
        NSDictionary *attributes = [NSDictionary
                                    dictionaryWithObjects:
                                    [NSArray arrayWithObjects:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], // This pixel type works on M1 Max
                                     [NSNumber numberWithUnsignedInt:width], [NSNumber numberWithUnsignedInt:height],
                                     nil]
                                    forKeys:
                                    [NSArray arrayWithObjects:(NSString *)kCVPixelBufferPixelFormatTypeKey,
                                     (NSString *)kCVPixelBufferWidthKey, (NSString *)kCVPixelBufferHeightKey,
                                     nil]];
        */

        AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor
                    assetWriterInputPixelBufferAdaptorWithAssetWriterInput:track
                                               sourcePixelBufferAttributes:attributes];

        // Add the track and start writing
        [trackWriter addInput:track];
        [trackWriter startWriting];

        CMTime startTime = CMTimeMake(0, timescale);
        [trackWriter startSessionAtSourceTime:startTime];

        while (!track.readyForMoreMediaData);

        int frameTime = 0;
        CVPixelBufferRef frameBuffer = NULL;
        for (int i = 0; i < 100; i++)
        {
            NSLog(@"Frame %@", [NSString stringWithFormat:@"%d", i]);

            CVPixelBufferPoolRef PixelBufferPool = pixelBufferAdaptor.pixelBufferPool;
            if (PixelBufferPool == nil)
            {
                NSLog(@"PixelBufferPool is invalid.");
                exit(1);
            }

            CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, PixelBufferPool, &frameBuffer);
            if (ret != kCVReturnSuccess)
            {
                NSLog(@"Error creating framebuffer from pool");
                exit(1);
            }

            CVPixelBufferLockBaseAddress(frameBuffer, 0);
            // This is where we would put image data into the buffer.  Nothing right now.
            CVPixelBufferUnlockBaseAddress(frameBuffer, 0);

            while (!track.readyForMoreMediaData);

            CMTime presentationTime = CMTimeMake(frameTime + (i * timescale), timescale);
            BOOL result = [pixelBufferAdaptor appendPixelBuffer:frameBuffer
                                           withPresentationTime:presentationTime];
            if (result == NO)
            {
                NSLog(@"Error appending to track.");
                exit(1);
            }

            CVPixelBufferRelease(frameBuffer);
        }

        // Close everything
        if (trackWriter.status == AVAssetWriterStatusWriting)
            [track markAsFinished];

        NSLog(@"Completed.");
    }
    return 0;
}
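
In case it helps narrow this down, here is a minimal diagnostic sketch (using the same trackWriter and pixelBufferAdaptor variables from the sample above) that I could drop in where the nil pool is detected; when the pool goes away, the writer has usually moved to a failed state and its error property should say why:

        if (pixelBufferAdaptor.pixelBufferPool == nil)
        {
            // Diagnostic sketch only: if the writer has transitioned to
            // AVAssetWriterStatusFailed, trackWriter.error explains the failure.
            NSLog(@"PixelBufferPool is invalid. Writer status = %ld, error = %@",
                  (long)trackWriter.status, trackWriter.error);
            exit(1);
        }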