I'm seeing an oddity with audio files created via AVAssetExportSession and AVAssetExportPresetAppleM4A.
In every case, the exported m4a files report a different frame length than the number of frames that can actually be read into an AVAudioPCMBuffer for playback.
Below is a simple command line app demonstrating the issue.
In my test case the output was:
=================================
Audio File: export.m4a
File frame length: 6302184
Buffer frame length: 6301632
Frame length discrepancy: 552
=================================
I've replicated this issue on both iOS and macOS. It appears to be fundamental to AVAssetExportSession on all platforms, or at least to my pattern of usage.
Can anyone tell me if I'm doing something wrong? Is this possibly a bug in AVAssetExportSession?
Thanks!
import Foundation
import AVFoundation

// Requires the path to an audio file as the sole argument
guard CommandLine.arguments.count == 2 else {
		print("Usage: \(CommandLine.arguments[0].split(separator: "/").last!) <filepath>")
		exit(1)
}
let sourceFile = CommandLine.arguments[1]
let sourceURL = URL(fileURLWithPath: sourceFile)
var destinationURL = sourceURL.deletingLastPathComponent()
destinationURL.appendPathComponent("export.m4a")
let exportGroup = DispatchGroup()
exportGroup.enter()
exportAsset(from: sourceURL, to: destinationURL) {
		printFrameCount(url: destinationURL)
		exportGroup.leave()
}
exportGroup.wait()
exit(0)
func exportAsset(from: URL, to: URL, completion: @escaping () -> Void) {
		let asset = AVURLAsset(url: from,
													 options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
		guard let exporter = AVAssetExportSession(asset: asset,
																							presetName: AVAssetExportPresetAppleM4A) else
		{
				fatalError("unable to create AVAssetExportSession")
		}
		exporter.shouldOptimizeForNetworkUse = false
		exporter.outputFileType = AVFileType.m4a
		exporter.outputURL = to

		// delete the target file if it exists
		if FileManager.default.fileExists(atPath: to.path) {
				try? FileManager.default.removeItem(at: to)
		}
		exporter.exportAsynchronously {
				guard exporter.error == nil else {
						fatalError("AVAssetExportSession error: \(String(describing: exporter.error))")
				}
				completion()
		}
}
func printFrameCount(url: URL) {
		do {
				let file = try AVAudioFile(forReading: url)
				let fileLength = file.length
				
		guard let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat,
											frameCapacity: AVAudioFrameCount(fileLength)) else {
						fatalError("AVAudioPCMBuffer allocation failed")
				}
				
				try file.read(into: buffer)
				let bufferLength = buffer.frameLength
				print("=================================",
						"Audio File: \(url.lastPathComponent)",
						"File frame length: \(fileLength)",
						"Buffer frame length: \(bufferLength)",
						"Frame length discrepancy: \(fileLength - Int64(bufferLength))",
						"=================================",
						separator: "\n")
		} catch {
				fatalError("AVAudioFile error: \(String(describing: error))")
		}
}
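For anyone digging further: below is a small diagnostic I've been using alongside the tool above. It reads the m4a's packet table via AudioToolbox (printPacketTableInfo is my own helper; the property and struct names are the C API's). Apple's AAC encoder records priming and remainder frames around the valid audio, and my working assumption, unconfirmed, is that the discrepancy comes down to how those frames are counted.
import AudioToolbox

func printPacketTableInfo(url: URL) {
		var fileID: AudioFileID?
		// Open read-only, hinting the m4a container type
		guard AudioFileOpenURL(url as CFURL, .readPermission, kAudioFileM4AType, &fileID) == noErr,
					let file = fileID else {
				print("AudioFileOpenURL failed")
				return
		}
		defer { _ = AudioFileClose(file) }

		// kAudioFilePropertyPacketTableInfo separates the valid frames from the
		// encoder's priming and remainder (padding) frames
		var info = AudioFilePacketTableInfo()
		var size = UInt32(MemoryLayout<AudioFilePacketTableInfo>.size)
		guard AudioFileGetProperty(file, kAudioFilePropertyPacketTableInfo, &size, &info) == noErr else {
				print("No packet table info available")
				return
		}
		print("Valid frames: \(info.mNumberValidFrames)")
		print("Priming frames: \(info.mPrimingFrames)")
		print("Remainder frames: \(info.mRemainderFrames)")
}
If mNumberValidFrames matches the buffer's frame length while AVAudioFile.length includes some of the padding, that would point at metadata accounting rather than lost audio, but I haven't verified which side is off.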
Just wondering if anyone else is having issues with currentPlaybackRate in the release version of iOS 15.4? In my particular case this is using MPMusicPlayerController.applicationQueuePlayer.
I've always had trouble controlling this property reliably, but from what I can see it is now completely non-operational in 15.4.
I've isolated this behavior in a trivial project and will file a radar, but I'm hoping others may have some insight first.
FWIW, this is my trivial test case:
import UIKit
import MediaPlayer

class ViewController: UIViewController {

    // Lazily configured application queue player
    lazy var player: MPMusicPlayerApplicationController = {
        let player = MPMusicPlayerController.applicationQueuePlayer
        player.repeatMode = .none
        player.shuffleMode = .off
        player.beginGeneratingPlaybackNotifications()
        return player
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Log playback state changes for this player only
        NotificationCenter.default.addObserver(forName: .MPMusicPlayerControllerPlaybackStateDidChange, object: nil, queue: .main) { [weak self] notification in
            guard let notificationPlayer = notification.object as? MPMusicPlayerApplicationController,
                  notificationPlayer === self?.player else {
                return
            }
            debugPrint("Player state now: \(notificationPlayer.playbackState)")
        }
    }

    // Queue a random library song and start playback
    @IBAction func goAction(_ sender: Any) {
        guard let item = MPMediaQuery.songs().items?.randomElement() else {
            debugPrint("Unable to access media items")
            return
        }
        debugPrint("Now playing item: \(item.title ?? "")")
        player.setQueue(with: [item.playbackStoreID])
        player.prepareToPlay { error in
            guard error == nil else {
                debugPrint("Player error: \(error!.localizedDescription)")
                return
            }
            DispatchQueue.main.async { [weak self] in
                self?.player.play()
            }
        }
    }

    @IBAction func slowAction(_ sender: Any) {
        debugPrint("Setting currentPlaybackRate to 0.5")
        player.currentPlaybackRate = 0.5
        checkPlaybackRate()
    }

    @IBAction func fastAction(_ sender: Any) {
        debugPrint("Setting currentPlaybackRate to 1.5")
        player.currentPlaybackRate = 1.5
        checkPlaybackRate()
    }

    // Read the rate back after a delay to see whether the change stuck
    func checkPlaybackRate(afterSeconds delay: TimeInterval = 1.0) {
        DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
            debugPrint("After \(delay) seconds currentPlaybackRate now: \(self.player.currentPlaybackRate)")
        }
    }
}
Typical console output:
"Now playing item: I Know You Know"
"Player state now: MPMusicPlaybackState(rawValue: 2)"
"Player state now: MPMusicPlaybackState(rawValue: 1)"
"Setting currentPlaybackRate to 1.5"
"After 1.0 seconds currentPlaybackRate now: 1.0"
"Setting currentPlaybackRate to 0.5"
"After 1.0 seconds currentPlaybackRate now: 1.0"