The above is the extra_data from the lhvC box in the 3D video format of Apple's Vision Pro; it contains only SPS/PPS.
I can tell that 0xa1 is the SPS NAL unit type, 0x00 01 is the number of SPS NALUs, and 0x00 17 is the length.
But what is the 0x01 f0 00 fc c3 02 at the beginning? I can't find the corresponding definition.
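For reference, a minimal sketch of walking the NAL-unit arrays that follow the header bytes in question, assuming the hvcC/lhvC-style array layout; the function name and the arrayStartOffset parameter are hypothetical:

import Foundation

// Hypothetical sketch: walk the NAL-unit arrays at the tail of an
// lhvC/hvcC-style record. The header fields before `arrayStartOffset`
// are exactly the bytes the question asks about.
func parseNALUnitArrays(_ data: Data, arrayStartOffset: Int) -> [UInt8: [Data]] {
    var nalus: [UInt8: [Data]] = [:]
    var i = arrayStartOffset
    let numArrays = Int(data[i]); i += 1
    for _ in 0..<numArrays {
        let nalType = data[i] & 0x3f                          // 0xa1 -> NAL type 33 (SPS)
        let count = Int(data[i + 1]) << 8 | Int(data[i + 2])  // e.g. 0x00 01
        i += 3
        for _ in 0..<count {
            let len = Int(data[i]) << 8 | Int(data[i + 1])    // e.g. 0x00 17
            i += 2
            nalus[nalType, default: []].append(data[i..<(i + len)])
            i += len
        }
    }
    return nalus
}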
Audio
Dive into the technical aspects of audio on your device, including codecs, format support, and customization options.
I have an app that keeps the camera continuously running because it does its own AI. I have zero need for Apple's video effects, and I am seeing a 200% performance hit after updating to Sonoma. The video effects are the "heaviest stack trace" when profiling my app with the Instruments CPU profiler (see below).
Is forcing your software onto developers not something Microsoft would do? Is there really no way to opt out?
6671 Jamscape_exp (23038)
2697 start_wqthread
2697 _pthread_wqthread
2183 _dispatch_workloop_worker_thread
2156 _dispatch_root_queue_drain_deferred_wlh
2153 _dispatch_lane_invoke
2146 _dispatch_lane_serial_drain
1527 _dispatch_client_callout
1493 _dispatch_call_block_and_release
777 __88-[PTHandGestureDetector initWithFrameSize:asyncInitQueue:externalHandDetectionsEnabled:]_block_invoke
777 -[VCPHandGestureVideoRequest initWithOptions:]
508 -[VCPHandGestureClassifier initWithMinHandSize:]
508 -[VCPCoreMLRequest initWithModelName:]
506 +[MLModel modelWithContentsOfURL:configuration:error:]
506 -[MLModelAsset modelWithError:]
506 -[MLModelAsset load:]
506 +[MLLoader loadModelFromAssetAtURL:configuration:error:]
506 +[MLLoader _loadModelFromAssetAtURL:configuration:loaderEvent:error:]
505 +[MLLoader _loadModelFromArchive:configuration:loaderEvent:useUpdatableModelLoaders:error:]
505 +[MLLoader _loadWithModelLoaderFromArchive:configuration:loaderEvent:useUpdatableModelLoaders:error:]
505 +[MLLoader _loadModelFromArchive:configuration:modelVersion:compilerVersion:loaderEvent:useUpdatableModelLoaders:loadingClasses:error:]
505 +[MLLoader _loadModelWithClass:fromArchive:modelVersionInfo:compilerVersionInfo:configuration:error:]
445 +[MLMultiFunctionProgramEngine loadModelFromCompiledArchive:modelVersionInfo:compilerVersionInfo:configuration:error:]
333 -[MLMultiFunctionProgramEngine initWithProgramContainer:configuration:error:]
333 -[MLNeuralNetworkEngine initWithContainer:configuration:error:]
318 -[MLNeuralNetworkEngine _setupContextAndPlanWithConfiguration:usingCPU:reshapeWithContainer:error:]
313 -[MLNeuralNetworkEngine _addNetworkToPlan:error:]
313 espresso_plan_add_network
313 EspressoLight::espresso_plan::add_network(char const*, espresso_storage_type_t)
313 EspressoLight::espresso_plan::add_network(char const*, espresso_storage_type_t, std::__1::shared_ptr<Espresso::net>)
313 Espresso::load_network(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::shared_ptr<Espresso::abstract_context> const&, Espresso::compute_path, bool)
235 Espresso::reload_network_on_context(std::__1::shared_ptr<Espresso::net> const&, std::__1::shared_ptr<Espresso::abstract_context> const&, Espresso::compute_path)
226 Espresso::load_and_shape_network(std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::shared_ptr<Espresso::abstract_context> const&, Espresso::network_shape const&, Espresso::compute_path, std::__1::shared_ptr<Espresso::blob_storage_abstract> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&)
214 Espresso::load_network_layers_internal(std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::shared_ptr<Espresso::abstract_context> const&, Espresso::network_shape const&, std::__1::basic_istream<char, std::__1::char_traits<char>>, Espresso::compute_path, bool, std::__1::shared_ptr<Espresso::blob_storage_abstract> const&)
208 Espresso::run_dispatch_v2(std::__1::shared_ptr<Espresso::abstract_context>, std::__1::shared_ptr<Espresso::net>, std::__1::vector<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::allocator<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>>> const&, Espresso::network_shape const&, Espresso::compute_path const&, std::__1::basic_istream<char, std::__1::char_traits<char>>)
141 try_dispatch(std::__1::shared_ptr<Espresso::abstract_context>, std::__1::shared_ptr<Espresso::net>, std::__1::vector<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::allocator<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>>> const&, Espresso::network_shape const&, Espresso::compute_path const&, std::__1::basic_istream<char, std::__1::char_traits<char>>, Espresso::platform const&, Espresso::compute_path const&)
131 Espresso::get_net_info_ir(std::__1::shared_ptr<Espresso::abstract_context>, std::__1::shared_ptr<Espresso::net>, std::__1::vector<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::allocator<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>>> const&, Espresso::network_shape const&, Espresso::compute_path const&, Espresso::platform const&, Espresso::compute_path const&, std::__1::shared_ptr<Espresso::cpu_context_transfer_algo_t>&, std::__1::shared_ptr<Espresso::net_info_ir_t>&, std::__1::shared_ptr<Espresso::kernels_validation_status_t>&)
131 Espresso::cpu_context_transfer_algo_t::create_net_info_ir(std::__1::vector<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::allocator<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>>> const&, std::__1::shared_ptr<Espresso::abstract_context>, Espresso::network_shape const&, Espresso::compute_path, std::__1::shared_ptr<Espresso::net_info_ir_t>)
120 Espresso::cpu_context_transfer_algo_t::check_all_kernels_availability_on_context(std::__1::vector<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::allocator<std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>>> const&, std::__1::shared_ptr<Espresso::abstract_context>&, Espresso::compute_path, std::__1::shared_ptr<Espresso::net_info_ir_t>&)
120 is_kernel_available_on_engine(unsigned long, std::__1::shared_ptr<Espresso::base_kernel>, Espresso::kernel_info_t const&, std::__1::shared_ptr<Espresso::SerDes::generic_serdes_object>, std::__1::shared_ptr<Espresso::abstract_context>, Espresso::compute_path, std::__1::shared_ptr<Espresso::net_info_ir_t>, std::__1::shared_ptr<Espresso::kernels_validation_status_t>)
83 Espresso::ANECompilerEngine::mix_reshape_kernel::is_valid_for_engine(std::__1::shared_ptr<Espresso::kernels_validation_status_t>, Espresso::base_kernel::validate_for_engine_args_t const&) const
45 int ValidateLayer<ANECReshapeLayerDesc, ZinIrReshapeUnit, ZinIrReshapeUnitInfo, ANECReshapeLayerDescAlternate>(void*, ANECReshapeLayerDesc const*, ANECTensorDesc const*, unsigned long, unsigned long*, ANECReshapeLayerDescAlternate**, ANECTensorValueDesc const*)
45 void ValidateLayer_Impl<ANECReshapeLayerDesc, ZinIrReshapeUnit, ZinIrReshapeUnitInfo, ANECReshapeLayerDescAlternate>(void*, ANECReshapeLayerDesc const*, ANECTensorDesc const*, unsigned long, unsigned long*, ANECReshapeLayerDescAlternate**, ANECTensorValueDesc const*)
(...)
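For context, macOS 14 exposes read-only class properties reflecting the user's system-level reaction-effects settings; a small sketch that only observes the state (it cannot opt the app out):

import AVFoundation

// Sketch: report whether the system-level Sonoma reaction effects and
// gesture detection are enabled. These are read-only class properties;
// there is no documented per-app opt-out.
if #available(macOS 14.0, *) {
    print("reaction effects enabled:", AVCaptureDevice.reactionEffectsEnabled)
    print("gesture detection enabled:", AVCaptureDevice.reactionEffectGesturesEnabled)
}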
This code to write UIImage data as HEIC works in the iOS simulator with iOS < 17.5:
import AVFoundation
import UIKit
extension UIImage {
    public var heic: Data? { heic() }

    public func heic(compressionQuality: CGFloat = 1) -> Data? {
        let mutableData = NSMutableData()
        guard let destination = CGImageDestinationCreateWithData(mutableData, AVFileType.heic as CFString, 1, nil),
              let cgImage = cgImage else {
            return nil
        }
        let options: NSDictionary = [
            kCGImageDestinationLossyCompressionQuality: compressionQuality,
            kCGImagePropertyOrientation: cgImageOrientation.rawValue,
        ]
        CGImageDestinationAddImage(destination, cgImage, options)
        guard CGImageDestinationFinalize(destination) else { return nil }
        return mutableData as Data
    }

    public var isHeicSupported: Bool {
        (CGImageDestinationCopyTypeIdentifiers() as! [String]).contains("public.heic")
    }

    var cgImageOrientation: CGImagePropertyOrientation { .init(imageOrientation) }
}

extension CGImagePropertyOrientation {
    init(_ uiOrientation: UIImage.Orientation) {
        switch uiOrientation {
        case .up: self = .up
        case .upMirrored: self = .upMirrored
        case .down: self = .down
        case .downMirrored: self = .downMirrored
        case .left: self = .left
        case .leftMirrored: self = .leftMirrored
        case .right: self = .right
        case .rightMirrored: self = .rightMirrored
        @unknown default:
            fatalError()
        }
    }
}
But with the iOS 17.5 simulator it seems to be broken. The call to CGImageDestinationFinalize writes this error to the console:
writeImageAtIndex:962: *** CMPhotoCompressionSessionAddImage: err = kCMPhotoError_UnsupportedOperation [-16994] (codec: 'hvc1')
On physical devices it still seems to work.
Is there any known workaround for the iOS simulator?
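Not a fix, but since heic() already returns nil when CGImageDestinationFinalize fails, a fallback along these lines (assuming JPEG output is acceptable in simulator-only runs) keeps things working:

// Hypothetical fallback: prefer HEIC, drop to JPEG where the encoder fails.
func encodedData(for image: UIImage) -> Data? {
    image.heic ?? image.jpegData(compressionQuality: 0.9)
}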
When I receive the interruption-began notification (interruption type AVAudioSessionInterruptionTypeBegan), I pause music playback.
When I receive the interruption-ended notification (interruption type AVAudioSessionInterruptionTypeEnded), I resume music playback.
However, I sometimes get the error code AVAudioSessionErrorCodeCannotInterruptOthers (560557684).
If some misbehaving app is holding the audio session, the third-party app's attempt to resume music playback fails with AVAudioSessionErrorCodeCannotInterruptOthers.
In this case, can we find out which app is hogging the audio?
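A minimal sketch of how the resume path might trap that specific error (the back-off policy is an assumption):

import AVFoundation

func resumePlayback(_ play: () -> Void) {
    do {
        try AVAudioSession.sharedInstance().setActive(true)
        play()
    } catch let error as NSError
        where error.code == AVAudioSession.ErrorCode.cannotInterruptOthers.rawValue {
        // Another non-mixable session is active; back off and retry later.
        print("cannot interrupt others: \(error)")
    } catch {
        print("activation failed: \(error)")
    }
}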
Hi,
I am running into a trap. Please check the stack trace; how do I fix this?
Regards, Joël
stack-trace with ExtAudioFileWrite
Hello,
I can't wrap my head around the following problem:
I have an external USB microphone capable of sample rates of up to 500 kHz. I want to capture the samples and do analysis and display; no playback required. I cannot find a way to run the microphone at its maximum sample rate; I always get 48 kHz.
I would like to stick to AVAudioEngine if possible.
Any pointer welcome.
thx!
volker
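If this is on iOS, a sketch of asking the session for a higher hardware rate before starting the engine; the OS is free to clamp the value, so always read back the effective rate:

import AVFoundation

do {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.record)
    try session.setPreferredSampleRate(500_000)  // a request, not a guarantee
    try session.setActive(true)
    print("effective rate:", session.sampleRate)
} catch {
    print("session setup failed: \(error)")
}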
In my app, I only get one interruption notification when a phone call comes in, and nothing after that. The app uses AVAudioEngine. Is this a bug?
A very simple repro is to just register for the notification, but not do anything else with audio:
struct ContentView: View {
    var body: some View {
        VStack {
            Image(systemName: "globe")
                .imageScale(.large)
                .foregroundStyle(.tint)
            Text("Hello, world!")
        }
        .padding()
        .onReceive(NotificationCenter.default.publisher(for: AVAudioSession.interruptionNotification)) { event in
            handleAudioInterruption(event: event)
        }
    }

    private func handleAudioInterruption(event: Notification) {
        print("handleAudioInterruption")
        guard let info = event.userInfo,
              let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            print("missing the stuff")
            return
        }
        if type == .began {
            print("interruption began")
        } else if type == .ended {
            print("interruption ended")
            guard let optionsValue = info[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            if AVAudioSession.InterruptionOptions(rawValue: optionsValue).contains(.shouldResume) {
                print("should resume")
            }
        }
    }
}
And do this in the app's init:
@main
struct InterruptionsApp: App {
    init() {
        try! AVAudioSession.sharedInstance().setCategory(.playback, options: [])
        try! AVAudioSession.sharedInstance().setActive(true)
    }

    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
Hi there,
I am encountering an issue in my project, which utilizes a speech recognizer and occasionally plays audio files. The problem arises when I configure the AVAudioSession and enable voice processing: the system volume changes unexpectedly and becomes uncontrollable. Specifically, the volume is excessively loud on iPhone but quite low on iPad.
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth, .interruptSpokenAudioAndMixWithOthers])
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
try audioEngine.inputNode.setVoiceProcessingEnabled(true)
try audioEngine.outputNode.setVoiceProcessingEnabled(true)
I have provided a sample project here: Sample Project.
To reproduce the issue, please follow these steps on a real device:
Click on "Play recording" to hear the sound at normal volume.
Click on "Start recording" to set up the category and speech recognizer.
Click on "Stop recording" to stop the recording.
Click on "Play recording" again and observe that the sound volume has changed.
Thank you for your assistance.
How can I capture audio samples from the selected output device on macOS using ScreenCaptureKit?
Thank you
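A sketch of the audio path on macOS 13+, assuming capturing the system mix (rather than a specific output device) is acceptable; the AudioGrabber name is a placeholder, not API:

import ScreenCaptureKit
import CoreMedia

final class AudioGrabber: NSObject, SCStreamOutput {
    private var stream: SCStream?

    func start() async throws {
        let content = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
        guard let display = content.displays.first else { return }
        let filter = SCContentFilter(display: display, excludingWindows: [])

        let config = SCStreamConfiguration()
        config.capturesAudio = true                 // enable system-audio capture
        config.excludesCurrentProcessAudio = true   // skip our own output

        let stream = SCStream(filter: filter, configuration: config, delegate: nil)
        try stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: .main)
        try await stream.startCapture()
        self.stream = stream
    }

    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
        guard type == .audio else { return }
        // sampleBuffer carries PCM audio; hand it to the analysis code here.
    }
}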
Hi,
we have multiple threads in our CoreAudio server plugin carrying out necessary asynchronous work (namely handling USB callbacks and shuffling the required data to the IO).
Although these threads have been set up with the appropriate THREAD_TIME_CONSTRAINT_POLICY (which does improve things), on M* processors there is an extremely high, non-realtime amount of jitter of >10 ms(!).
Either the run-loop notification from the USB stack arrives that late, or the thread driving the run loop hasn't been set up to handle the callbacks in a timely manner.
Since AudioUnit threads that must meet frame deadlines can join the workgroup of the audio device, is there a similar opportunity for CoreAudio server plugin threads? And if so, how should they be set up correctly?
Thanks for any hints! Or pointing me to the docs :)
Using the hardware volume buttons on the iPhone, you have 16 steps to adjust your volume. I want to implement a volume-control slider in my app. I am updating the value of the slider using AVAudioSession.sharedInstance().outputVolume. The problem is that this returns values rounded to the nearest 0 or 5, which makes the slider jump around. (.formatted() is not causing this problem.)
You can recreate the problem using code below.
@main
struct VolumeTestApp: App {
    init() {
        try? AVAudioSession.sharedInstance().setActive(true)
    }

    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}

struct ContentView: View {
    @State private var volume = Double()
    @State private var difference = Double()

    var body: some View {
        VStack {
            Text("The volume changed by \(difference.formatted())")
            Slider(value: $volume, in: 0...1)
        }
        .onReceive(AVAudioSession.sharedInstance().publisher(for: \.outputVolume), perform: { value in
            volume = Double(value)
        })
        .onChange(of: volume) { oldValue, newValue in // Only used to make the problem more obvious
            if oldValue > newValue {
                difference = oldValue - newValue
            } else {
                difference = newValue - oldValue
            }
        }
    }
}
Here is a video of the problem in action:
https://share.icloud.com/photos/00fmp7Vq1AkRetxcIP5EXeAZA
What am I doing wrong or what can I do to avoid this?
Thank you
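One alternative sketch, under the assumption that a system-provided slider is acceptable: MPVolumeView tracks the hardware volume directly, so there is no need to mirror outputVolume by hand.

import MediaPlayer
import SwiftUI

// Hypothetical workaround: embed the system volume slider.
struct SystemVolumeSlider: UIViewRepresentable {
    func makeUIView(context: Context) -> MPVolumeView { MPVolumeView(frame: .zero) }
    func updateUIView(_ view: MPVolumeView, context: Context) {}
}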
Tested with library songs on an app targeted to Mac (Designed for iPad).
The same app running on iOS queries the same library songs and the duration is expressed correctly in seconds, as expected for the TimeInterval type.
Xcode 15.3
macOS 14.5
FB13821671
We are using a VoiceProcessingIO audio unit in our VoIP application on Mac. In certain scenarios, the AudioComponentInstanceNew call blocks for up to five seconds (at least two). We are using the following code to initialize the audio unit:
OSStatus status;
AudioComponentDescription desc;
AudioComponent inputComponent;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
inputComponent = AudioComponentFindNext(NULL, &desc);
status = AudioComponentInstanceNew(inputComponent, &unit);
We are having the issue with current macOS versions on a host of different Macs (x86 and x64 alike). It takes two to three seconds until AudioComponentInstanceNew returns.
We also see the following errors in the log multiple times:
AUVPAggregate.cpp:2560 AggInpStreamsChanged wait failed
and those right after (which I don't know if they matter to this issue):
KeystrokeSuppressorCore.cpp:44 ERROR: KeystrokeSuppressor initialization was unsuccessful. Invalid or no plist was provided. AU will be bypassed. vpStrategyManager.mm:486 Error code 2003332927 reported at GetPropertyInfo
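As a sketch of one possible mitigation (an assumption, not a confirmed fix), the asynchronous AudioComponentInstantiate API keeps a slow instantiation off the calling thread:

import AudioToolbox

var desc = AudioComponentDescription(
    componentType: kAudioUnitType_Output,
    componentSubType: kAudioUnitSubType_VoiceProcessingIO,
    componentManufacturer: kAudioUnitManufacturer_Apple,
    componentFlags: 0,
    componentFlagsMask: 0)

if let component = AudioComponentFindNext(nil, &desc) {
    // The completion handler fires once the unit is ready (or failed),
    // so the caller is never blocked for seconds.
    AudioComponentInstantiate(component, []) { unit, status in
        guard status == noErr, let unit else { return }
        _ = unit // continue the VoIP setup with `unit` here
    }
}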
I'm developing an app which uses the "System Audio Recording Only" API to capture system audio.
Is there any API to check whether the app is authorized, so I can instruct the user to grant my app this permission?
Thanks.
Just installed macOS Sequoia and observed that the mClientID and mProcessID fields in the AudioServerPlugInClientInfo structure are empty when the AddDeviceClient and RemoveDeviceClient functions of the AudioServerPlugInDriverInterface are called.
This data is essential to identify the connected client, and its absence breaks the basic functionality of the HAL plugins.
FB13858951 ticket filed.
Hello,
my app works as an AUv3 plugin.
I am interested in copying/pasting the Logic Pro chord track.
After I copy a chord track in Logic Pro and read UIPasteboard.general in the app, I can see:
["LogicPasteBoardMarker": <OS_dispatch_data: data[0x3024599c0] = { leaf, size = 1, buf = 0x10a758000 }>]
How can I access this data? Thank you.
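A speculative sketch for peeking at the raw bytes, assuming the dispatch data object bridges to Data the way dispatch data normally bridges to NSData (the payload format itself is undocumented):

import UIKit

// Speculative: try to view the pasteboard's dispatch data as plain bytes.
if let item = UIPasteboard.general.items.first,
   let value = item["LogicPasteBoardMarker"],
   let data = value as? Data {
    print(data.map { String(format: "%02x", $0) }.joined(separator: " "))
}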
Hi there, whenever I use any third-party editing software, the third clip and the clips after it have no audio. Here's how I reproduced it:
take any third-party editing software
put in 2 clips, cut one in half and delete the other half, cut a half of the 2nd clip
Any fixes?
Has anyone noticed that when you connect your phone to CarPlay, the screen shows up, but when you play audio it still comes out of the iPhone's speakers?
Hello,
I'm having a problem with my iPhone 12:
iOS version: 17.5.1
Model name: iPhone 12
When I call someone it's impossible to communicate; it seems that the microphone is deactivated or not responding.
I was able to extract the .ips file from the device data:
stacks+audiomxd-.... Here's an extract:
{"bug_type":"288","timestamp":"2024-06-16 22:44:25.00 +0200","os_version":"iPhone OS 17.5.1 (21F90)","roots_installed":0,"incident_id":"7B8604DF-3863-4760-806C-591A90A7A7A4"}
{
"crashReporterKey" : "6928f591dd9e4d26541855e6d4b6a20d408bdfd1",
"exception" : "0xbe18d1ee",
"frontmostPids" : [
34
],
"tuning" : {
},
"absoluteTime" : 2954071508880,
"product" : "iPhone13,2",
"kernel" : "Darwin Kernel Version 23.5.0: Wed May 1 20:35:15 PDT 2024; root:xnu-10063.122.3~3\/RELEASE_ARM64_T8101",
"date" : "2024-06-16 22:44:25.08 +0200",
"reason" : "XPC message timeout in -[AVAudioSessionRemoteXPCClient getProperty:propertyName:MXProperty:reply:], probably deadlocked. Writing a stackshot and terminating.",
"codeSigningMonitor" : 1,
"incident" : "7B8604DF-3863-4760-806C-591A90A7A7A4",
"build" : "iPhone OS 17.5.1 (21F90)",
"roots_installed" : 0,
"bug_type" : "288",
"pid" : 102,
"memoryStatus" : {"compressorSize":38088,"compressions":25391066,"decompressions":20835948,"busyBufferCount":3,"memoryPressureDetails":{"pagesWanted":467,"pagesReclaimed":2085},"pageSize":16384,"memoryPressure":false,"memoryPages":{"active":67555,"throttled":0,"fileBacked":60679,"wired":57187,"purgeable":3679,"inactive":65541,"free":1533,"speculative":2364}},
As you can see, one line states:
"reason" : "XPC message timeout in -[AVAudioSessionRemoteXPCClient getProperty:propertyName:MXProperty:reply:], probably deadlocked. Writing a stackshot and terminating.",
A deadlock occurs when two or more processes each wait for resources held by the others, so that none of them can progress.
In this case, it seems that the process tried to fetch an audio property but remained blocked indefinitely.
What reinforces my doubts is that at the end of the .ips there is a line that seems to give some information as to where the problem lies:
"notes" : ["Requested by audiomxd","_dyld_process_info_create(623) for resampling UUIDs failed with 1","_dyld_process_info_create(2535) for resampling UUIDs failed with 1","_dyld_process_info_create(3181) for resampling UUIDs failed with 1","_dyld_process_info_create(3183) for resampling UUIDs failed with 1","_dyld_process_info_create(3503) for resampling UUIDs failed with 1","resampled 409 of 1813 threads with truncated backtraces from 0 pids: ","resampled 625 of 37 images missing from 175 pids: 75,93,98,178,190,210,...,3627"],
That's what my research has yielded. Could you please help me?
All calls are useless because the microphone no longer works.
Regards
Hi, I'm working on a web project that uses the MediaSession API to interface with the media notification on iOS. The issue I'm experiencing occurs after pressing the play button in the media session modal: the session seems NOT to fire the event handler callback and also kills the media session itself. It's strange behaviour considering that the pause callback works fine.
audio_source = new Audio(url);

navigator.mediaSession.metadata = new MediaMetadata({
    // ... Metadata here
});

navigator.mediaSession.setActionHandler('play', (details) => {
    audio_source.play();
});

navigator.mediaSession.setActionHandler('pause', (details) => {
    audio_source.pause();
});