AVFoundation / C5 note is not playing properly

Hello,

I'm trying to play some waveforms I'm downloading from a seismometer, and the sound is not good.

So I decided to generate a simple wave (a C5 note, 523.25 Hz) and play it, and it does not work either.

Here is my code:

Code Block swift
import AVFoundation
import Combine

class ContentViewModel: ObservableObject {
    let audioEngine: AVAudioEngine
    let player: AVAudioPlayerNode
    let data: [Double]
    let sampleRate: Double

    init() {
        let sinFrequency: Double = 523.25 /* C5 */
        let sampleRate: Double = 44100
        let seconds: Double = 5
        let range = 0 ..< Int(seconds * sampleRate)
        self.data = range.map { sin(2.0 * .pi * Double($0) * sinFrequency / sampleRate) }
        self.sampleRate = sampleRate

        audioEngine = AVAudioEngine()
        let _ = audioEngine.mainMixerNode // touching mainMixerNode makes the engine build its output chain
        audioEngine.prepare()
        try! audioEngine.start()
        try! AVAudioSession.sharedInstance().setCategory(.playback)

        self.player = AVAudioPlayerNode()
        audioEngine.attach(player)
    }

    // Scales the [-1, 1] samples to the integer range and copies them
    // into the buffer's first channel.
    func copyBuffer<T: FixedWidthInteger>(data: [Double], buffer: AVAudioPCMBuffer, channelData: UnsafePointer<UnsafeMutablePointer<T>>) {
        buffer.frameLength = buffer.frameCapacity
        let buffData = data.map { T(Double(T.max) * $0) }
        memcpy(channelData[0], buffData, Int(buffer.frameCapacity) * MemoryLayout<T>.size)
    }

    enum BufferType {
        case int16
        case int32
    }

    func createBuffer(for type: BufferType) -> AVAudioPCMBuffer {
        switch type {
        case .int16:
            guard
                let inputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 1, interleaved: false),
                let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: UInt32(data.count)),
                let channelData = buffer.int16ChannelData
            else {
                fatalError()
            }
            copyBuffer(data: data, buffer: buffer, channelData: channelData)
            return buffer
        case .int32:
            guard
                let inputFormat = AVAudioFormat(commonFormat: .pcmFormatInt32, sampleRate: sampleRate, channels: 1, interleaved: false),
                let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: UInt32(data.count)),
                let channelData = buffer.int32ChannelData
            else {
                fatalError()
            }
            copyBuffer(data: data, buffer: buffer, channelData: channelData)
            return buffer
        }
    }

    func play(for type: BufferType) {
        let buffer = createBuffer(for: type)
        let linkFormat = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)
        audioEngine.connect(player, to: audioEngine.mainMixerNode, format: linkFormat)
        audioEngine.prepare()
        audioEngine.mainMixerNode.outputVolume = 0.5
        player.scheduleBuffer(buffer, at: nil, options: .interrupts, completionHandler: nil)
        if !player.isPlaying {
            player.play()
        }
    }
}


You can hear the reference note by searching YouTube for "Middle C Sine Wave for Ten Hours - 261.6 hertz" (the title is wrong; that video actually plays C5).

Could you please tell me why my output does not sound like the real C5 note?

Thanks!!!

You can drive it with this simple SwiftUI ContentView:

Code Block swift
import SwiftUI

struct ContentView: View {
    @StateObject var viewModel = ContentViewModel()

    var body: some View {
        VStack {
            Spacer()
            HStack {
                Button("Play Int16") {
                    viewModel.play(for: .int16)
                }
                Button("Play Int32") {
                    viewModel.play(for: .int32)
                }
            }
            Spacer()
        }
    }
}
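
A note on the code above: createBuffer produces a .pcmFormatInt16 buffer, but play(for:) connects the player to the mixer with a Float32 standard format, and AVAudioPlayerNode expects scheduled buffers to match the format of its output connection. In case that mismatch is the culprit, here is a minimal sketch of the same test kept in Float32 end to end (untested; playFloat32 is a hypothetical addition to the ContentViewModel above, reusing its data, sampleRate, audioEngine, and player):

Code Block swift
// Hypothetical Float32 variant of play(for:) for the ContentViewModel above.
func playFloat32() {
    guard
        let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1),
        let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: UInt32(data.count)),
        let channelData = buffer.floatChannelData
    else {
        fatalError()
    }
    buffer.frameLength = buffer.frameCapacity
    // Float32 samples already live in [-1, 1]; no integer scaling is needed.
    for (i, sample) in data.enumerated() {
        channelData[0][i] = Float(sample)
    }
    // Connect with the buffer's own format so the scheduled data and the
    // connection agree on the sample layout.
    audioEngine.connect(player, to: audioEngine.mainMixerNode, format: buffer.format)
    player.scheduleBuffer(buffer, at: nil, options: .interrupts, completionHandler: nil)
    if !player.isPlaying {
        player.play()
    }
}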


Answered by heltena in 638480022
I replaced AVAudioPlayerNode with AVAudioSourceNode (see Apple's BuildingASignalGenerator sample).

My new code, for the record:

Code Block swift
import AVFoundation
import Combine

class ContentViewModel: ObservableObject {
    let audioEngine: AVAudioEngine
    var sourceNode: AVAudioSourceNode?
    let downMixer: AVAudioMixerNode
    let data: [Float]
    let sampleRate: Float

    init() {
        let sinFrequency: Float = 523.25 /* C5 */
        let sampleRate: Float = 44100
        let seconds: Float = 10
        let range = 0 ..< Int(seconds * sampleRate)
        self.data = range.map { sin(2.0 * .pi * Float($0) * sinFrequency / sampleRate) }
        self.sampleRate = sampleRate

        audioEngine = AVAudioEngine()
        downMixer = AVAudioMixerNode()
        let _ = audioEngine.mainMixerNode // touching mainMixerNode makes the engine build its output chain
        audioEngine.prepare()
        try! audioEngine.start()
        audioEngine.attach(downMixer)
        audioEngine.connect(downMixer, to: audioEngine.mainMixerNode, format: nil)
    }

    var isPlaying: Bool { sourceNode != nil }
    @Published private(set) var totalSeconds: Float = 0
    @Published private(set) var currentTime: Float = 0

    func stopCurrent() {
        if let sourceNode = sourceNode {
            audioEngine.detach(sourceNode)
            self.sourceNode = nil
        }
        self.totalSeconds = 0
        self.currentTime = 0
        self.objectWillChange.send()
    }

    func play(speedUpFactor: Float) {
        let playingSampleRate = sampleRate * speedUpFactor
        guard
            let dataFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: Double(playingSampleRate), channels: 1, interleaved: false)
        else {
            fatalError()
        }
        stopCurrent()
        self.totalSeconds = Float(data.count) / playingSampleRate
        self.currentTime = 0

        var index: Int = 0
        let sourceNode = AVAudioSourceNode(format: dataFormat) { [self] _, _, frameCount, audioBufferList -> OSStatus in
            let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
            // Copy the remaining precomputed samples into every channel buffer.
            let remainValues = min(Int(frameCount), data.count - index)
            for frame in 0 ..< remainValues {
                let value = index < data.count ? data[index] : 0.0
                index += 1
                for buffer in ablPointer {
                    let buf: UnsafeMutableBufferPointer<Float> = UnsafeMutableBufferPointer(buffer)
                    buf[frame] = value
                }
            }
            DispatchQueue.main.async {
                self.currentTime += Float(remainValues) / playingSampleRate
            }
            // Out of data: pad the rest of this render cycle with silence, then stop.
            if remainValues < Int(frameCount) {
                for frame in remainValues ..< Int(frameCount) {
                    for buffer in ablPointer {
                        let buf: UnsafeMutableBufferPointer<Float> = UnsafeMutableBufferPointer(buffer)
                        buf[frame] = 0.0
                    }
                }
                DispatchQueue.main.async {
                    stopCurrent()
                }
            }
            return noErr
        }
        self.sourceNode = sourceNode
        audioEngine.attach(sourceNode)
        audioEngine.connect(sourceNode, to: downMixer, format: nil)
        self.objectWillChange.send()
    }
}


And the SwiftUI view:

Code Block swift
import SwiftUI

struct ContentView: View {
    @StateObject var viewModel = ContentViewModel()

    var body: some View {
        VStack {
            Spacer()
            HStack {
                Button("Play x1") {
                    viewModel.play(speedUpFactor: 1)
                }
                Button("Play x2") {
                    viewModel.play(speedUpFactor: 2)
                }
                Button("Stop") {
                    viewModel.stopCurrent()
                }
            }
            if viewModel.isPlaying {
                Text("Playing: \(viewModel.currentTime) of \(viewModel.totalSeconds)")
            }
            Spacer()
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
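
For comparison, the BuildingASignalGenerator sample mentioned above computes each sample inside the render block instead of precomputing an array, which avoids holding ten seconds of samples in memory. A condensed sketch of that idea; the identifiers and the fixed 44.1 kHz rate here are illustrative, not taken from the sample:

Code Block swift
// Illustrative: generate the C5 sine on the fly, tracking phase across render callbacks.
var phase: Float = 0
let phaseIncrement = 2 * Float.pi * 523.25 / 44100 // assumes a 44.1 kHz output format
let toneNode = AVAudioSourceNode { _, _, frameCount, audioBufferList -> OSStatus in
    let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
    for frame in 0 ..< Int(frameCount) {
        let value = sin(phase)
        phase += phaseIncrement
        if phase >= 2 * Float.pi { phase -= 2 * Float.pi } // wrap so the Float argument stays small
        for buffer in ablPointer {
            let buf = UnsafeMutableBufferPointer<Float>(buffer)
            buf[frame] = value
        }
    }
    return noErr
}
// Attach and connect toneNode the same way as sourceNode above; the engine
// keeps pulling samples from it for as long as it stays connected.

Wrapping the phase back into [0, 2π) each step also sidesteps the Float precision loss that a large running sample index (like Float($0) over ten seconds of frames) can introduce.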

