Generating a tone in iOS with 16-bit PCM, AudioEngine.connect() throws AUSetFormat: error -10868

2023-12-20

I have the following code for generating an audio tone of a given frequency and duration. It's loosely based on this answer for doing the same thing on Android (thanks: @Steve Pomeroy):

https://stackoverflow.com/a/3731075/973364

import Foundation
import CoreAudio
import AVFoundation
import Darwin

class AudioUtil {

    class func play(frequency: Int, durationMs: Int) -> Void {
        let sampleRateHz: Double = 8000.0
        let numberOfSamples = Int((Double(durationMs) / 1000 * sampleRateHz))
        let factor: Double = 2 * M_PI / (sampleRateHz/Double(frequency))

        // Generate an array of Doubles.
        var samples = [Double](count: numberOfSamples, repeatedValue: 0.0)

        for i in 0..<numberOfSamples {
            let sample = sin(factor * Double(i))
            samples[i] = sample
        }

        // Convert to a 16 bit PCM sound array.
        var index = 0
        var sound = [Byte](count: 2 * numberOfSamples, repeatedValue: 0)

        for doubleValue in samples {
            // Scale to maximum amplitude. Int16.max is 32,767.
            var value = Int16(doubleValue * Double(Int16.max))

            // In a 16 bit wav PCM, first byte is the low order byte.
            var firstByte = Int16(value & 0x00ff)
            var secondByteHighOrderBits = Int32(value) & 0xff00
            var secondByte = Int16(secondByteHighOrderBits >> 8) // Right shift.

            // println("\(doubleValue) -> \(value) -> \(firstByte), \(secondByte)")

            sound[index++] = Byte(firstByte)
            sound[index++] = Byte(secondByte)
        }

        let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatInt16, sampleRate: sampleRateHz, channels:AVAudioChannelCount(1), interleaved: false)
        let buffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: UInt32(sound.count), mData: &sound)
        let pcmBuffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: AVAudioFrameCount(sound.count))
        let audioEngine = AVAudioEngine()
        let audioPlayer = AVAudioPlayerNode()

        audioEngine.attachNode(audioPlayer)
        // Runtime error occurs here:
        audioEngine.connect(audioPlayer, to: audioEngine.mainMixerNode, format: format)
        audioEngine.startAndReturnError(nil)

        audioPlayer.play()
        audioPlayer.scheduleBuffer(pcmBuffer, atTime: nil, options: nil, completionHandler: nil)
    }
}

The error I get at runtime when calling connect() on the AVAudioEngine is this:

ERROR:     [0x3bfcb9dc] AVAudioNode.mm:521: AUSetFormat: error -10868
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error -10868'

Am I not generating a real AVAudioCommonFormat.PCMFormatInt16?
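For reference, -10868 appears to be kAudioUnitErr_FormatNotSupported. Two things look suspect in the code above: the player is connected to the mixer using an Int16 format (the mixer connection seems to accept only the standard deinterleaved Float32 format), and the Int16 bytes are written into a standalone AudioBuffer that is never copied into pcmBuffer, whose frameCapacity is also given in bytes rather than frames and whose frameLength is never set. Below is a minimal sketch, in current Swift syntax and not verified against this exact project, that fills a genuine Int16 buffer through int16ChannelData and converts it to Float32 with AVAudioConverter before scheduling; the function name makeFloatToneBuffer is mine, not from any API.

import AVFoundation

// Sketch: generate Int16 samples directly into an AVAudioPCMBuffer via
// int16ChannelData (no manual byte splitting needed), then convert to a
// Float32 buffer at the same sample rate, since the mixer connection
// rejects Int16 (-10868) but accepts deinterleaved Float32.
func makeFloatToneBuffer(frequency: Double, durationMs: Int) -> AVAudioPCMBuffer? {
    let sampleRateHz = 8000.0
    let frameCount = AVAudioFrameCount(Double(durationMs) / 1000.0 * sampleRateHz)

    guard
        let int16Format = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                        sampleRate: sampleRateHz,
                                        channels: 1, interleaved: false),
        let floatFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                        sampleRate: sampleRateHz,
                                        channels: 1, interleaved: false),
        let int16Buffer = AVAudioPCMBuffer(pcmFormat: int16Format, frameCapacity: frameCount),
        let floatBuffer = AVAudioPCMBuffer(pcmFormat: floatFormat, frameCapacity: frameCount),
        let converter = AVAudioConverter(from: int16Format, to: floatFormat)
    else { return nil }

    // frameLength must be set explicitly; frameCapacity alone plays nothing.
    int16Buffer.frameLength = frameCount

    let samples = int16Buffer.int16ChannelData![0]
    let factor = 2.0 * Double.pi * frequency / sampleRateHz
    for i in 0..<Int(frameCount) {
        samples[i] = Int16(sin(factor * Double(i)) * Double(Int16.max))
    }

    // Same-rate format conversion only; converting the sample rate too
    // would need the callback-based convert(to:error:withInputFrom:).
    try? converter.convert(to: floatBuffer, from: int16Buffer)
    return floatBuffer
}

The player would then be connected and the buffer scheduled using floatFormat; alternatively, skip the Int16 stage entirely and write Float32 samples directly, which is what the edit below tries.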

[EDIT]

Here's another, simpler attempt using only one buffer as PCMFormatFloat32. There's no error, but no sound either.

import AVFoundation

class AudioManager:NSObject {

    let audioPlayer = AVAudioPlayerNode()

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayer)

        return engine
    }()

    func play(frequency: Int, durationMs: Int, completionBlock:dispatch_block_t!) {
        var error: NSError?

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationMs) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")

            buffer.floatChannelData.memory[i] = val * 0.5
        }

        // Audio engine
        audioEngine.connect(audioPlayer, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.debug("Error: \(error)")
        }

        // Play player and buffer
        audioPlayer.play()
        audioPlayer.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)
    }
}

Thanks to Thomas Royal (http://www.tmroyal.com/playing-sounds-in-swift-audioengine.html).


The problem was that the player was being cleaned up when it fell out of scope of the play() function, so it never finished (or barely started) playing. Here's one fairly clunky solution: sleep for as long as the samples will take to play before returning from play().

I'd accept a better answer, if anyone wants to post one, that avoids having to do this by not letting the player get cleaned up (a sketch of one such alternative appears at the end of this post).

import AVFoundation

class AudioManager: NSObject, AVAudioPlayerDelegate {

    let audioPlayerNode = AVAudioPlayerNode()

    var waveAudioPlayer: AVAudioPlayer?

    var playing: Bool! = false

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayerNode)

        return engine
    }()

    func playWaveFromBundle(filename: String, durationInSeconds: NSTimeInterval) -> Void {
        var error: NSError?
        var sound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(filename, ofType: "wav")!)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        self.waveAudioPlayer = AVAudioPlayer(contentsOfURL: sound, error: &error)
        self.waveAudioPlayer!.delegate = self

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        log.verbose("Playing \(sound)")

        self.waveAudioPlayer!.prepareToPlay()

        playing = true

        if !self.waveAudioPlayer!.play() {
            log.error("Failed to play")
        }

        // If we don't block here, the player stops as soon as this function returns. While we'd prefer to wait for audioPlayerDidFinishPlaying() to be called here, it's never called if we block here. Instead, pass in the duration of the wave file and simply sleep for that long.
        /*
        while (playing!) {
            NSThread.sleepForTimeInterval(0.1) // seconds
        }
        */

        NSThread.sleepForTimeInterval(durationInSeconds)

        log.verbose("Done")
    }

    func play(frequency: Int, durationInMillis: Int, completionBlock:dispatch_block_t!) -> Void {
        var session = AVAudioSession.sharedInstance()
        var error: NSError?

        if !session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error) {
            log.error("Error: \(error)")
            return
        }

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationInMillis) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")

            buffer.floatChannelData.memory[i] = val * 0.5
        }

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        // Audio engine
        audioEngine.connect(audioPlayerNode, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.error("Error: \(error)")
            return
        }

        // TODO: Check we're not in the background. Attempting to play audio while in the background throws:
        //   *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error 561015905'

        // Play player and schedule buffer
        audioPlayerNode.play()
        audioPlayerNode.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)

        // If we don't block here, the player stops as soon as this function returns.
        NSThread.sleepForTimeInterval(Double(durationInMillis) / 1000.0) // seconds
    }

    // MARK: AVAudioPlayerDelegate

    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        log.verbose("Success: \(flag)")

        playing = false
    }

    func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer!, error: NSError!) {
        log.verbose("Error: \(error)")

        playing = false
    }

    // MARK: NSObject overrides

    deinit {
        log.verbose("deinit")
    }

}

For context, this AudioManager is a lazily loaded property on my AppDelegate:

lazy var audioManager: AudioManager = {
    return AudioManager()
}()
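
A sketch of the alternative hinted at above (current Swift syntax; the playTone method is hypothetical, written as an extension of the AudioManager class): since the engine and player node are long-lived, the completion handler passed to scheduleBuffer can replace the sleep, either by signalling a semaphore when a caller genuinely must block, or by simply letting playback continue after the method returns.

import AVFoundation

extension AudioManager {
    // Sketch: rely on the scheduleBuffer completion handler instead of
    // sleeping for a hard-coded duration. Assumes audioEngine is already
    // running and audioPlayerNode is attached and connected, as above.
    func playTone(buffer: AVAudioPCMBuffer, blocking: Bool) {
        audioPlayerNode.play()

        if blocking {
            // Block the calling thread (avoid doing this on the main
            // thread) until the engine has consumed the buffer.
            let done = DispatchSemaphore(value: 0)
            audioPlayerNode.scheduleBuffer(buffer) { done.signal() }
            done.wait()
        } else {
            // Fire-and-forget: the engine and node outlive this call, so
            // playback continues normally after we return. Note the handler
            // fires when the buffer has been consumed, which can be
            // slightly before the last samples are actually heard.
            audioPlayerNode.scheduleBuffer(buffer) {
                print("buffer finished")
            }
        }
    }
}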