Check the hardware and create an AVAudioFormat for 3D audio playback

func constructOutputConnectionFormatForEnvironment() -> AVAudioFormat {
    var environmentOutputConnectionFormat: AVAudioFormat? = nil
    var numHardwareOutputChannels: AVAudioChannelCount = engine.outputNode.outputFormat(forBus: 0).channelCount
    let hardwareSampleRate: Double = engine.outputNode.outputFormat(forBus: 0).sampleRate

    // If we're connected to multichannel hardware, create a compatible multichannel format for the environment node.
    if numHardwareOutputChannels > 2 && numHardwareOutputChannels != 3 {
        if numHardwareOutputChannels > 8 {
            numHardwareOutputChannels = 8
        }
        var environmentOutputLayoutTag: AudioChannelLayoutTag = kAudioChannelLayoutTag_Stereo
        switch numHardwareOutputChannels {
        case 4:
            environmentOutputLayoutTag = kAudioChannelLayoutTag_AudioUnit_4
        case 5:
            environmentOutputLayoutTag = kAudioChannelLayoutTag_AudioUnit_5_0
        case 6:
            environmentOutputLayoutTag = kAudioChannelLayoutTag_AudioUnit_6_0
        case 7:
            environmentOutputLayoutTag = kAudioChannelLayoutTag_AudioUnit_7_0
        case 8:
            environmentOutputLayoutTag = kAudioChannelLayoutTag_AudioUnit_8
        default:
            break
        }
        let environmentOutputChannelLayout = AVAudioChannelLayout(layoutTag: environmentOutputLayoutTag)
        environmentOutputConnectionFormat = AVAudioFormat(standardFormatWithSampleRate: hardwareSampleRate,
                                                          channelLayout: environmentOutputChannelLayout!)
        multichannelOutputEnabled = true
    } else {
        environmentOutputConnectionFormat = AVAudioFormat(standardFormatWithSampleRate: hardwareSampleRate, channels: 2)
        multichannelOutputEnabled = false
    }
    return environmentOutputConnectionFormat!
}

func loadSoundIntoBuffer(_ filename: String, ofType: String = "caf", seek: Double = 0.0) -> AVAudioPCMBuffer? {
    // Build a file URL; URL(string:) on a bare path produces a schemeless URL that AVAudioFile cannot open.
    guard let path = Bundle.main.path(forResource: filename, ofType: ofType) else { return nil }
    let url = URL(fileURLWithPath: path)
    do {
        let soundFile = try AVAudioFile(forReading: url,
                                        commonFormat: .pcmFormatFloat32,
                                        interleaved: false)
        let outputBuffer = AVAudioPCMBuffer(pcmFormat: soundFile.processingFormat,
                                            frameCapacity: AVAudioFrameCount(soundFile.length))!
        // Optionally start reading from a relative position within the file (0.0 ... 1.0).
        if seek > 0.0 && seek <= 1.0 {
            soundFile.framePosition = AVAudioFramePosition(Double(soundFile.length) * seek)
        }
        try soundFile.read(into: outputBuffer)
        return outputBuffer
    } catch {
        return nil
    }
}

/** wire everything up */
func makeEngineConnections() {
    guard let environment: AVAudioEnvironmentNode = environment,
          let sound: Sound = buffers.keys.first,
          let buffer: AVAudioPCMBuffer = buffers[sound] else { return }

    // Sound-effect players -> 3D environment
    for playerNode: AVAudioPlayerNode in playerNodes {
        engine.connect(playerNode, to: environment, format: buffer.format)
    }

    // 3D environment -> SFX mixer
    let format: AVAudioFormat = constructOutputConnectionFormatForEnvironment()
    engine.connect(environment, to: sfxMixer, format: format)

    // BGM players -> pitch controller (varispeed) -> BGM mixer
    let stereoFormat: AVAudioFormat = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 2)!
    for (i, bgmPlayerNode) in bgmPlayerNodes.enumerated() {
        let varispeed = varispeeds[i]
        engine.connect(bgmPlayerNode, to: varispeed, format: stereoFormat)
        engine.connect(varispeed, to: bgmMixer, format: stereoFormat)
        varispeed.rate = 1.0
    }

    // SFX mixer -> reverb -> main mixer
    engine.connect(sfxMixer, to: reverb, format: stereoFormat)
    engine.connect(reverb, to: engine.mainMixerNode, fromBus: 0, toBus: 0, format: stereoFormat)

    // BGM mixer -> main mixer
    engine.connect(bgmMixer, to: engine.mainMixerNode, fromBus: 0, toBus: 1, format: stereoFormat)

    // Rendering algorithm
    for playerNode in playerNodes {
        playerNode.renderingAlgorithm = multichannelOutputEnabled ?
            AVAudio3DMixingRenderingAlgorithm.soundField : AVAudio3DMixingRenderingAlgorithm.sphericalHead
    }
    for bgmPlayerNode in bgmPlayerNodes {
        bgmPlayerNode.renderingAlgorithm = multichannelOutputEnabled ?
            AVAudio3DMixingRenderingAlgorithm.soundField : AVAudio3DMixingRenderingAlgorithm.sphericalHead
    }
}
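Once makeEngineConnections() has wired the graph, playing a positioned sound effect amounts to scheduling one of the preloaded buffers on an idle player node and setting its 3D position, which the AVAudioEnvironmentNode then spatializes with the rendering algorithm chosen above. The following is a minimal sketch that reuses the playerNodes and buffers properties from this class; the playSound(_:at:) name and the position parameter are illustrative, not part of the original code.

// Minimal sketch (hypothetical helper): play a preloaded sound at a 3D position
// through the environment node. Assumes the engine is already running and that
// buffers[sound] was filled in setUp().
func playSound(_ sound: Sound, at position: AVAudio3DPoint) {
    guard let buffer = buffers[sound],
          let playerNode = playerNodes.first(where: { !$0.isPlaying }) else { return }

    // AVAudioPlayerNode adopts AVAudioMixing, so its 3D position is honoured
    // by the AVAudioEnvironmentNode it is connected to.
    playerNode.position = position

    playerNode.scheduleBuffer(buffer, at: nil, options: .interrupts, completionHandler: nil)
    playerNode.play()
}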
@discardableResult func startEngine() -> Bool {
    do {
        try engine.start()
        return true
    } catch {
        return false
    }
}

func setUp() -> Bool {
    // Use the .ambient category so audio from other apps (e.g. music) keeps playing.
    do {
        try AVAudioSession.sharedInstance().setCategory(.ambient)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print(error.localizedDescription)
    }

    guard !engine.isRunning else { return false }

    // Connect the AVAudioPlayerNode and AVAudioMixerNode instances to the AVAudioEngine.
    (0..<kNumPlayerNodes).forEach { _ in
        let playerNode = AVAudioPlayerNode()
        playerNodes.append(playerNode)
        engine.attach(playerNode)

        let varispeed = AVAudioUnitVarispeed()
        varispeeds.append(varispeed)
        engine.attach(varispeed)

        let repeater = Repeater()
        repeaters.append(repeater)
    }

    bgmPlayerNodes = (0..<kNumBgmPlayerNodes).map { _ in
        let bgmPlayerNode = AVAudioPlayerNode()
        engine.attach(bgmPlayerNode)
        return bgmPlayerNode
    }

    environment = AVAudioEnvironmentNode()
    guard let environment: AVAudioEnvironmentNode = environment else { return false }

    engine.attach(sfxMixer)
    engine.attach(bgmMixer)
    engine.attach(environment)
    engine.attach(reverb)

    // Load each sound effect into a PCM buffer up front.
    Sound.allCases.forEach { sound in
        if let buffer = loadSoundIntoBuffer(sound.rawValue) {
            buffers[sound] = buffer
        }
    }

    makeEngineConnections()

    // Re-wire the graph and restart the engine when the output configuration changes
    // (e.g. headphones are plugged in or unplugged).
    let nc = NotificationCenter.default
    nc.addObserver(forName: NSNotification.Name.AVAudioEngineConfigurationChange,
                   object: engine,
                   queue: nil) { [weak self] (note: Notification) -> Void in
        guard let strongSelf = self else { return }
        /*
        strongSelf.playerNodes.forEach { playerNode in
            playerNode.pause()
        }
        strongSelf.bgmPlayerNode.stop()
        */
        strongSelf.makeEngineConnections()
        strongSelf.startEngine()
    }

    // Initial mixer volumes
    sfxMixer.outputVolume = Storage.sfxVolume
    bgmMixer.outputVolume = Storage.bgmVolume

    // Reverb settings
    reverb.wetDryMix = 5
    reverb.loadFactoryPreset(.largeChamber)

    // Start
    return startEngine()
}

Finally, the audio files are loaded into buffers, the mixer volumes are applied, and the AVAudioEngine is started.
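Once setUp() has returned true, background music can be scheduled on one of the bgmPlayerNodes, with its playback speed driven through the matching varispeed unit wired up in makeEngineConnections(). The sketch below assumes a stereo buffer already loaded with loadSoundIntoBuffer(_:ofType:seek:); the startBgm(with:rate:) name and its parameters are illustrative, not part of the original code.

// Minimal sketch (hypothetical helper): loop a background-music buffer on the
// first BGM player. `bgmBuffer` is assumed to have been loaded with
// loadSoundIntoBuffer(_:ofType:seek:).
func startBgm(with bgmBuffer: AVAudioPCMBuffer, rate: Float = 1.0) {
    guard engine.isRunning,
          let player = bgmPlayerNodes.first,
          let varispeed = varispeeds.first else { return }

    varispeed.rate = rate   // 1.0 = normal speed; lower values play slower and pitched down
    player.scheduleBuffer(bgmBuffer, at: nil, options: .loops, completionHandler: nil)
    player.play()
}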