My code below is supposed to replay the chimes.wav file over and over at a higher pitch, but it crashes with the error shown at the bottom. Can anyone find what is causing this error?
import UIKit
import AVFoundation

class aboutViewController: UIViewController {

    var audioEngine: AVAudioEngine = AVAudioEngine()
    var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        var timePitch = AVAudioUnitTimePitch()
        timePitch.pitch = 2000

        let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.processingFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
        audioFile.readIntoBuffer(audioFileBuffer, error: nil)

        var mainMixer = audioEngine.mainMixerNode
        audioEngine.attachNode(audioFilePlayer)
        audioEngine.attachNode(timePitch)
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
        audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile.processingFormat)

        audioEngine.startAndReturnError(nil)
        audioFilePlayer.play()
        audioFilePlayer.scheduleFile(audioFile, atTime: nil, completionHandler: nil)
        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }
2014-11-10 18:34:37.746 windChimes[2350:108235] *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'player started when in a disconnected state'
*** First throw call stack:
(0x185f01e48 0x1965f40e4 0x185f01d08 0x1848726c0 0x18489a33c 0x18489975c 0x10009e638 0x10009e858 0x18a6b0e84 0x18a6b0b94 0x18a853ad4 0x18a765310 0x18a7650dc 0x18a76505c 0x18a6ada2c 0x18a005994 0x18a000564 0x18a000408 0x189fffc08 0x189fff98c 0x189ff93bc 0x185eba14c 0x185eb70d8 0x185eb74b8 0x185de51f4 0x18ef7b5a4 0x18a716784 0x1000a54f8 0x1000a5538 0x196c62a08)
libc++abi.dylib: terminating with uncaught exception of type NSException
(lldb)
The statement "player started when in a disconnected state" indicates that there is a problem with the connection chain: either the nodes were not attached to the engine, or the nodes were not linked together properly. Because both the audioFilePlayer and the timePitch nodes were attached, my guess is that the problem lies in these two lines:
audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile.processingFormat)
The connection should link all components together:
audioFilePlayer -> timePitch -> audioEngine.mainMixerNode (or outputNode)
So the connection should look like:
audioEngine.connect(audioFilePlayer, to: timePitch, format: audioFile.processingFormat)
audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile.processingFormat)
I hope this helps.
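For completeness, here is a minimal sketch of the whole corrected setup in current Swift syntax (same file and node names as the question; the throwing start() replaces startAndReturnError, and scheduling happens before play()):

import UIKit
import AVFoundation

class aboutViewController: UIViewController {

    let audioEngine = AVAudioEngine()
    let audioFilePlayer = AVAudioPlayerNode()
    let timePitch = AVAudioUnitTimePitch()

    override func viewDidLoad() {
        super.viewDidLoad()
        timePitch.pitch = 2000 // cents, as in the question

        guard let fileURL = Bundle.main.url(forResource: "chimes", withExtension: "wav"),
              let audioFile = try? AVAudioFile(forReading: fileURL),
              let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                                            frameCapacity: AVAudioFrameCount(audioFile.length))
        else { return }
        try? audioFile.read(into: buffer)

        audioEngine.attach(audioFilePlayer)
        audioEngine.attach(timePitch)
        // The chain is player -> timePitch -> main mixer:
        audioEngine.connect(audioFilePlayer, to: timePitch, format: buffer.format)
        audioEngine.connect(timePitch, to: audioEngine.mainMixerNode, format: buffer.format)

        try? audioEngine.start()
        audioFilePlayer.scheduleBuffer(buffer, at: nil, options: .loops, completionHandler: nil)
        audioFilePlayer.play()
    }
}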
This error can also happen if you connect more than one AVAudioPlayerNode to a node that only takes in one input.
For example:
let playerOne = AVAudioPlayerNode()
let playerTwo = AVAudioPlayerNode()
let reverbEffect = AVAudioUnitReverb()
engine.attach(playerOne)
engine.attach(playerTwo)
engine.attach(reverbEffect)
engine.connect(playerOne, to: reverbEffect, format: format)
engine.connect(playerTwo, to: reverbEffect, format: format)
engine.connect(reverbEffect, to: engine.outputNode, format: format)
An error will now be thrown if you try to play audio with playerOne, because it is no longer connected to any node: its output was implicitly disconnected when we called engine.connect(playerTwo, to: reverbEffect, format: format).
The fix is simple: connect both your player nodes to an AVAudioMixerNode.
let playerOne = AVAudioPlayerNode()
let playerTwo = AVAudioPlayerNode()
let mixer = AVAudioMixerNode()
let reverbEffect = AVAudioUnitReverb()
engine.attach(playerOne)
engine.attach(playerTwo)
engine.attach(mixer)
engine.attach(reverbEffect)
engine.connect(playerOne, to: mixer, format: format)
engine.connect(playerTwo, to: mixer, format: format)
engine.connect(mixer, to: reverbEffect, format: format)
engine.connect(reverbEffect, to: engine.outputNode, format: format)
Related
I'm trying to tap the microphone and also get SRC (sample-rate conversion).
import SwiftUI
import AVFoundation

@main
struct TapSilenceApp: App {
    let engine = AVAudioEngine()
    let mixer = AVAudioMixerNode()
    let mixer2 = AVAudioMixerNode()

    init() {
        let format = AVAudioFormat(standardFormatWithSampleRate: 16000, channels: 2)!
        let inputNode = engine.inputNode

        // Try using two mixers:
        engine.attach(mixer)
        engine.attach(mixer2)
        engine.connect(inputNode, to: mixer, format: nil)
        engine.connect(mixer, to: mixer2, format: format)

        mixer2.installTap(onBus: 0, bufferSize: 256, format: format) { buffer, _ in
            assert(!buffer.isSilent)
        }

        print("engine: \(engine)")
        engine.prepare()
        try! engine.start()
    }

    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
That throws an exception from AUGraphParser::InitializeActiveNodesInInputChain(ThisGraph, *GetInputNode()). (AVAudioEngine wins the award for the lousiest error messages of any Apple API.)
Interestingly, when I remove the call to installTap, it runs.
I still need the tap of course. So trying to connect mixer2 to the output, I do get the tap to run, but the buffers are silent.
Here's the version that's silent:
let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(.playAndRecord, options: .defaultToSpeaker)
} catch {
    print("Could not set audio category: \(error.localizedDescription)")
}

let format = AVAudioFormat(standardFormatWithSampleRate: 16000, channels: 2)!
let inputNode = engine.inputNode

// Try using two mixers:
engine.attach(mixer)
engine.attach(mixer2)
engine.connect(inputNode, to: mixer, format: nil)
engine.connect(mixer, to: mixer2, format: format)
engine.connect(mixer2, to: engine.mainMixerNode, format: nil)

mixer2.installTap(onBus: 0, bufferSize: 256, format: mixer2.outputFormat(forBus: 0)) { buffer, _ in
    // assert(!buffer.isSilent)
    print("max value: \(buffer.maxValue)")
}

print("engine: \(engine)")
engine.prepare()
try! engine.start()
If I just put a tap on the inputNode and remove the mixers, I do get audio input, but of course no SRC.
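One workaround worth trying (a sketch, not a drop-in fix: the 16 kHz mono target and the buffer sizing are my assumptions) is to tap the input node in its native format and do the SRC yourself with AVAudioConverter, so the engine never has to negotiate a rate change across the tap:

import AVFoundation

let engine = AVAudioEngine()
let inputNode = engine.inputNode
let inputFormat = inputNode.outputFormat(forBus: 0) // hardware format
let targetFormat = AVAudioFormat(standardFormatWithSampleRate: 16000, channels: 1)!
let converter = AVAudioConverter(from: inputFormat, to: targetFormat)!

inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { buffer, _ in
    // Size the output for the rate ratio, then convert this one tap buffer.
    let ratio = targetFormat.sampleRate / inputFormat.sampleRate
    let capacity = AVAudioFrameCount(Double(buffer.frameLength) * ratio) + 1
    guard let converted = AVAudioPCMBuffer(pcmFormat: targetFormat, frameCapacity: capacity) else { return }
    var consumed = false
    var error: NSError?
    let status = converter.convert(to: converted, error: &error) { _, outStatus in
        // Feed the tap buffer exactly once per callback.
        if consumed {
            outStatus.pointee = .noDataNow
            return nil
        }
        consumed = true
        outStatus.pointee = .haveData
        return buffer
    }
    guard status != .error, error == nil else { return }
    // `converted` now holds the resampled 16 kHz audio.
}

engine.prepare()
try? engine.start()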
Once I have connected a Bluetooth LE peripheral (headphones) to my device, how can I use it to play sound?
EDIT: I want to force the sound to play on the peripheral.
Currently I'm using this code to play sound on the device speaker:
var engine = AVAudioEngine()
var player = AVAudioPlayerNode()
var pitch = AVAudioUnitTimePitch()

override func viewDidLoad() {
    super.viewDidLoad()
    player.volume = 1.0

    let path = NSBundle.mainBundle().pathForResource("Test", ofType: "m4a")!
    let url = NSURL.fileURLWithPath(path)
    let file = try? AVAudioFile(forReading: url)
    let buffer = AVAudioPCMBuffer(PCMFormat: file!.processingFormat, frameCapacity: AVAudioFrameCount(file!.length))
    do {
        try file!.readIntoBuffer(buffer)
    } catch _ {
    }

    engine.attachNode(player)
    engine.attachNode(pitch)
    engine.connect(player, to: pitch, format: buffer.format)
    engine.connect(pitch, to: engine.mainMixerNode, format: buffer.format)

    player.scheduleBuffer(buffer, atTime: nil, options: AVAudioPlayerNodeBufferOptions.Loops, completionHandler: nil)
    engine.prepare()
    do {
        try engine.start()
    } catch _ {
    }
}
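As far as I know the engine cannot force a route by itself; routing is AVAudioSession's job, and audio streaming to headphones goes over Classic Bluetooth (A2DP) rather than the BLE link. A hedged sketch, assuming the headphones are paired for audio: with the .playback category, iOS routes to them automatically once the session is active.

import AVFoundation

let session = AVAudioSession.sharedInstance()
do {
    // With .playback, output follows the current route, so connected
    // Bluetooth headphones are picked up automatically. For .playAndRecord
    // you would add the .allowBluetoothA2DP option.
    try session.setCategory(.playback)
    try session.setActive(true)
} catch {
    print("Audio session error: \(error)")
}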
I'm using AVAudioEngine to capture the user's voice and apply some effects to it. When recording with the headphones' mic, everything goes well. But when recording with the phone's built-in mic and playing the sound back through headphones, only the left earbud has sound; it seems the built-in mic has only a single-channel input. How can I fix this issue? Here's some of my code:
func connectNode() {
    engine.connect(engine.inputNode!, to: reverbNode, format: reverbNode.outputFormatForBus(0))
    engine.connect(reverbNode, to: delayNode, format: delayNode.outputFormatForBus(0))
    engine.connect(delayNode, to: distortion, format: distortion.outputFormatForBus(0))
    engine.connect(distortion, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormatForBus(0))
    engine.connect(player, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormatForBus(0))
}

func recordToFile() {
    setSessionRecord()
    recordSetting[AVFormatIDKey] = NSNumber(unsignedInt: UInt32(kAudioFormatMPEG4AAC))
    recordSetting[AVNumberOfChannelsKey] = NSNumber(int: 2)
    var file: AVAudioFile!
    do {
        try file = AVAudioFile(forWriting: URLFor(fileName)!, settings: recordSetting)
    } catch let error as NSError {
        print("error:\(error)")
    }
    engine.mainMixerNode.installTapOnBus(0, bufferSize: 1024, format: file.processingFormat) { (buffer, time) -> Void in
        try! file.writeFromBuffer(buffer)
    }
}

func playbackRecord() {
    setSessionPlayback()
    var file: AVAudioFile!
    file = try! AVAudioFile(forReading: URLFor(fileName)!)
    let audioFormat = file.processingFormat
    let audioFrameCount = UInt32(file.length)
    let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
    try! file.readIntoBuffer(audioFileBuffer, frameCount: audioFrameCount)
    player.scheduleBuffer(audioFileBuffer, atTime: nil, options: .Interrupts, completionHandler: { print(self.player.stop()) })
    if !player.playing {
        player.play()
    } else {
        print("stop")
        player.stop()
    }
}
I found a way to fix this issue. Just modify this method to give the engine a single-channel format, and then everything will be fine.
func connectNode() {
    let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: 44100.0, channels: AVAudioChannelCount(1), interleaved: false)
    engine.connect(engine.inputNode!, to: reverbNode, format: format)
    engine.connect(reverbNode, to: delayNode, format: format)
    engine.connect(delayNode, to: distortion, format: format)
    engine.connect(distortion, to: engine.mainMixerNode, format: format)
    engine.connect(player, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormatForBus(0))
}
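An alternative sketch (current Swift syntax; the idea is the same) is to derive the format from the input node instead of hard-coding one channel at 44.1 kHz, so the chain matches whatever mic is active:

let micFormat = engine.inputNode.outputFormat(forBus: 0) // mono on the built-in mic
engine.connect(engine.inputNode, to: reverbNode, format: micFormat)
engine.connect(reverbNode, to: delayNode, format: micFormat)
engine.connect(delayNode, to: distortion, format: micFormat)
engine.connect(distortion, to: engine.mainMixerNode, format: micFormat)

The main mixer then upmixes the mono signal to the stereo output, so both earbuds get the sound.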
I'm trying to change the pitch of a sound using the AVAudioEngine() in Swift. This is my code:
func setUpEngine() {
    let fileString = NSBundle.mainBundle().pathForResource("400", ofType: "wav")
    let url = NSURL(fileURLWithPath: fileString!)
    do {
        try audioFile = AVAudioFile(forReading: url)
        print("done")
    } catch {
    }
}

var engine = AVAudioEngine()
var audioFile = AVAudioFile()
var audioPlayerNode = AVAudioPlayerNode()
var changeAudioUnitTime = AVAudioUnitTimePitch()

override func viewDidLoad() {
    setUpEngine()
    let defaults = NSUserDefaults.standardUserDefaults()

    audioPlayerNode.stop()
    engine.stop()
    engine.reset()

    engine.attachNode(audioPlayerNode)
    changeAudioUnitTime.pitch = 800
    engine.attachNode(changeAudioUnitTime)
    engine.connect(audioPlayerNode, to: changeAudioUnitTime, format: nil)
    engine.connect(changeAudioUnitTime, to: engine.outputNode, format: nil)

    audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: nil)
    engine.startAndReturnError(nil)
    audioPlayerNode.play()
The rest of my code is below (I do close the brackets).
I found most of this code online, and I get an error on the line
engine.startAndReturnError(nil)
saying "Value of type 'AVAudioEngine' has no member 'startAndReturnError'".
When I remove this line I get the following error:

AVAudioPlayerNode.mm:333: Start: required condition is false: _engine->IsRunning()
Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _engine->IsRunning()'
Any help would be greatly appreciated. I am using Swift in Xcode with a single view application.
The error is that the engine is not running. You need to reorder your operations like this:
setUpEngine()
let defaults = NSUserDefaults.standardUserDefaults()
engine.attachNode(audioPlayerNode)
engine.attachNode(changeAudioUnitTime)
engine.connect(audioPlayerNode, to: changeAudioUnitTime, format: nil)
engine.connect(changeAudioUnitTime, to: engine.outputNode, format: nil)
changeAudioUnitTime.pitch = 800
engine.prepare()
engine.start()
audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: nil)
audioPlayerNode.play()
Some time later...
engine.stop()
This is because you are running an outdated version of Xcode or an incompatible version of iOS. I had this issue; in the latest Swift version there is no such method, and they provide .start() (which throws) instead.
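So in current Swift, the equivalent of the failing line is:

do {
    try engine.start()
} catch {
    print("Could not start engine: \(error)")
}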
I'm working on an iOS app with Swift and Xcode 6. What I would like to do is play an audio file using an AVAudioEngine, and up to this point everything is OK. But how can I play it without stopping, meaning that when it finishes playing it starts again?
This is my code:
/*==================== CONFIGURES THE AVAUDIOENGINE ===========*/
audioEngine.reset() // Resets any previous configuration on the audio engine
let audioPlayerNode = AVAudioPlayerNode() // The node that will play the actual sound
audioEngine.attachNode(audioPlayerNode) // Attaches the node to the audio engine
audioEngine.connect(audioPlayerNode, to: audioEngine.outputNode, format: nil) // Connects the applause playback node to the sound output
audioPlayerNode.scheduleFile(applause.applauseFile, atTime: nil, completionHandler: nil)
audioEngine.startAndReturnError(nil)
audioPlayerNode.play() // Plays the sound
Before telling me that I should use AVAudioPlayer for this: I can't, because later I will have to use some effects and play three audio files at the same time, also repeatedly.
I found the solution in another question, asked and self-answered by @CarveDrone, so I've just copied the code he used:
class aboutViewController: UIViewController {

    var audioEngine: AVAudioEngine = AVAudioEngine()
    var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()

        let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
        println("\(filePath)")
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.processingFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
        audioFile.readIntoBuffer(audioFileBuffer, error: nil)

        var mainMixer = audioEngine.mainMixerNode
        audioEngine.attachNode(audioFilePlayer)
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
        audioEngine.startAndReturnError(nil)

        audioFilePlayer.play()
        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }
The only thing you have to change is the filePath constant. Here is the link to the original answer: Having AVAudioEngine repeat a sound
Swift 5 version, thanks to @Guillermo Barreiro:
var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

override func viewDidLoad() {
    super.viewDidLoad()

    guard let filePath: String = Bundle.main.path(forResource: "chimes", ofType: "wav") else { return }
    print("\(filePath)")
    let fileURL: URL = URL(fileURLWithPath: filePath)
    guard let audioFile = try? AVAudioFile(forReading: fileURL) else { return }
    let audioFormat = audioFile.processingFormat
    let audioFrameCount = UInt32(audioFile.length)
    guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else { return }
    do {
        try audioFile.read(into: audioFileBuffer)
    } catch {
        print("over")
    }

    let mainMixer = audioEngine.mainMixerNode
    audioEngine.attach(audioFilePlayer)
    audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    try? audioEngine.start()

    audioFilePlayer.play()
    audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options: AVAudioPlayerNodeBufferOptions.loops)
}