Saving Audio After Effect in iOS

I am developing an application that lets people record their voice through the app, change it, and share it. I have done most of the work, and now it's time to ask for help. Here is my play function, which plays the recorded audio file and applies effects to it.
private func playAudio(pitch : Float, rate: Float, reverb: Float, echo: Float) {
    // Initialize variables
    audioEngine = AVAudioEngine()
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayerNode)

    // Setting the pitch
    let pitchEffect = AVAudioUnitTimePitch()
    pitchEffect.pitch = pitch
    audioEngine.attachNode(pitchEffect)

    // Setting the playback-rate
    let playbackRateEffect = AVAudioUnitVarispeed()
    playbackRateEffect.rate = rate
    audioEngine.attachNode(playbackRateEffect)

    // Setting the reverb effect
    let reverbEffect = AVAudioUnitReverb()
    reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
    reverbEffect.wetDryMix = reverb
    audioEngine.attachNode(reverbEffect)

    // Setting the echo effect on a specific interval
    let echoEffect = AVAudioUnitDelay()
    echoEffect.delayTime = NSTimeInterval(echo)
    audioEngine.attachNode(echoEffect)

    // Chain all these up, ending with the output
    audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
    audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
    audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
    audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
    audioEngine.connect(echoEffect, to: audioEngine.outputNode, format: nil)

    audioPlayerNode.stop()

    let length = 4000
    let buffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: AVAudioFrameCount(length))
    buffer.frameLength = AVAudioFrameCount(length)

    try! audioEngine.start()

    let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)[0]
    let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound.m4a"))

    do {
        print(dirPaths)
        let settings = [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC), AVSampleRateKey: NSNumber(integer: 44100), AVNumberOfChannelsKey: NSNumber(integer: 2)]
        self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
        audioEngine.outputNode.installTapOnBus(0, bufferSize: AVAudioFrameCount(self.player!.duration), format: self.audioPlayerNode.outputFormatForBus(0)) {
            (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
            print(self.newAudio.length)
            print("=====================")
            print(self.audioFile.length)
            print("**************************")
            if self.newAudio.length < self.audioFile.length {
                do {
                    //print(buffer)
                    try self.newAudio.writeFromBuffer(buffer)
                } catch _ {
                    print("Problem Writing Buffer")
                }
            } else {
                self.audioPlayerNode.removeTapOnBus(0)
            }
        }
    } catch _ {
        print("Problem")
    }

    audioPlayerNode.play()
}
I guess the problem is that I call installTapOnBus on audioPlayerNode, while the effected audio is on audioEngine.outputNode. However, when I tried to installTapOnBus on audioEngine.outputNode, it gave me an error. I've also tried connecting the effects to audioEngine.mainMixerNode, but that wasn't a solution either. Do you have any experience with saving effected audio to a file? How can I get this effected audio?
Any help is appreciated.
Thank you

Here is my solution to the question:
func playAndRecord(pitch : Float, rate: Float, reverb: Float, echo: Float) {
    // Initialize variables
    // These are global variables. If you want, you can just init them here instead (let audioEngine = etc.)
    audioEngine = AVAudioEngine()
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayerNode)
    playerB = AVAudioPlayerNode()
    audioEngine.attachNode(playerB)

    // Setting the pitch
    let pitchEffect = AVAudioUnitTimePitch()
    pitchEffect.pitch = pitch
    audioEngine.attachNode(pitchEffect)

    // Setting the playback-rate
    let playbackRateEffect = AVAudioUnitVarispeed()
    playbackRateEffect.rate = rate
    audioEngine.attachNode(playbackRateEffect)

    // Setting the reverb effect
    let reverbEffect = AVAudioUnitReverb()
    reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
    reverbEffect.wetDryMix = reverb
    audioEngine.attachNode(reverbEffect)

    // Setting the echo effect on a specific interval
    let echoEffect = AVAudioUnitDelay()
    echoEffect.delayTime = NSTimeInterval(echo)
    audioEngine.attachNode(echoEffect)

    // Chain all these up, ending with the output
    audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
    audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
    audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
    audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
    audioEngine.connect(echoEffect, to: audioEngine.mainMixerNode, format: nil)

    // Good practice to stop before starting
    audioPlayerNode.stop()

    // Play the audio file
    // this player is also a global AVAudioPlayer variable
    if player != nil {
        player?.stop()
    }

    // audioFile here is our original audio
    audioPlayerNode.scheduleFile(audioFile, atTime: nil, completionHandler: {
        print("Complete")
    })

    try! audioEngine.start()

    let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)[0]
    let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound2.m4a"))

    // Save the tmpFileUrl into a global variable so we don't lose it (not important if you want to do something else)
    filteredOutputURL = tmpFileUrl

    do {
        print(dirPaths)
        self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: [
            AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatAppleLossless),
            AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue,
            AVEncoderBitRateKey: 320000,
            AVNumberOfChannelsKey: 2,
            AVSampleRateKey: 44100.0
        ])
        let length = self.audioFile.length
        audioEngine.mainMixerNode.installTapOnBus(0, bufferSize: 1024, format: self.audioEngine.mainMixerNode.inputFormatForBus(0)) {
            (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
            print(self.newAudio.length)
            print("=====================")
            print(length)
            print("**************************")
            if self.newAudio.length < length { // Let us know when to stop saving the file, otherwise it saves infinitely
                do {
                    //print(buffer)
                    try self.newAudio.writeFromBuffer(buffer)
                } catch _ {
                    print("Problem Writing Buffer")
                }
            } else {
                self.audioEngine.mainMixerNode.removeTapOnBus(0) // if we don't remove it, it will keep tapping infinitely
                // DO WHAT YOU WANT TO DO HERE WITH THE EFFECTED AUDIO
            }
        }
    } catch _ {
        print("Problem")
    }

    audioPlayerNode.play()
}

This doesn't seem to be hooked up correctly. I'm just learning all this myself, but I found that the effects are correctly added when you connect them to a mixer node. Also, you'll want to tap the mixer, not the engine output node. I've just copied your code and made a few modifications to take this into account.
private func playAudio(pitch : Float, rate: Float, reverb: Float, echo: Float) {
    // Initialize variables
    audioEngine = AVAudioEngine()
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayerNode)

    // Setting the pitch
    let pitchEffect = AVAudioUnitTimePitch()
    pitchEffect.pitch = pitch
    audioEngine.attachNode(pitchEffect)

    // Setting the playback-rate
    let playbackRateEffect = AVAudioUnitVarispeed()
    playbackRateEffect.rate = rate
    audioEngine.attachNode(playbackRateEffect)

    // Setting the reverb effect
    let reverbEffect = AVAudioUnitReverb()
    reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.Cathedral)
    reverbEffect.wetDryMix = reverb
    audioEngine.attachNode(reverbEffect)

    // Setting the echo effect on a specific interval
    let echoEffect = AVAudioUnitDelay()
    echoEffect.delayTime = NSTimeInterval(echo)
    audioEngine.attachNode(echoEffect)

    // Set up a mixer node
    let audioMixer = AVAudioMixerNode()
    audioEngine.attachNode(audioMixer)

    // Chain all these up, ending with the output
    audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: nil)
    audioEngine.connect(playbackRateEffect, to: pitchEffect, format: nil)
    audioEngine.connect(pitchEffect, to: reverbEffect, format: nil)
    audioEngine.connect(reverbEffect, to: echoEffect, format: nil)
    audioEngine.connect(echoEffect, to: audioMixer, format: nil)
    audioEngine.connect(audioMixer, to: audioEngine.outputNode, format: nil)

    audioPlayerNode.stop()

    let length = 4000
    let buffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: AVAudioFrameCount(length))
    buffer.frameLength = AVAudioFrameCount(length)

    try! audioEngine.start()

    let dirPaths: AnyObject = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)[0]
    let tmpFileUrl: NSURL = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound.m4a"))

    do {
        print(dirPaths)
        let settings = [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC), AVSampleRateKey: NSNumber(integer: 44100), AVNumberOfChannelsKey: NSNumber(integer: 2)]
        self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
        // Tap the local mixer node created above
        audioMixer.installTapOnBus(0, bufferSize: AVAudioFrameCount(self.player!.duration), format: audioMixer.outputFormatForBus(0)) {
            (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
            print(self.newAudio.length)
            print("=====================")
            print(self.audioFile.length)
            print("**************************")
            if self.newAudio.length < self.audioFile.length {
                do {
                    //print(buffer)
                    try self.newAudio.writeFromBuffer(buffer)
                } catch _ {
                    print("Problem Writing Buffer")
                }
            } else {
                audioMixer.removeTapOnBus(0)
            }
        }
    } catch _ {
        print("Problem")
    }

    audioPlayerNode.play()
}
I also had trouble getting the file formatted properly. I finally got it working when I changed the path of the output file from m4a to caf. One other suggestion is not to pass nil for the format parameter; I used audioFile.processingFormat. I hope this helps. My audio effects/mixing is functional, although I did not chain my effects, so feel free to ask questions.
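For reference, here is a minimal sketch of those two changes applied to the tap section of the code above (the .caf extension and the file's processingFormat instead of nil; names like dirPaths, audioMixer, newAudio, and audioFile are the same ones assumed in the code above):

let tmpFileUrl = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound.caf"))
// CAF happily accepts the engine's linear-PCM processing format
self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: self.audioFile.processingFormat.settings)
audioMixer.installTapOnBus(0, bufferSize: 1024, format: self.audioFile.processingFormat) {
    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
    do {
        try self.newAudio.writeFromBuffer(buffer)
    } catch {
        print("write failed: \(error)")
    }
}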

Just change the format parameter from kAudioFormatMPEG4AAC to kAudioFormatLinearPCM, and also change the file type to .caf. It will surely help, my friend.
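In other words, a sketch of that change against the question's settings dictionary:

let settings = [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatLinearPCM),
                AVSampleRateKey: NSNumber(integer: 44100),
                AVNumberOfChannelsKey: NSNumber(integer: 2)]
let tmpFileUrl = NSURL.fileURLWithPath(dirPaths.stringByAppendingPathComponent("effectedSound.caf"))
self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)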

For anyone who has the problem of having to play the audio file TWICE to save it: I just added the following line at the respective place and it solved my problem.
It might help someone in the future.
P.S.: I used the EXACT same code as the accepted answer above; I just added this one line and it solved my problem.
//Do what you want to do here with effected Audio
self.newAudio = try! AVAudioFile(forReading: tmpFileUrl)
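For clarity, a sketch of where that line lands inside the accepted answer's tap block (same names as above):

} else {
    self.audioEngine.mainMixerNode.removeTapOnBus(0)
    // Do what you want to do here with the effected audio
    self.newAudio = try! AVAudioFile(forReading: tmpFileUrl) // reopen for reading so it saves on the first play
}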

We can use this approach to shape the voice in various ways, such as aliens, men, old people, robots, or children, and it also has a playback counter:
var delayInSecond: Double = 0
if let lastRenderTime = self.audioPlayerNode.lastRenderTime,
   let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
    if let rate = rate {
        delayInSecond = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
    } else {
        delayInSecond = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
    }
    //schedule a stop timer for when audio finishes playing
    self.stopTimer = Timer(timeInterval: delayInSecond, target: self, selector: #selector(stopPlay), userInfo: nil, repeats: true)
    RunLoop.main.add(self.stopTimer, forMode: .default)
}
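For example, such character voices can be nothing more than parameter presets fed into a function like playAndRecord from the accepted answer. These values are illustrative guesses to tune by ear, not canonical settings:

// Illustrative presets (all values are assumptions; adjust by ear)
playAndRecord(pitch: 1000,  rate: 1.0, reverb: 0,  echo: 0)    // child / chipmunk
playAndRecord(pitch: -900,  rate: 0.9, reverb: 20, echo: 0)    // old man / robot-ish
playAndRecord(pitch: 0,     rate: 1.0, reverb: 80, echo: 0.3)  // distant alien ambience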

I got this after adding
self.newAudio = try! AVAudioFile(forReading: tmpFileUrl)
It returns an error like this:
Error Domain=com.apple.coreaudio.avfaudio Code=1685348671 "(null)" UserInfo={failed call=ExtAudioFileOpenURL((CFURLRef)fileURL, &_extAudioFile)}
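A hedged observation: code 1685348671 is the FourCC 'dta?' (kAudioFileInvalidFileError), which usually means the file isn't a valid audio file yet, for example because it was opened for reading before the tap finished writing it. A sketch of deferring the open until after removeTapOnBus, assuming the accepted answer's names; if the error persists, the .caf suggestion above is the usual fix:

} else {
    self.audioEngine.mainMixerNode.removeTapOnBus(0)
    do {
        // Only open for reading once the tap has stopped writing
        self.newAudio = try AVAudioFile(forReading: tmpFileUrl)
    } catch {
        print("File not readable yet: \(error)")
    }
}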

Related

How to implement band stop filter using AVAudioEngine

I am building an app that needs to perform analysis on the audio it receives from the microphone in real time. In my app, I also need to play a beep sound and start recording audio at the same time; in other words, I can't play the beep sound and then start recording. This introduces the problem of hearing the beep sound in my recording (this might be because I am playing the beep sound through the speaker, but unfortunately I cannot compromise in this regard either). Since the beep sound is just a tone of about 2350 Hz, I was wondering how I could exclude that range of frequencies (say from 2300 Hz to 2400 Hz) from my recordings and prevent it from influencing my audio samples. After doing some googling, I came up with what I think might be the solution: a band-stop filter. According to Wikipedia, "a band-stop filter or band-rejection filter is a filter that passes most frequencies unaltered, but attenuates those in a specific range to very low levels". This seems like what I need to exclude frequencies from 2300 Hz to 2400 Hz in my recordings (or at least for the first second of the recording, while the beep sound is playing). My question is: how would I implement this with AVAudioEngine? Is there a way to turn off the filter after the first second of the recording, when the beep sound is done playing, without stopping the recording?
Since I am new to working with audio with AVAudioEngine (I've always just stuck to the higher levels of AVFoundation), I followed this tutorial to help me create a class to handle all the messy stuff. This is what my code looks like:
class Recorder {
    enum RecordingState {
        case recording, paused, stopped
    }

    private var engine: AVAudioEngine!
    private var mixerNode: AVAudioMixerNode!
    private var state: RecordingState = .stopped
    private var audioPlayer = AVAudioPlayerNode()

    init() {
        setupSession()
        setupEngine()
    }

    fileprivate func setupSession() {
        let session = AVAudioSession.sharedInstance()
        //The original tutorial sets the category to .record
        //try? session.setCategory(.record)
        try? session.setCategory(.playAndRecord, options: [.mixWithOthers, .defaultToSpeaker])
        try? session.setActive(true, options: .notifyOthersOnDeactivation)
    }

    fileprivate func setupEngine() {
        engine = AVAudioEngine()
        mixerNode = AVAudioMixerNode()
        // Set volume to 0 to avoid audio feedback while recording.
        mixerNode.volume = 0
        engine.attach(mixerNode)
        //Attach the audio player node
        engine.attach(audioPlayer)
        makeConnections()
        // Prepare the engine in advance, in order for the system to allocate the necessary resources.
        engine.prepare()
    }

    fileprivate func makeConnections() {
        let inputNode = engine.inputNode
        let inputFormat = inputNode.outputFormat(forBus: 0)
        engine.connect(inputNode, to: mixerNode, format: inputFormat)

        let mainMixerNode = engine.mainMixerNode
        let mixerFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: inputFormat.sampleRate, channels: 1, interleaved: false)
        engine.connect(mixerNode, to: mainMixerNode, format: mixerFormat)

        //AudioPlayer connection
        let path = Bundle.main.path(forResource: "beep.mp3", ofType: nil)!
        let url = URL(fileURLWithPath: path)
        let file = try! AVAudioFile(forReading: url)
        engine.connect(audioPlayer, to: mainMixerNode, format: nil)
        audioPlayer.scheduleFile(file, at: nil)
    }

    //MARK: Start Recording Function
    func startRecording() throws {
        print("Start Recording!")
        let tapNode: AVAudioNode = mixerNode
        let format = tapNode.outputFormat(forBus: 0)
        let documentURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        // AVAudioFile uses the Core Audio Format (CAF) to write to disk.
        // So we're using the caf file extension.
        let file = try AVAudioFile(forWriting: documentURL.appendingPathComponent("recording.caf"), settings: format.settings)
        tapNode.installTap(onBus: 0, bufferSize: 4096, format: format, block: { (buffer, time) in
            try? file.write(from: buffer)
            print(buffer.description)
            print(buffer.stride)
            let floatArray = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength)))
        })
        try engine.start()
        audioPlayer.play()
        state = .recording
    }

    //MARK: Other recording functions
    func resumeRecording() throws {
        try engine.start()
        state = .recording
    }

    func pauseRecording() {
        engine.pause()
        state = .paused
    }

    func stopRecording() {
        // Remove existing taps on nodes
        mixerNode.removeTap(onBus: 0)
        engine.stop()
        state = .stopped
    }
}
AVAudioUnitEQ supports a band-stop filter.
Perhaps something like:
// Create an instance of AVAudioUnitEQ and connect it to the engine's main mixer
let eq = AVAudioUnitEQ(numberOfBands: 1)
engine.attach(eq)
engine.connect(eq, to: engine.mainMixerNode, format: nil)
engine.connect(player, to: eq, format: nil)
eq.bands[0].frequency = 2350
eq.bands[0].filterType = .bandStop
eq.bands[0].bypass = false
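As for switching the filter off after the first second without stopping the recording, toggling the band's bypass flag on the fly should do it. A minimal sketch, with the one-second figure as an assumption about the beep's length:

// After the beep is assumed to have finished (~1 second), bypass the band
DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
    eq.bands[0].bypass = true // full spectrum passes again; the recording keeps running
}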
A slightly more complete answer, linked to an IBAction; in this example, I use .parametric for the filter type, with more bands than required, to give a broader insight into how to use it:
@IBAction func PlayWithEQ(_ sender: Any) {
    self.engine.stop()
    self.engine = AVAudioEngine()

    let player = AVAudioPlayerNode()
    let url = Bundle.main.url(forResource: "yoursong", withExtension: "m4a")!
    let f = try! AVAudioFile(forReading: url)
    self.engine.attach(player)

    // adding eq effect node
    let effect = AVAudioUnitEQ(numberOfBands: 4)
    let bands = effect.bands
    let freq = [125, 250, 2350, 8000]
    for i in 0...(bands.count - 1) {
        bands[i].frequency = Float(freq[i])
    }

    bands[0].gain = 0.0
    bands[0].filterType = .parametric
    bands[0].bandwidth = 1

    bands[1].gain = 0.0
    bands[1].filterType = .parametric
    bands[1].bandwidth = 0.5

    // filter of interest, rejecting 2350Hz (adjust bandwidth as needed)
    bands[2].gain = -60.0
    bands[2].filterType = .parametric
    bands[2].bandwidth = 1

    bands[3].gain = 0.0
    bands[3].filterType = .parametric
    bands[3].bandwidth = 1

    self.engine.attach(effect)
    self.engine.connect(player, to: effect, format: f.processingFormat)

    let mixer = self.engine.mainMixerNode
    self.engine.connect(effect, to: mixer, format: f.processingFormat)

    player.scheduleFile(f, at: nil) {
        // delay() is a small helper (not shown) that dispatches the closure after the given seconds
        delay(0.05) {
            if self.engine.isRunning {
                self.engine.stop()
            }
        }
    }

    self.engine.prepare()
    try! self.engine.start()
    player.play()
}

Tap audio output using AVAudioEngine

I'm trying to install a tap on the output audio that is played in my app. I have no issue capturing the buffer from the microphone input, but when it comes to capturing the sound that goes through the speaker, the earpiece, or whatever the output device is, I have no success. Am I missing something?
In my example I'm trying to capture the audio buffer from an audio file that an AVPlayer is playing. But let's pretend I don't have direct access to the AVPlayer instance.
The goal is to perform Speech Recognition on an audio stream.
func catchAudioBuffers() throws {
    let audioSession = AVAudioSession.sharedInstance()
    try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: .allowBluetooth)
    try audioSession.setActive(true)

    let outputNode = audioEngine.outputNode
    let recordingFormat = outputNode.outputFormat(forBus: 0)
    outputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
        // PROCESS AUDIO BUFFER
    }

    audioEngine.prepare()
    try audioEngine.start()

    // For example I am playing an audio conversation with an AVPlayer and a local file.
    player.playSound()
}
This code results in the following crash:
AVAEInternal.h:76 required condition is false: [AVAudioIONodeImpl.mm:1057:SetOutputFormat: (_isInput)]
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _isInput'
I was facing the same problem, and after 2 days of brainstorming I found the following:
- Apple says that for AVAudioOutputNode, the tap format must be specified as nil. I'm not sure whether it's important, but in my case, which finally worked, the format was nil.
- You need to start recording, and don't forget to stop it.
- Removing the tap is really important; otherwise you will end up with a file that you can't open.
- Try to save the file with the same audio settings that you used in the source file.
Here's my code that finally worked. It was partly taken from this question: Saving Audio After Effect in iOS.
func playSound() {
    let rate: Float? = effect.speed
    let pitch: Float? = effect.pitch
    let echo: Bool? = effect.echo
    let reverb: Bool? = effect.reverb

    // initialize audio engine components
    audioEngine = AVAudioEngine()

    // node for playing audio
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine.attach(audioPlayerNode)

    // node for adjusting rate/pitch
    let changeRatePitchNode = AVAudioUnitTimePitch()
    if let pitch = pitch {
        changeRatePitchNode.pitch = pitch
    }
    if let rate = rate {
        changeRatePitchNode.rate = rate
    }
    audioEngine.attach(changeRatePitchNode)

    // node for echo
    let echoNode = AVAudioUnitDistortion()
    echoNode.loadFactoryPreset(.multiEcho1)
    audioEngine.attach(echoNode)

    // node for reverb
    let reverbNode = AVAudioUnitReverb()
    reverbNode.loadFactoryPreset(.cathedral)
    reverbNode.wetDryMix = 50
    audioEngine.attach(reverbNode)

    // connect nodes (connectAudioNodes is a helper, not shown, that chains the given nodes in order)
    if echo == true && reverb == true {
        connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.mainMixerNode, audioEngine.outputNode)
    } else if echo == true {
        connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.mainMixerNode, audioEngine.outputNode)
    } else if reverb == true {
        connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.mainMixerNode, audioEngine.outputNode)
    } else {
        connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.mainMixerNode, audioEngine.outputNode)
    }

    // schedule to play and start the engine!
    audioPlayerNode.stop()
    audioPlayerNode.scheduleFile(audioFile, at: nil) {
        var delayInSeconds: Double = 0
        if let lastRenderTime = self.audioPlayerNode.lastRenderTime,
           let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
            if let rate = rate {
                delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
            } else {
                delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
            }
        }
        // schedule a stop timer for when audio finishes playing
        self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(EditViewController.stopAudio), userInfo: nil, repeats: false)
        RunLoop.main.add(self.stopTimer!, forMode: RunLoop.Mode.default)
    }

    do {
        try audioEngine.start()
    } catch {
        showAlert(Alerts.AudioEngineError, message: String(describing: error))
        return
    }

    //Try to save
    let dirPaths: String = (NSSearchPathForDirectoriesInDomains(.libraryDirectory, .userDomainMask, true)[0]) + "/sounds/"
    let tmpFileUrl = URL(fileURLWithPath: dirPaths + "effected.caf")
    //Save the tmpFileUrl into a global variable so we don't lose it (not important if you want to do something else)
    filteredOutputURL = tmpFileUrl

    do {
        print(dirPaths)
        let settings = [AVSampleRateKey: NSNumber(value: Float(44100.0)),
                        AVFormatIDKey: NSNumber(value: Int32(kAudioFormatMPEG4AAC)),
                        AVNumberOfChannelsKey: NSNumber(value: 1),
                        AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.medium.rawValue))]
        self.newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
        let length = self.audioFile.length
        audioEngine.mainMixerNode.installTap(onBus: 0, bufferSize: 4096, format: nil) {
            (buffer: AVAudioPCMBuffer?, time: AVAudioTime!) -> Void in
            //Let us know when to stop saving the file, otherwise it saves infinitely
            if self.newAudio.length <= length {
                do {
                    try self.newAudio.write(from: buffer!)
                } catch _ {
                    print("Problem Writing Buffer")
                }
            } else {
                //if we don't remove it, it will keep tapping infinitely
                self.audioEngine.mainMixerNode.removeTap(onBus: 0)
            }
        }
    } catch {
        print("Problem")
    }

    // play the recording!
    audioPlayerNode.play()
}
@objc func stopAudio() {
    if let audioPlayerNode = audioPlayerNode {
        let engine = audioEngine
        audioPlayerNode.stop()
        engine?.mainMixerNode.removeTap(onBus: 0)
    }
    if let stopTimer = stopTimer {
        stopTimer.invalidate()
    }
    configureUI(.notPlaying)
    if let audioEngine = audioEngine {
        audioEngine.stop()
        audioEngine.reset()
    }
    isPlaying = false
}

Give priority to background music (iTunes) over AVFoundation player node

I'm using the AVFoundation framework. Whenever the player plays the buffer, my background music stops, so I used the code below to allow it to continue playing irrespective of the AVFoundation player.
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.mixWithOthers,.allowBluetooth])
try audioSession.setMode(AVAudioSessionModeDefault)
try audioSession.setActive(true)
It does work, but the problem is that the quality of the background music gets dramatically affected. The music doesn't have its bass effects anymore whenever the AVPlayer plays the buffer.
I want the background music uninterrupted while using AVPlayer. Is it possible?
Update: I added the full code in case anyone wants to check. You can feel the difference in the background iTunes music as soon as the app is opened or the session is activated when using this code.
class ViewController: UIViewController {
    var engine = AVAudioEngine()
    let audioSession = AVAudioSession.sharedInstance()
    let player = AVAudioPlayerNode()
    let mixer = AVAudioMixerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.mixWithOthers, .allowBluetooth])
            try audioSession.setMode(AVAudioSessionModeDefault)
            try audioSession.setActive(true)
        } catch {
        }

        let input = engine.inputNode
        let bus = 0
        let inputFormat = input.outputFormat(forBus: bus)
        let recordingFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 11025.0, channels: 1, interleaved: false)

        engine.attach(player)
        engine.attach(mixer)
        engine.connect(input, to: mixer, format: input.outputFormat(forBus: 0))
        engine.connect(player, to: engine.mainMixerNode, format: recordingFormat)

        mixer.installTap(onBus: bus, bufferSize: AVAudioFrameCount(inputFormat.sampleRate * 0.4), format: inputFormat, block: { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
            let converter: AVAudioConverter = AVAudioConverter(from: inputFormat, to: recordingFormat!)!
            let newbuffer = AVAudioPCMBuffer(pcmFormat: recordingFormat!, frameCapacity: AVAudioFrameCount((recordingFormat?.sampleRate)! * 0.4))
            let inputBlock: AVAudioConverterInputBlock = { (inNumPackets, outStatus) -> AVAudioBuffer? in
                outStatus.pointee = AVAudioConverterInputStatus.haveData
                let audioBuffer: AVAudioBuffer = buffer
                return audioBuffer
            }
            var error: NSError?
            converter.convert(to: newbuffer!, error: &error, withInputFrom: inputBlock)
            self.player.scheduleBuffer(newbuffer!)
        })

        do {
            try engine.start()
            player.play()
        } catch {
            print(error)
        }
    }
}
Unless this is some weird mixing quirk, the quality change you report may just be that recording categories change the default audio output device to the tiny, tinny receiver (because telephones, don't ask). Override this behaviour by adding .defaultToSpeaker to your setCategory() call:
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.mixWithOthers,.allowBluetooth, .defaultToSpeaker])
I think you need this one:
try audioSession.setCategory(AVAudioSessionCategoryAmbient)
Documentation:
https://developer.apple.com/documentation/avfoundation/avaudiosessioncategoryambient
When you use this category, audio from other apps mixes with your audio.
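On newer SDKs the same idea might look like this (a sketch; note that .ambient cannot record, so it only fits if you drop the recording requirement):

let session = AVAudioSession.sharedInstance()
do {
    // .ambient mixes with other apps' audio and never interrupts it
    try session.setCategory(.ambient, mode: .default, options: [])
    try session.setActive(true)
} catch {
    print("Audio session error: \(error)")
}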

Tap installed on audio engine only producing short files

I am working on an app that allows the user to record audio, play it back while changing the pitch, then record what they have done as a separate file.
The code seems to be working, but the new file has a duration of only 0.37 seconds (the original is 5 seconds).
I am guessing that when I write from the buffer it keeps saving over itself, thus leaving me with just the last segment. If this is my issue, how do I append to the file instead of writing over it?
let recordSettings: [String: AnyObject] = [
    AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatAppleLossless),
    AVEncoderAudioQualityKey: AVAudioQuality.Max.rawValue,
    AVEncoderBitRateKey: 320000,
    AVNumberOfChannelsKey: 2,
    AVSampleRateKey: 44100.0
]
var outputFile = AVAudioFile()
let format = NSDateFormatter()
format.dateFormat = "dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).m4a"
print(currentFileName)

let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.url2 = documentsDirectory.URLByAppendingPathComponent(currentFileName)

let inputNode = engine.inputNode
let bus = 0
engine.mainMixerNode.installTapOnBus(bus, bufferSize: 2048, format: self.engine.mainMixerNode.inputFormatForBus(0)) {
    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
    do {
        let outputFile = try AVAudioFile(forWriting: self.url2, settings: recordSettings, commonFormat: AVAudioCommonFormat.PCMFormatFloat32, interleaved: false)
        try outputFile.writeFromBuffer(buffer)
        outputFile.framePosition = outputFile.length
    } catch let error as NSError {
        NSLog("Error writing %@", error.localizedDescription)
    }
}
Updated code creating a file with a duration of 0.0:
func play() {
    let duration = CMTimeGetSeconds(AVAsset(URL: url).duration)
    print("Duration")
    print(duration)

    let file = try! AVAudioFile(forReading: url)
    let buffer = AVAudioPCMBuffer(PCMFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))
    do {
        try file.readIntoBuffer(buffer)
    } catch _ {
    }

    engine = AVAudioEngine()
    player = AVAudioPlayerNode()
    pitch.pitch = 500
    engine.attachNode(player)
    engine.attachNode(pitch)
    engine.connect(player, to: pitch, format: buffer.format)
    engine.connect(pitch, to: engine.mainMixerNode, format: nil)

    let format = NSDateFormatter()
    format.dateFormat = "dd-HH-mm-ss"
    let currentFileName = "recording-\(format.stringFromDate(NSDate())).m4a"
    print(currentFileName)

    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    self.url2 = documentsDirectory.URLByAppendingPathComponent(currentFileName)

    let outputFile = try! AVAudioFile(forWriting: url2, settings: [
        AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatAppleLossless),
        AVEncoderAudioQualityKey: AVAudioQuality.Max.rawValue,
        AVEncoderBitRateKey: 320000,
        AVNumberOfChannelsKey: 2,
        AVSampleRateKey: 44100.0
    ])

    done = false
    distortion.installTapOnBus(0, bufferSize: 2048, format: outputFile.processingFormat) {
        (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
        let dataptrptr = buffer.floatChannelData
        let dataptr = dataptrptr.memory
        let datum = dataptr[Int(buffer.frameLength) - 1]
        // Stop once playback has finished and the signal has decayed to silence
        if self.done && fabs(datum) < 0.000001 {
            print("stopping")
            self.engine.stop()
            return
        }
        do {
            try outputFile.writeFromBuffer(buffer)
        } catch let error as NSError {
            NSLog("Error writing %@", error.localizedDescription)
        }
    }

    player.scheduleBuffer(buffer, atTime: nil, options: AVAudioPlayerNodeBufferOptions.Loops, completionHandler: {
        dispatch_async(dispatch_get_main_queue(), {
            self.done = true
            self.player.stop()
            self.engine.stop()
            print("complete")
        })
    })

    engine.prepare()
    do {
        try engine.start()
        player.play()
    } catch _ {
        print("Play session Error")
    }
}
Keep in mind that the installTapOnBus handler will be called many times: every time the buffer fills up. Think of it as a loop. Thus it makes no sense to create the output file each time through that loop! You want to create the output file once and then write to it repeatedly. Thus, your overall structure needs to look like this:
let outfile = try! AVAudioFile(forWriting: outurl, settings: // ...
node.installTapOnBus(bus, bufferSize: size, format: outfile.processingFormat) {
    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
    do {
        try outfile.writeFromBuffer(buffer)
    } catch {
        print(error)
    }
}
The other thing to remember is that your buffer will fill-and-write exactly so long as the engine keeps running, so don't stop the engine prematurely (I don't know whether you're doing that, but it's important to keep in mind).
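If it helps, one pattern from the earlier answers is to let the tap itself decide when enough has been written, so nothing stops the engine early. A sketch, where expectedFrames is a placeholder for the source file's length in frames:

node.installTapOnBus(bus, bufferSize: size, format: outfile.processingFormat) {
    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) in
    if outfile.length < expectedFrames {
        do {
            try outfile.writeFromBuffer(buffer)
        } catch {
            print(error)
        }
    } else {
        node.removeTapOnBus(bus) // done writing; now it's safe to stop the engine
    }
}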

Play audio from AVAudioPCMBuffer with AVAudioEngine

I have two classes, MicrophoneHandler and AudioPlayer. I have managed to use AVCaptureSession to tap microphone data using the approved answer here, and converted the CMSampleBuffer to NSData using this function:
func sendDataToDelegate(buffer: CMSampleBuffer!) {
    let block = CMSampleBufferGetDataBuffer(buffer)
    var length = 0
    var data: UnsafeMutablePointer<Int8> = nil
    var status = CMBlockBufferGetDataPointer(block!, 0, nil, &length, &data) // TODO: check for errors
    let result = NSData(bytesNoCopy: data, length: length, freeWhenDone: false)
    self.delegate.handleBuffer(result)
}
I would now like to play the audio over the speaker by converting the NSData produced above to an AVAudioPCMBuffer and playing it with AVAudioEngine. My AudioPlayer class is as follows:
var engine: AVAudioEngine!
var playerNode: AVAudioPlayerNode!
var mixer: AVAudioMixerNode!

override init() {
    super.init()
    self.setup()
    self.start()
}

func handleBuffer(data: NSData) {
    let newBuffer = self.toPCMBuffer(data)
    print(newBuffer)
    self.playerNode.scheduleBuffer(newBuffer, completionHandler: nil)
}

func setup() {
    self.engine = AVAudioEngine()
    self.playerNode = AVAudioPlayerNode()
    self.engine.attachNode(self.playerNode)
    self.mixer = engine.mainMixerNode
    engine.connect(self.playerNode, to: self.mixer, format: self.mixer.outputFormatForBus(0))
}

func start() {
    do {
        try self.engine.start()
    } catch {
        print("error couldn't start engine")
    }
    self.playerNode.play()
}

func toPCMBuffer(data: NSData) -> AVAudioPCMBuffer {
    let audioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: 8000, channels: 2, interleaved: false) // given NSData audio format
    let PCMBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: UInt32(data.length) / audioFormat.streamDescription.memory.mBytesPerFrame)
    PCMBuffer.frameLength = PCMBuffer.frameCapacity
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: Int(PCMBuffer.format.channelCount))
    data.getBytes(UnsafeMutablePointer<Void>(channels[0]), length: data.length)
    return PCMBuffer
}
The buffer reaches the handleBuffer:buffer function when self.delegate.handleBuffer(result) is called in the first snippet above.
I am able to print(newBuffer), and see the memory locations of the converted buffers, but nothing comes out of the speakers. I can only imagine something is not consistent between the conversions to and from NSData. Any ideas? Thanks in advance.
Skip the raw NSData format
Why not use AVAudioPlayer all the way? If you positively need NSData, you can always load such data from the soundURL below. In this example, the disk buffer is something like:
let soundURL = documentDirectory.URLByAppendingPathComponent("sound.m4a")
It makes sense to record directly to a file anyway for optimal memory and resource management. You get NSData from your recording this way:
let data = NSFileManager.defaultManager().contentsAtPath(soundURL.path())
The code below is all you need:
Record
if !audioRecorder.recording {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setActive(true)
        audioRecorder.record()
    } catch {}
}
Play
if !audioRecorder.recording {
    do {
        try audioPlayer = AVAudioPlayer(contentsOfURL: audioRecorder.url)
        audioPlayer.play()
    } catch {}
}
Setup
let audioSession = AVAudioSession.sharedInstance()
do {
    try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
    try audioRecorder = AVAudioRecorder(URL: self.directoryURL()!, settings: recordSettings)
    audioRecorder.prepareToRecord()
} catch {}
Settings
let recordSettings = [AVSampleRateKey: NSNumber(float: Float(44100.0)),
                      AVFormatIDKey: NSNumber(int: Int32(kAudioFormatMPEG4AAC)),
                      AVNumberOfChannelsKey: NSNumber(int: 1),
                      AVEncoderAudioQualityKey: NSNumber(int: Int32(AVAudioQuality.Medium.rawValue))]
Download Xcode Project:
You can find this very example here. Download the full project, which records and plays on both simulator and device, from Swift Recipes.
