I have the following code:
class Audio {
    let mic: AKMicrophone!
    let ampTracker: AKAmplitudeTracker!

    init() {
        mic = AKMicrophone()
        ampTracker = AKAmplitudeTracker(mic)
        AudioKit.output = mic
        try? AudioKit.start()
    }
}

let audio = Audio()

@IBAction func start(_ sender: UIButton) {
    if audio.mic.isStopped {
        audio.mic.start()
        audio.ampTracker.start()
        for _ in 0...50 {
            print(audio.ampTracker.amplitude)
            usleep(200000)
        }
    } else {
        audio.mic.stop()
    }
}
So the microphone is working OK (I hear the signal in my headphones), but AKAmplitudeTracker always prints 0.0.
I also tried AKMicrophoneTracker, with the same result.
This is on the iPhone Simulator in Xcode.
@distapcher,
To modify the mic and ampTracker properties, their declarations should be changed to vars instead of lets:
var mic: AKMicrophone!
var ampTracker: AKAmplitudeTracker!
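A minimal sketch of the revised class (the routing comment is my own assumption, not part of the original answer):

class Audio {
    var mic: AKMicrophone!
    var ampTracker: AKAmplitudeTracker!

    init() {
        mic = AKMicrophone()
        ampTracker = AKAmplitudeTracker(mic)
        // Assumption: a tracker only updates while it is part of the running
        // signal chain, so route it (it passes audio through) to the output.
        AudioKit.output = ampTracker
        try? AudioKit.start()
    }
}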
I am developing an iOS 14 app that plays fragments of an audio file for the user to imitate. If the user wants to, the app can record the user's responses and play these back immediately. The user can also export an audio recording that comprises the original fragments, plus the user's responses.
I am using AudioKit 4.11
Because it is possible the user may never wish to take advantage of the app's recording abilities, the app initially adopts the audio session category of .playback. If the user wants to use the recording feature, the app triggers the standard Apple dialog for requesting microphone access, and if this is granted, switches the session category to .playAndRecord.
I have found that when the session category is .playback and the user has not yet granted microphone permission, I am able to listen to the app's output on a Bluetooth speaker, or on my Jabra Elite 65t Bluetooth earbuds when the app is running on a real iPhone. In the example below, this is the case when the app first runs and the user has only ever tapped "Play sound" or "Stop".
However, as soon as I tap "Play sound and record response" and grant microphone access, I am unable to listen to the app's output on a Bluetooth device, regardless of whether the session category applicable at the time is .playback (after tapping "Play sound and record response") or .playAndRecord (after tapping "Play sound") - unless I subsequently go to my phone's Privacy settings and toggle microphone access to off. Playback is available only through the phone's speaker, or through plugged in headphones.
When setting the session category of .playAndRecord I have tried invoking the .allowBluetoothA2DP option.
Apple's advice implies this should allow me to listen to my app's sound over Bluetooth in the circumstances I have described above (see https://developer.apple.com/documentation/avfoundation/avaudiosession/categoryoptions/1771735-allowbluetootha2dp). However I've not found this to be the case.
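For reference, the plain AVFoundation configuration this option feeds into looks roughly like the following (a sketch, independent of AudioKit's session helpers):

do {
    // .allowBluetoothA2DP keeps high-quality Bluetooth output available
    // while the category is .playAndRecord
    try AVAudioSession.sharedInstance().setCategory(.playAndRecord,
                                                    mode: .default,
                                                    options: [.allowBluetoothA2DP])
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Failed to configure audio session: \(error)")
}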
The code below represents a runnable app (albeit one requiring the presence of AudioKit 4.11) that illustrates the problem in a simplified form. The only elements not shown here are an NSMicrophoneUsageDescription that I added to info.plist, and the file "blues.mp3" which I imported into the project.
ContentView:
import SwiftUI
import AudioKit
import AVFoundation
struct ContentView: View {
private var pr = PlayerRecorder()
var body: some View {
VStack{
Text("Play sound").onTapGesture{
pr.setupforPlay()
pr.playSound()
}
.padding()
Text("Play sound and record response").onTapGesture{
if recordingIsAllowed() {
pr.activatePlayAndRecord()
pr.startSoundAndResponseRecording()
}
}
.padding()
Text("Stop").onTapGesture{
pr.stop()
}
.padding()
}
}
func recordingIsAllowed() -> Bool {
var retval = false
AVAudioSession.sharedInstance().requestRecordPermission { granted in
retval = granted
}
return retval
}
}
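A caveat on recordingIsAllowed() above: requestRecordPermission runs its completion handler asynchronously, so retval can be returned before the handler sets it. A sketch of a completion-based variant (the function name is hypothetical):

func checkRecordingAllowed(completion: @escaping (Bool) -> Void) {
    AVAudioSession.sharedInstance().requestRecordPermission { granted in
        // Hop back to the main queue before touching UI or audio setup
        DispatchQueue.main.async {
            completion(granted)
        }
    }
}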
PlayerRecorder:
import Foundation
import AudioKit
class PlayerRecorder {
private var mic: AKMicrophone!
private var micBooster: AKBooster!
private var mixer: AKMixer!
private var outputBooster: AKBooster!
private var player: AKPlayer!
private var playerBooster: AKBooster!
private var recorder: AKNodeRecorder!
private var soundFile: AKAudioFile!
private var twentySecondTimer = Timer()
init() {
AKSettings.defaultToSpeaker = true
AKSettings.disableAudioSessionDeactivationOnStop = true
AKSettings.notificationsEnabled = true
}
func activatePlayAndRecord() {
do {
try AKManager.shutdown()
} catch {
print("Shutdown failed")
}
setupForPlayAndRecord()
}
func playSound() {
do {
soundFile = try AKAudioFile(readFileName: "blues.mp3")
} catch {
print("Failed to open sound file")
}
do {
try player.load(audioFile: soundFile!)
} catch {
print("Player failed to load sound file")
}
if micBooster != nil{
micBooster.gain = 0.0
}
player.play()
}
func setupforPlay() {
do {
try AKSettings.setSession(category: .playback)
} catch {
print("Failed to set session category to .playback")
}
mixer = AKMixer()
outputBooster = AKBooster(mixer)
player = AKPlayer()
playerBooster = AKBooster(player)
playerBooster >>> mixer
AKManager.output = outputBooster
if !AKManager.engine.isRunning {
try? AKManager.start()
}
}
func setupForPlayAndRecord() {
AKSettings.audioInputEnabled = true
do {
try AKSettings.setSession(category: .playAndRecord)
/* Have tried the following instead of the line above, but without success
let options: AVAudioSession.CategoryOptions = [.allowBluetoothA2DP]
try AKSettings.setSession(category: .playAndRecord, options: options.rawValue)
Have also tried:
try AKSettings.setSession(category: .multiRoute)
*/
} catch {
print("Failed to set session category to .playAndRecord")
}
mic = AKMicrophone()
micBooster = AKBooster(mic)
mixer = AKMixer()
outputBooster = AKBooster(mixer)
player = AKPlayer()
playerBooster = AKBooster(player)
mic >>> micBooster
micBooster >>> mixer
playerBooster >>> mixer
AKManager.output = outputBooster
micBooster.gain = 0.0
outputBooster.gain = 1.0
if !AKManager.engine.isRunning {
try? AKManager.start()
}
}
func startSoundAndResponseRecording() {
// Start player and recorder. After 20 seconds, call a function that stops the player
// (while allowing recording to continue until user taps Stop button).
activatePlayAndRecord()
playSound()
// Force removal of any tap not previously removed with stop() call for recorder
var mixerNode: AKNode?
mixerNode = mixer
for i in 0..<8 {
mixerNode?.avAudioUnitOrNode.removeTap(onBus: i)
}
do {
recorder = try? AKNodeRecorder(node: mixer)
try recorder.record()
} catch {
print("Failed to start recorder")
}
twentySecondTimer = Timer.scheduledTimer(timeInterval: 20.0, target: self, selector: #selector(stopPlayerOnly), userInfo: nil, repeats: false)
}
func stop(){
twentySecondTimer.invalidate()
if player.isPlaying {
player.stop()
}
if recorder != nil {
if recorder.isRecording {
recorder.stop()
}
}
if AKManager.engine.isRunning {
do {
try AKManager.stop()
} catch {
print("Error occurred while stopping engine.")
}
}
print("Stopped")
}
@objc func stopPlayerOnly() {
player.stop()
if !mic.isStarted {
mic.start()
}
if !micBooster.isStarted {
micBooster.start()
}
mic.volume = 1.0
micBooster.gain = 1.0
outputBooster.gain = 0.0
}
}
Three additional lines of code near the beginning of setupForPlayAndRecord() solve the problem:
func setupForPlayAndRecord() {
AKSettings.audioInputEnabled = true
// Adding the following three lines solves the problem
AKSettings.useBluetooth = true
let categoryOptions: AVAudioSession.CategoryOptions = [.allowBluetoothA2DP]
AKSettings.bluetoothOptions = categoryOptions
do {
try AKSettings.setSession(category: .playAndRecord)
} catch {
print("Failed to set session category to .playAndRecord")
}
mic = AKMicrophone()
micBooster = AKBooster(mic)
mixer = AKMixer()
outputBooster = AKBooster(mixer)
player = AKPlayer()
playerBooster = AKBooster(player)
mic >>> micBooster
micBooster >>> mixer
playerBooster >>> mixer
AKManager.output = outputBooster
micBooster.gain = 0.0
outputBooster.gain = 1.0
if !AKManager.engine.isRunning {
try? AKManager.start()
}
}
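As far as I can tell from AudioKit 4's AKSettings (this is my reading of the framework, not documented behavior), setSession(category:) merges the Bluetooth flags into the AVAudioSession category options when it configures the session, which is why useBluetooth and bluetoothOptions must both be assigned before the setSession call for .allowBluetoothA2DP to take effect.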
The environment for this is iOS 13.6 and Swift 5. I have a very simple app that successfully plays an MP3 file in the foreground or background. I added MPRemoteCommandCenter play and pause command handlers to it. I play the sound file in the foreground and then pause it.
When I tap the play button from the lock screen, my code calls audioPlayer.play(), which returns true. I hear the sound start playing again, but the currentTime of the player does not advance. After that, the play and pause buttons on the lock screen do nothing. When I foreground the app again, the play button plays from where it was before I went to the lock screen.
Here is my AudioPlayer class:
import AVFoundation
import MediaPlayer
class AudioPlayer: RemoteAudioCommandDelegate {
var audioPlayer = AVAudioPlayer()
let remoteCommandHandler = RemoteAudioCommandHandler()
var timer:Timer!
func play(title: String) {
let path = Bundle.main.path(forResource: title, ofType: "mp3")!
let url = URL(fileURLWithPath: path)
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
try AVAudioSession.sharedInstance().setActive(true)
audioPlayer = try AVAudioPlayer(contentsOf: url)
remoteCommandHandler.delegate = self
remoteCommandHandler.enableDisableRemoteCommands(true)
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateNowPlayingInfo), userInfo: nil, repeats: true)
} catch let error as NSError {
print("error = \(error)")
}
}
func play() {
print ("audioPlayer.play() returned \(audioPlayer.play())")
}
func pause() {
audioPlayer.pause()
}
func stop() {
audioPlayer.stop()
}
func currentTime() -> TimeInterval {
return audioPlayer.currentTime
}
func setCurrentTime(_ time:TimeInterval) {
audioPlayer.currentTime = time
}
@objc func updateNowPlayingInfo() {
// Hard-code the nowPlayingInfo since this is a simple test app
var nowPlayingDict =
[MPMediaItemPropertyTitle: "Tin Man",
MPMediaItemPropertyAlbumTitle: "The Complete Greatest Hits",
MPMediaItemPropertyAlbumTrackNumber: NSNumber(value: UInt(10) as UInt),
MPMediaItemPropertyArtist: "America",
MPMediaItemPropertyPlaybackDuration: 208,
MPNowPlayingInfoPropertyPlaybackRate: NSNumber(value: 1.0 as Float)] as [String : Any]
nowPlayingDict[MPNowPlayingInfoPropertyElapsedPlaybackTime] = NSNumber(value: audioPlayer.currentTime as Double)
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingDict
}
}
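One aside on updateNowPlayingInfo(), which is my own note rather than part of the original post: the lock-screen controls key off MPNowPlayingInfoPropertyPlaybackRate, so it is common to report 0.0 while paused rather than a hard-coded 1.0, e.g. just before assigning nowPlayingInfo:

nowPlayingDict[MPNowPlayingInfoPropertyPlaybackRate] = NSNumber(value: audioPlayer.isPlaying ? 1.0 : 0.0)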
Here is my RemoteCommandHandler class:
import Foundation
import MediaPlayer
protocol RemoteAudioCommandDelegate: class {
func play()
func pause()
}
class RemoteAudioCommandHandler: NSObject {
weak var delegate: RemoteAudioCommandDelegate?
var remoteCommandCenter = MPRemoteCommandCenter.shared()
var playTarget: Any? = nil
var pauseTarget: Any? = nil
func enableDisableRemoteCommands(_ enabled: Bool) {
print("Called with enabled = \(enabled)")
remoteCommandCenter.playCommand.isEnabled = enabled
remoteCommandCenter.pauseCommand.isEnabled = enabled
if enabled {
addRemoteCommandHandlers()
} else {
removeRemoteCommandHandlers()
}
}
fileprivate func addRemoteCommandHandlers() {
print( "Entered")
if playTarget == nil {
print( "Adding playTarget")
playTarget = remoteCommandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
print("addRemoteCommandHandlers calling delegate play")
self.delegate?.play()
return .success
}
}
if pauseTarget == nil {
print( "Adding pauseTarget")
pauseTarget = remoteCommandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
print("addRemoteCommandHandlers calling delegate pause")
self.delegate?.pause()
return .success
}
}
}
fileprivate func removeRemoteCommandHandlers() {
print( "Entered")
if playTarget != nil {
print( "Removing playTarget")
remoteCommandCenter.playCommand.removeTarget(playTarget)
playTarget = nil
}
if pauseTarget != nil {
print( "Removing pauseTarget")
remoteCommandCenter.pauseCommand.removeTarget(pauseTarget)
pauseTarget = nil
}
}
}
I will gladly supply further information, because I'm baffled as to why this relatively straightforward (in my mind) code doesn't work.
Assistance is much appreciated!
After some more debugging, I found that the AVAudioPlayer started to play the sound from the lock screen, but stopped again shortly after.
I mitigated the problem by adding a Timer. The timer checks whether the last command from the user was play while the sound is not actually playing, and restarts playback if so. I also update that status when the user selects pause or when the song stops at its natural end.
I am still at a loss for an actual fix for this problem.
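A sketch of that mitigation with hypothetical names (shouldBePlaying would be set to true in play() and to false in pause(), stop(), and at the natural end of the track):

var shouldBePlaying = false
var watchdogTimer: Timer?

func startWatchdog() {
    watchdogTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
        guard let self = self else { return }
        // If the last user command was play but the player has stalled, kick it again
        if self.shouldBePlaying && !self.audioPlayer.isPlaying {
            self.audioPlayer.play()
        }
    }
}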
I'm currently making a metronome app. It uses AKMetronome at its core because AKMetronome has a callback, which I need to visualize the beat. I also want to use the callback for a customized click sound (I'll mute the original AKMetronome sound; it will still play, just silently).
I have two WAV files containing the customized click sounds. Two AKPlayers will play them, and both need to be triggered by the AKMetronome callback on every beat.
Since the AKPlayers and the AKMetronome all need to produce output, I put them into an AKMixer, like this:
let mixer = AKMixer(playerA, playerB, metronome)
AudioKit.output = mixer
and the callback will call this func:
func playAudio() {
playerA.play()
playerB.play()
}
Then, when playerA.play() is executed, it'll crash.
This is the error message:
AURemoteIO::IOThread (11): EXC_BAD_ACCESS (code=1, address=0xffff9ffffdb0e360)
If I put only one of the AKPlayer objects or the AKMetronome in the AKMixer, it works fine.
I can't make sense of the error message, and I don't know why this happens.
Any help would be appreciated.
Here is the full code:
var playerA: AKPlayer!
var playerB: AKPlayer!
var clickA: AKAudioFile?
var clickB: AKAudioFile?
var metronome: AKMetronome?
override func viewDidLoad() {
super.viewDidLoad()
prepareAudio()
metronome!.start()
}
func prepareAudio() {
clickA = try? AKAudioFile(readFileName: "Click1A.wav")
clickB = try? AKAudioFile(readFileName: "Click1B.wav")
playerA = AKPlayer(audioFile: clickA!)
playerA.buffering = .always
playerB = AKPlayer(audioFile: clickB!)
playerB.buffering = .always
//metronome
metronome = AKMetronome.init()
metronome!.subdivision = 4
metronome!.frequency1 = 1000
metronome!.frequency2 = 800
metronome!.tempo = 60
metronome!.callback = {
self.playAudio()
}
let mixer = AKMixer(playerA, playerB, metronome)
AudioKit.output = mixer
do {
try AudioKit.start()
} catch {
print("audiokit start fail!")
}
}
func playAudio() {
playerA.play()
playerB.play()
}
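One observation of my own (the original post does not include a fix): AKMetronome's callback fires on an audio-render thread, which the AURemoteIO::IOThread in the crash report hints at, so a common mitigation is to hop over to the main queue before starting the players:

metronome!.callback = { [weak self] in
    // Defer the player work off the render thread
    // (assumption: this avoids the EXC_BAD_ACCESS)
    DispatchQueue.main.async {
        self?.playAudio()
    }
}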
Early in my learning with AudioKit, while scaling it up in a larger app, I took the standard advice that AudioKit should effectively be a global singleton. I managed to build a really sophisticated prototype and all was well in the world.
Once I started to scale up and get closer to an actual release, we decided to go MVVM for our architecture and to avoid having one monstrous AudioKit singleton handle every aspect of our audio needs in the app. In short, MVVM has been incredibly elegant and has demonstrably cleaned up our code base.
In direct relation to our structure of AudioKit, it goes something like this:
AudioKit and AKMixer reside in a singleton instance, which has public functions that allow the various view models and our other audio models to attach and detach nodes (AKPlayer, AKSampler, etc.). In the minimal testing I have done, I can confirm that this works: I tried it with my AKPlayer module and it works great.
I'm running into an issue where I cannot, for the life of me, get AKNodeOutputPlot and AKMicrophone to work with each other, despite the actual code implementation being identical to my working prototypes.
My concern is: did I do the wrong thing in thinking I could modularize AudioKit and the various nodes and components that need to connect to it, or does AKNodeOutputPlot have special requirements I am not aware of?
Here is the briefest snippets of Code I can provide without overwhelming the question:
AudioKit Singleton (called in AppDelegate):
import Foundation
import AudioKit
class AudioKitConfigurator
{
static let shared: AudioKitConfigurator = AudioKitConfigurator()
private let mainMixer: AKMixer = AKMixer()
private init()
{
makeMainMixer()
configureAudioKitSettings()
startAudioEngine()
}
deinit
{
stopAudioEngine()
}
private func makeMainMixer()
{
AudioKit.output = mainMixer
}
func mainMixer(add node: AKNode)
{
mainMixer.connect(input: node)
}
func mainMixer(remove node: AKNode)
{
node.detach()
}
private func configureAudioKitSettings()
{
AKAudioFile.cleanTempDirectory()
AKSettings.defaultToSpeaker = true
AKSettings.playbackWhileMuted = true
AKSettings.bufferLength = .medium
do
{
try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
}
catch
{
AKLog("Could not set session category.")
}
}
private func startAudioEngine()
{
do
{
try AudioKit.start()
}
catch
{
AKLog("Fatal Error: AudioKit did not start!")
}
}
private func stopAudioEngine()
{
do
{
try AudioKit.stop()
}
catch
{
AKLog("Fatal Error: AudioKit did not stop!")
}
}
}
Microphone Component:
import Foundation
import AudioKit
import AudioKitUI
enum MicErrorsToThrow: String, Error
{
case recordingTooShort = "The recording was too short, just silently failing"
case audioFileFailedToUnwrap = "The Audio File failed to Unwrap from the recorder"
case recorderError = "The Recorder was unable to start recording."
case recorderCantReset = "In attempt to reset the recorder, it was unable to"
}
class Microphone
{
private var mic: AKMicrophone = AKMicrophone()
private var micMixer: AKMixer = AKMixer()
private var micBooster: AKBooster = AKBooster()
private var recorder: AKNodeRecorder!
private var recordingTimer: Timer
init()
{
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
//TODO: Need to finish the recording timer implementation, leaving blank for now
recordingTimer = Timer(timeInterval: 120, repeats: false, block: { (timer) in
})
AudioKitConfigurator.shared.mainMixer(add: micBooster)
}
deinit {
// removeComponent()
}
public func removeComponent()
{
AudioKitConfigurator.shared.mainMixer(remove: micBooster)
}
public func reset() throws
{
if recorder.isRecording
{
recorder.stop()
}
do
{
try recorder.reset()
}
catch
{
AKLog("Recorder can't reset!")
throw MicErrorsToThrow.recorderCantReset
}
}
public func setHeadphoneMonitoring()
{
// microphone will be monitored while recording
// only if headphones are plugged
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
}
/// Start recording from mic, call this function when using in conjunction with a AKNodeOutputPlot so that it can display the waveform in realtime while recording
///
/// - Parameter waveformPlot: AKNodeOutputPlot view object which displays waveform from recording
/// - Throws: The only error thrown is when the recorder can't start recording (something wrong with the microphone): MicErrorsToThrow.recorderError
public func record(waveformPlot: AKNodeOutputPlot) throws
{
waveformPlot.node = mic
do
{
try recorder.record()
// self.recordingTimer.fire()
}
catch
{
print("Error recording!")
throw MicErrorsToThrow.recorderError
}
}
/// Stop the recorder, and get the recording as an AKAudioFile, necessary to call if you are using AKNodeOutputPlot
///
/// - Parameter waveformPlot: AKNodeOutputPlot view object which displays waveform from recording
/// - Returns: AKAudioFile
/// - Throws: Two possible errors: the recording was too short (the threshold is currently 0.0 s, but should probably be around 0.5 s), MicErrorsToThrow.recordingTooShort; or the audio file could not be retrieved from the recorder, MicErrorsToThrow.audioFileFailedToUnwrap
public func stopRecording(waveformPlot: AKNodeOutputPlot) throws -> AKAudioFile
{
waveformPlot.pause()
waveformPlot.node = nil
recordingTimer.invalidate()
if let tape = recorder.audioFile
{
if tape.duration > 0.0
{
recorder.stop()
AKLog("Printing tape: CountOfFloatChannelData:\(tape.floatChannelData?.first?.count) | maxLevel:\(tape.maxLevel)")
return tape
}
else
{
//TODO: This should be gentler than an NSError; the user just managed to tap the button and tap again to record nothing. Honestly, the minimum duration should probably be 0.5 or even 1.0 seconds. But let's return some sort of "safe" error that doesn't require UI
throw MicErrorsToThrow.recordingTooShort
}
}
else
{
//TODO: need to return error here, could not recover audioFile from recorder
AKLog("Can't retrieve or unwrap audioFile from recorder!")
throw MicErrorsToThrow.audioFileFailedToUnwrap
}
}
}
Now, in my VC, the AKNodeOutputPlot is a view on the Storyboard, hooked up via an IBOutlet. It renders on screen, it's stylized per my liking, and it's definitely connected and working. Also in the VC/VM is an instance property of my Microphone component. My thinking was that upon recording, we would pass the plot object to the ViewModel, which would then call the record(waveformPlot: AKNodeOutputPlot) function of Microphone, where waveformPlot.node = mic should be sufficient to hook them up. Sadly this is not the case.
View:
class ComposerVC: UIViewController, Storyboarded
{
var coordinator: MainCoordinator?
let viewModel: ComposerViewModel = ComposerViewModel()
@IBOutlet weak var recordButton: RecordButton!
@IBOutlet weak var waveformPlot: AKNodeOutputPlot! // Here is our waveformPlot object, again confirmed rendering and styled
// MARK:- VC Lifecycle Methods
override func viewDidLoad()
{
super.viewDidLoad()
setupNavigationBar()
setupConductorButton()
setupRecordButton()
}
func setupWaveformPlot() {
waveformPlot.plotType = .rolling
waveformPlot.gain = 1.0
waveformPlot.shouldFill = true
}
override func viewDidAppear(_ animated: Bool)
{
super.viewDidAppear(animated)
setupWaveformPlot()
self.didDismissComposerDetailToRootController()
}
// Upon touching the Record Button, it in turn will talk to ViewModel which will then call Microphone module to record and hookup waveformPlot.node = mic
@IBAction func tappedRecordView(_ sender: Any)
{
self.recordButton.recording.toggle()
self.recordButton.animateToggle()
self.viewModel.tappedRecord(waveformPlot: waveformPlot)
{ (waveformViewModel, error) in
if let waveformViewModel = waveformViewModel
{
self.segueToEditWaveForm()
self.performSegue(withIdentifier: "composerToEditWaveForm", sender: waveformViewModel)
//self.performSegue(withIdentifier: "composerToDetailSegue", sender: self)
}
}
    }
}
ViewModel:
import Foundation
import AudioKit
import AudioKitUI
class ComposerViewModel: ViewModelProtocol
{
//MARK:- Instance Variables
var recordingState: RecordingState
var mic: Microphone = Microphone()
init()
{
self.recordingState = .readyToRecord
}
func resetViewModel()
{
self.resetRecorder()
}
func resetRecorder()
{
do
{
try mic.reset()
}
catch let error as MicErrorsToThrow
{
switch error {
case .audioFileFailedToUnwrap:
print(error)
case .recorderCantReset:
print(error)
case .recorderError:
print(error)
case .recordingTooShort:
print(error)
}
}
catch {
print("Secondary catch in start recording?!")
}
recordingState = .readyToRecord
}
func tappedRecord(waveformPlot: AKNodeOutputPlot, completion: ((EditWaveFormViewModel?, Error?) -> ())? = nil)
{
switch recordingState
{
case .readyToRecord:
self.startRecording(waveformPlot: waveformPlot)
case .recording:
self.stopRecording(waveformPlot: waveformPlot, completion: completion)
case .finishedRecording: break
}
}
func startRecording(waveformPlot: AKNodeOutputPlot)
{
recordingState = .recording
mic.setHeadphoneMonitoring()
do
{
try mic.record(waveformPlot: waveformPlot)
}
catch let error as MicErrorsToThrow
{
switch error {
case .audioFileFailedToUnwrap:
print(error)
case .recorderCantReset:
print(error)
case .recorderError:
print(error)
case .recordingTooShort:
print(error)
}
}
catch {
print("Secondary catch in start recording?!")
}
    }
}
I'm happy to provide more code, but I just don't want to overwhelm anyone. The logic seems sound; I just feel I'm missing something obvious, or have a complete misunderstanding of AudioKit + AKNodeOutputPlot + AKMicrophone.
Any ideas are so welcome, thank you!
EDIT
AudioKit 4.6 fixed all the issues! Highly encourage MVVM/Modularization of AudioKit for your projects!
====
So after a lot of experimenting, I have come to a few conclusions:
In a separate project, I brought over my AudioKitConfigurator and Microphone classes, initialized them, hooked them up to an AKNodeOutputPlot, and it worked flawlessly.
In my very large project, no matter what I do, I cannot get the same classes to work at all.
For now, I am reverting back to an old build, slowly adding components until it breaks again, and will update the architecture one by one, as this problem is too complex and might be interacting with some other libraries. I have also downgraded from AudioKit 4.5.6, to AudioKit 4.5.3.
This is not a solution, but the only one that is workable right now. The good news is, it is entirely possible to format AudioKit to work with an MVVM architecture.
I have a system which uses various classes from AudioKit to record the microphone input and save it to file, with a maximum duration of 30s, and simultaneously during recording the output waveform is plotted onto a waveform plot using EZAudioPlot.
My problem is that I am using a Snapchat-style recording button that begins recording/plotting on a touch-down event and stops recording/plotting on a touch-up (inside or outside) event. This means that when a user keeps holding the record button longer than the maximum duration (as they sometimes will), the waveform of the microphone output keeps being plotted even though the recorder has finished. I am asking whether there is a way in Swift/AudioKit to constantly 'listen' for the recorder to stop recording, akin to something like
while true
{
if recorder.isRecording() == false
{
plot.pause()
}
}
after the button has been pressed but before it has been released? If there were some way to listen indefinitely for the recorder to have finished between the button press and release, it would very easily solve my problem. Does such functionality exist in some form in Swift or AudioKit?
My code is as follows (irrelevant code omitted):
import UIKit
import AudioKit
import AudioKitUI
// Define maximum recording time in seconds
let maxRecordingTime = 30.0
class ViewController: UIViewController
{
var microphone : AKMicrophone!
var mixer : AKMixer!
var waveformBooster: AKBooster!
var outputBooster : AKBooster!
var exportTape : AKAudioFile!
var recorder : AKNodeRecorder!
var player : AKClipPlayer!
var circleView : CircleView!
var plot : AKNodeOutputPlot!
@IBOutlet var startRecordingButton: CircularButton!
@IBOutlet var playRecordingButton: UIButton!
@IBOutlet var waveformPlot: EZAudioPlot!
override func viewDidLoad()
{
super.viewDidLoad()
microphone = AKMicrophone()
mixer = AKMixer(microphone)
// Initialise booster to set monitoring level to zero - this ensures that
// microphone output is recorded but not sent to main audio output
outputBooster = AKBooster(mixer)
outputBooster.gain = 0
// Initialise booster to set waveform display gain so that waveform can be set to display
// only when the app is recording
waveformBooster = AKBooster(microphone)
waveformBooster.gain = 0
AudioKit.output = outputBooster
try! AudioKit.start()
// Initialise file to store recorder output and set recorder to route mixer
// output to file
exportTape = try! AKAudioFile(name: "ExportTape")
recorder = try! AKNodeRecorder(node: mixer, file: exportTape)
recorder.durationToRecord = maxRecordingTime
plot = AKNodeOutputPlot(waveformBooster, frame: waveformPlot.bounds)
setupWaveformPlot()
}
@IBAction func startRecording(_ sender: UIButton)
{
// Delete contents of output file so it can be rewritten
try! recorder.reset()
// Perform various tasks related to getting the plot ready
// to be rewritten to in the event of several recordings being made
updateWaveformPlot()
// Start the microphone and un-mute the waveform booster so the plot can draw
microphone.start()
waveformBooster.gain = 1
animateRecordingButton()
do
{
try recorder?.record()
} catch
{
AKLog("Couldn't record")
}
}
@IBAction func stopRecording(_ sender: UIButton)
{
microphone.stop()
waveformBooster.gain = 0
recorder.stop()
plot.pause()
}
@IBAction func playRecording(_ sender: UIButton)
{
let player = try! AKAudioPlayer(file: exportTape)
if player.isStarted == false
{
AudioKit.output = player
player.play()
}
}
func setupWaveformPlot()
{
plot.plotType = .rolling
plot.clipsToBounds = true
plot.shouldFill = true
plot.shouldMirror = true
plot.color = UIColor.white
plot.backgroundColor = UIColor.black
// Set the gain of the plot to control range of values displayed on the waveform plot
plot.gain = 25
waveformPlot.addSubview(plot)
}
func updateWaveformPlot()
{
plot.clear()
plot.resume()
// Set rolling history length to correct value for 30s
// such that waveform fills whole plot with no scrolling
plot.setRollingHistoryLength(1284)
}
}
I ended up implementing the behaviour I was after with a Timer, something like:
var recordingTimer: Timer?
let maxRecordingTime = 30.0

if recordingTimer == nil
{
    recordingTimer = Timer.scheduledTimer(withTimeInterval: maxRecordingTime, repeats: false)
    { timer in
        self.plot.pause()
    }
}
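To keep the touch-up path consistent when the user releases the button before the limit, the same timer can be invalidated in the stop handler; a sketch against the stopRecording action shown earlier:

@IBAction func stopRecording(_ sender: UIButton)
{
    recordingTimer?.invalidate()
    recordingTimer = nil
    microphone.stop()
    waveformBooster.gain = 0
    recorder.stop()
    plot.pause()
}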