AudioKit Audiobus MVC No Sound

I have an AudioKit prototype working that uses Audiobus on iOS. I am loading an effects delay, input, AudioKit output, mixer, etc. in a view controller, and everything works as expected.
I am now trying to refactor to the MVC pattern. I created a Conductor class and a delay class, and moved the AudioKit start, Audiobus start, etc. into the conductor. I get no errors on build or run, but I also get no audio passing through at this point.
The delay is just the example MultiDelay, set up as a class deriving from AKNode and AKInput.
Am I overlooking something simple here?
Example MVC AudioKit
ViewController ...
import AudioKit
import AudioKitUI
import UIKit
class ViewController: UIViewController {
// Create instance of Conductor
let conductor = Conductor.sharedInstance
/// Views
let stackView = UIStackView()
/// Views
override func viewDidLoad() {
super.viewDidLoad()
setupUI()
}
func setupUI() {
stackView.axis = .vertical
stackView.distribution = .fillEqually
stackView.alignment = .fill
stackView.translatesAutoresizingMaskIntoConstraints = false
stackView.spacing = 10
/// Delay
// Simple Delay
// Range and Type = Delay time in seconds (Default: 1)
stackView.addArrangedSubview(AKRotaryKnob(
property: "Simple Delay Time",
value: conductor.delay.time,
format: "%0.2f s") { sliderValue in
self.conductor.delay.time = sliderValue
})
// Range and Type = Feedback (Normalized Value) ranges from 0 to 1 (Default: 0.5)
stackView.addArrangedSubview(AKRotaryKnob(
property: "Simple Delay Feedback",
value: conductor.delay.feedback,
range: 0.0 ... 0.99,
format: "%0.2f s") { sliderValue in
self.conductor.delay.feedback = sliderValue
})
// Range and Type = Dry/Wet Mix (Normalized Value) ranges from 0 to 1 (Default: 0.5)
stackView.addArrangedSubview(AKSlider(
property: "Simple Delay Dry/Wet Mix",
value: self.conductor.delayDrMixer.balance,
range: 0.0 ... 0.99,
format: "%0.2f") { sliderValue in
self.conductor.delayDrMixer.balance = sliderValue
})
stackView.addArrangedSubview(AKSlider(
property: "Output Volume",
value: conductor.booster.gain,
range: 0 ... 2,
format: "%0.2f") { sliderValue in
self.conductor.booster.gain = sliderValue
})
/// Output Volume Section
view.addSubview(stackView)
stackView.widthAnchor.constraint(equalTo: self.view.widthAnchor, multiplier: 0.9).isActive = true
stackView.heightAnchor.constraint(equalTo: self.view.heightAnchor, multiplier: 0.9).isActive = true
stackView.centerXAnchor.constraint(equalTo: self.view.centerXAnchor).isActive = true
stackView.centerYAnchor.constraint(equalTo: self.view.centerYAnchor).isActive = true
}
}
Conductor...
import Foundation
import AudioKit
class Conductor: AKMIDIListener {
// Globally accessible singleton
static let sharedInstance = Conductor()
let input = AKStereoInput()
// Simple Delay
// Example using AudioKit class but you could use your own
// custom class instead.
var delay: AKDelay!
var delayDrMixer: AKDryWetMixer!
var booster: AKBooster!
var mixer = AKMixer()
init() {
AKSettings.audioInputEnabled = true
// Simple Delay
delay = AKDelay(input) // Route audio input into delay node
delayDrMixer = AKDryWetMixer(input, delay) // route input and delay into dry wet mixer
// Master Mixer
mixer = AKMixer(delayDrMixer) // Route audio into final mix node
// Set Output & Start AudioKit
AudioKit.output = mixer // route mix to output
do {
try AudioKit.start()
} catch {
AKLog("Audiokit start failed")
}
Audiobus.start()
}
}

Related

Set left and right headphone volume using two different sliders

I am generating a wave sound at different frequencies, and the user should hear this sound through headphones only, setting the left and right headphone volumes with two different sliders. To generate the wave sound I wrote the code below, which works perfectly.
The problem is: for the last 5 days I have been trying to set the volume for the left and right headphones separately, but with no luck.
class Synth {
// MARK: Properties
public static let shared = Synth()
public var volume: Float {
set {
audioEngine.mainMixerNode.outputVolume = newValue
}
get {
audioEngine.mainMixerNode.outputVolume
}
}
public var frequencyRampValue: Float = 0
public var frequency: Float = 440 {
didSet {
if oldValue != 0 {
frequencyRampValue = frequency - oldValue
} else {
frequencyRampValue = 0
}
}
}
private var audioEngine: AVAudioEngine
private lazy var sourceNode = AVAudioSourceNode { _, _, frameCount, audioBufferList in
let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
let localRampValue = self.frequencyRampValue
let localFrequency = self.frequency - localRampValue
let period = 1 / localFrequency
for frame in 0..<Int(frameCount) {
let percentComplete = self.time / period
let sampleVal = self.signal(localFrequency + localRampValue * percentComplete, self.time)
self.time += self.deltaTime
self.time = fmod(self.time, period)
for buffer in ablPointer {
let buf: UnsafeMutableBufferPointer<Float> = UnsafeMutableBufferPointer(buffer)
buf[frame] = sampleVal
}
}
self.frequencyRampValue = 0
return noErr
}
private var time: Float = 0
private let sampleRate: Double
private let deltaTime: Float
private var signal: Signal
// MARK: Init
init(signal: @escaping Signal = Oscillator.square) {
audioEngine = AVAudioEngine()
let mainMixer = audioEngine.mainMixerNode
let outputNode = audioEngine.outputNode
let format = outputNode.inputFormat(forBus: 0)
sampleRate = format.sampleRate
deltaTime = 1 / Float(sampleRate)
self.signal = signal
let inputFormat = AVAudioFormat(commonFormat: format.commonFormat,
sampleRate: format.sampleRate,
channels: 1,
interleaved: format.isInterleaved)
audioEngine.attach(sourceNode)
audioEngine.connect(sourceNode, to: mainMixer, format: inputFormat)
audioEngine.connect(mainMixer, to: outputNode, format: nil)
mainMixer.outputVolume = 0
audioEngine.mainMixerNode.pan = 100 // this does not work,
//audioEngine.mainMixerNode.pan = 1.0 // this also does not work
do {
try audioEngine.start()
} catch {
print("Could not start engine: \(error.localizedDescription)")
}
}
//This function will be called in view controller to generate sound
public func setWaveformTo(_ signal: @escaping Signal) {
self.signal = signal
}
}
With the above code I can hear the wave sound as normal in both the left and right headphones.
I tried setting audioEngine.mainMixerNode.pan to 100 and -100, and also to -1.0 and 1.0, but this did not make any change.
The allowable range for the pan value is {-1.0, 1.0}. The values that you say you used are outside that range, so it's not surprising that they had no effect. Try 0.75 or -0.75 instead.
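For what it's worth, a value inside that range does shift the output audibly (a minimal sketch, assuming the audioEngine instance from the question):
// Pan is only honored inside [-1.0, 1.0]; out-of-range values such as 100 have no effect.
audioEngine.mainMixerNode.pan = -0.75 // mostly left channel
audioEngine.mainMixerNode.pan = 0.75  // mostly right channel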

AudioKit output changes to ear speakers

I implemented the AudioKit "MICROPHONE ANALYSIS" example https://audiokit.io/examples/MicrophoneAnalysis/ in my app.
I want to analyze the microphone input frequency and then play the note that is nearest to the detected frequency.
Normally the sound output is the speaker or a Bluetooth device connected to my iPhone, but after implementing the "MICROPHONE ANALYSIS" example the sound output changed to the tiny speaker at the top of the iPhone (the earpiece) that is normally used during phone calls.
How can I switch to the "normal" speaker or to the connected Bluetooth device like before?
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!
func initFrequencyTracker() {
AKSettings.audioInputEnabled = true
mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
}
func deinitFrequencyTracker() {
plotTimer.invalidate()
do {
try AudioKit.stop()
AudioKit.output = nil
} catch {
print(error)
}
}
func initPlotTimer() {
AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
setupPlot()
plotTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updatePlotUI), userInfo: nil, repeats: true)
}
func setupPlot() {
let plot = AKNodeOutputPlot(mic, frame: audioInputPlot.bounds)
plot.translatesAutoresizingMaskIntoConstraints = false
plot.alpha = 0.3
plot.plotType = .rolling
plot.shouldFill = true
plot.shouldCenterYAxis = false
plot.shouldMirror = true
plot.color = UIColor(named: uiFarbe)
audioInputPlot.addSubview(plot)
// Pin the AKNodeOutputPlot to the audioInputPlot
var constraints = [plot.leadingAnchor.constraint(equalTo: audioInputPlot.leadingAnchor)]
constraints.append(plot.trailingAnchor.constraint(equalTo: audioInputPlot.trailingAnchor))
constraints.append(plot.topAnchor.constraint(equalTo: audioInputPlot.topAnchor))
constraints.append(plot.bottomAnchor.constraint(equalTo: audioInputPlot.bottomAnchor))
constraints.forEach { $0.isActive = true }
}
@objc func updatePlotUI() {
if tracker.amplitude > 0.1 {
let trackerFrequency = Float(tracker.frequency)
guard trackerFrequency < 7_000 else {
// This is a bit of a hack because modern MacBooks give super high frequencies
return
}
var frequency = trackerFrequency
while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
frequency /= 2.0
}
while frequency < Float(noteFrequencies[0]) {
frequency *= 2.0
}
var minDistance: Float = 10_000.0
var index = 0
for i in 0..<noteFrequencies.count {
let distance = fabsf(Float(noteFrequencies[i]) - frequency)
if distance < minDistance {
index = i
minDistance = distance
}
}
// let octave = Int(log2f(trackerFrequency / frequency))
frequencyLabel.text = String(format: "%0.1f", tracker.frequency)
if frequencyTranspose(note: notesToTanspose[index]) != droneLabel.text {
note = frequencyTranspose(note: notesToTanspose[index])
droneLabel.text = note
DispatchQueue.main.asyncAfter(deadline: .now() + 0.03, execute: {
self.prepareSinglePlayerFirstForStart(note: self.note)
self.startSinglePlayer()
})
}
}
}
func frequencyTranspose(note: String) -> String {
var indexNote = notesToTanspose.firstIndex(of: note)!
let chosenInstrument = UserDefaults.standard.object(forKey: "whichInstrument") as! String
if chosenInstrument == "Bb" {
if indexNote + 2 >= notesToTanspose.count {
indexNote -= 12
}
return notesToTanspose[indexNote + 2]
} else if chosenInstrument == "Eb" {
if indexNote - 3 < 0 {
indexNote += 12
}
return notesToTanspose[indexNote - 3]
} else {
return note
}
}
It's good practice to control the session settings, so start by creating a method in your application that takes care of that during initialisation.
Here's an example where I set a category and the desired options:
func start() {
do {
let session = AVAudioSession.sharedInstance()
try session.setCategory(.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true, options: .notifyOthersOnDeactivation)
try session.overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
try AudioKit.start()
} catch {
// your error handler
}
}
You can call this start method where you currently call AudioKit.start() in initPlotTimer.
The example above is using the AVAudioSession, which I believe is what AKSettings wraps (please feel free to edit my answer to not mislead future readers, as I'm not looking at the AudioKit source-code at the moment).
Now that AVAudioSession is exposed, let's stick with the method offered by AudioKit since that's what you're dealing with.
Here's another example using AKSettings:
func start() {
do {
AKSettings.channelCount = 2
AKSettings.ioBufferDuration = 0.002
AKSettings.audioInputEnabled = true
AKSettings.bufferLength = .medium
AKSettings.defaultToSpeaker = true
// check docs for other options and settings
try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth])
try AudioKit.start()
} catch {
// your handler
}
}
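Wired into the question's initPlotTimer, that could look something like this (just a sketch; the key point is that AudioKit.output is set and the session is configured before AudioKit starts):
func initPlotTimer() {
    AudioKit.output = silence
    start() // the helper above: configures the AVAudioSession, then calls AudioKit.start()
    setupPlot()
    plotTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updatePlotUI), userInfo: nil, repeats: true)
}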
Keep in mind that you don't necessarily have to name the method start, or run AudioKit's start method inside it; I'm just spelling out the initialisation phase to make it readable for you and other use cases.
Reference:
https://developer.apple.com/documentation/avfoundation/avaudiosession/categoryoptions
https://audiokit.io/docs/Classes/AKSettings.html

Synchronizing AKPlayer with AKSamplerMetronome

I am trying to use AKSamplerMetronome as a master clock in my (sort of) multi-audio-file playback project. I want AKPlayers to start in sync with the metronome's downbeat. Mixing AKPlayer and AKSamplerMetronome into AudioKit.output was successful; however, I am struggling to connect AKPlayer.start with AKSamplerMetronome.beatTime (or something else I haven't figured out) so that playback starts in sync with the metronome's downbeat (and repeats every time the metronome hits the downbeat). Here's what I've written:
class ViewController: UIViewController {
let metronome = AKSamplerMetronome()
let player = AKPlayer(audioFile: try! AKAudioFile(readFileName: "loop.wav"))
let mixer = AKMixer()
func startAudioEngine() {
do {
try AudioKit.start()
} catch {
print(error)
fatalError()
}
}
func makeConnections() {
player >>> mixer
metronome >>> mixer
AudioKit.output = mixer
}
func startMetronome() {
metronome.tempo = 120.0
metronome.beatVolume = 1.0
metronome.play()
}
func preparePlayer() {
player.isLooping = true
player.buffering = .always
player.prepare()
// I wanted AKPlayer to be repeated based on Metronome's downbeat.
}
func startPlayer() {
let startTime = AVAudioTime.now() + 0.25
player.start(at: startTime)
}
override func viewDidLoad() {
super.viewDidLoad()
makeConnections()
startAudioEngine()
preparePlayer()
startPlayer()
startMetronome()
}
}
My problem is that AKPlayer's start(at:) doesn't accept AKSamplerMetronome's properties, maybe because they're not compatible with AVAudioTime? I tried something like:
let startTime = metronome.beatTime + 0.25
player.start(at: startTime)
But this doesn't seem to be the answer (it fails with "cannot convert value of type 'Double' to expected argument type 'AVAudioTime?'"). It would be extremely helpful if someone could help me explore Swift/AudioKit. <3
You are calling the AVAudioTime playback function with a Double parameter, which is incorrect. If you want to start the AKPlayer with a seconds parameter, use player.play(when: time).
In general, you're close. This is how you do it:
let startTime: Double = 1
let hostTime = mach_absolute_time()
let now = AVAudioTime(hostTime: hostTime)
let avTime = now.offset(seconds: startTime)
metronome.setBeatTime(0, at: avTime)
player.play(at: avTime)
Basically you need to give a common clock to each unit (mach_absolute_time()), then use AVAudioTime to start them at the exact same time. The metronome.setBeatTime call tells the metronome to reset its 0 point to the passed-in avTime.
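If you want the player to come in on a later downbeat rather than immediately, the shared start time can be derived from the tempo (a rough sketch, assuming 4 beats per bar and the 120 BPM used in the question):
let tempo = 120.0
let secondsPerBeat = 60.0 / tempo
let barDuration = secondsPerBeat * 4 // one 4/4 bar
let now = AVAudioTime(hostTime: mach_absolute_time())
let avTime = now.offset(seconds: barDuration) // shared start time, one bar from now
metronome.setBeatTime(0, at: avTime) // the metronome's beat 0 lands exactly at avTime
player.play(at: avTime)              // the player starts on that same downbeat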

Allowing background audio with Swift not working

I want to allow background audio while the app is not in focus. I currently have this code, which should allow that:
do {
try AKSettings.setSession(category: .playback, with: .mixWithOthers)
} catch {
print("error")
}
AKSettings.playbackWhileMuted = true
I also have the 'Audio, AirPlay and Picture in Picture' background mode enabled in the Capabilities settings. However, when I press the home button on my device the audio doesn't keep playing. What am I doing wrong? I am using AudioKit to produce sounds, if that matters.
I am using a singleton to house all of the AudioKit components which I named AudioPlayer.swift. Here is what I have in my AudioPlayer.swift singleton file:
class AudioPlayer: NSObject {
var currentFrequency = String()
var soundIsPlaying = false
var leftOscillator = AKOscillator()
var rightOscillator = AKOscillator()
var rain = try! AKAudioFile()
var rainPlayer: AKAudioPlayer!
var envelope = AKAmplitudeEnvelope()
override init() {
super.init()
do {
try AKSettings.setSession(category: .playback, with: .mixWithOthers)
} catch {
print("error")
}
AKSettings.playbackWhileMuted = true
AudioKit.output = envelope
AudioKit.start()
}
func setupFrequency(left: AKOscillator, right: AKOscillator, frequency: String) {
currentFrequency = frequency
leftOscillator = left
rightOscillator = right
let leftPanner = AKPanner(leftOscillator)
leftPanner.pan = -1
let rightPanner = AKPanner(rightOscillator)
rightPanner.pan = 1
//Set up rain and rainPlayer
do {
rain = try AKAudioFile(readFileName: "rain.wav")
rainPlayer = try AKAudioPlayer(file: rain, looping: true, deferBuffering: false, completionHandler: nil)
} catch { print(error) }
let mixer = AKMixer(leftPanner, rightPanner, rainPlayer)
//Put mixer in sound envelope
envelope = AKAmplitudeEnvelope(mixer)
envelope.attackDuration = 2.0
envelope.decayDuration = 0
envelope.sustainLevel = 1
envelope.releaseDuration = 0.2
//Start AudioKit stuff
AudioKit.output = envelope
AudioKit.start()
leftOscillator.start()
rightOscillator.start()
rainPlayer.start()
envelope.start()
soundIsPlaying = true
}
}
And here is an example of one of my sound-effect view controllers, which references the AudioKit singleton to send it a certain frequency (I have about a dozen of these view controllers, each with its own frequency settings):
class CalmView: UIViewController {
let leftOscillator = AKOscillator()
let rightOscillator = AKOscillator()
override func viewDidLoad() {
super.viewDidLoad()
leftOscillator.amplitude = 0.3
leftOscillator.frequency = 220
rightOscillator.amplitude = 0.3
rightOscillator.frequency = 230
}
@IBAction func playSound(_ sender: Any) {
if shared.soundIsPlaying == false {
AudioKit.stop()
shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
} else if shared.soundIsPlaying == true && shared.currentFrequency != "Calm" {
AudioKit.stop()
shared.leftOscillator.stop()
shared.rightOscillator.stop()
shared.rainPlayer.stop()
shared.envelope.stop()
shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
} else {
shared.soundIsPlaying = false
shared.envelope.stop()
}
}
}
I instantiated the AudioPlayer singleton in my ViewController.swift file.
It depends on when you are doing your configuration in relation to when AudioKit is started. If you're using AudioKit, you should be using its AKSettings to manage your session category, and not only the playback category but also mixWithOthers. By default, it does this:
/// Set the audio session type
@objc open static func setSession(category: SessionCategory,
with options: AVAudioSessionCategoryOptions = [.mixWithOthers]) throws {
So you'd do something like this in your ViewController:
do {
if #available(iOS 10.0, *) {
try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
} else {
// Fallback on earlier versions
}
} catch {
print("Errored setting category.")
}
So I think it's a matter of getting that straight. It might also help to have Inter-App Audio set up. If you still have trouble and provide more information, I can help more, but this is as good an answer as I can muster based on the info you've given so far.
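Concretely, that ordering in the AudioPlayer singleton's init might look like this (just a sketch restating the question's own init, with the session configured before AudioKit is started):
override init() {
    super.init()
    do {
        // Configure the session first...
        try AKSettings.setSession(category: .playback, with: [.mixWithOthers])
    } catch {
        print("Errored setting category.")
    }
    AKSettings.playbackWhileMuted = true
    AudioKit.output = envelope
    // ...and only then start the engine.
    AudioKit.start()
}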

swift - game countdown timer almost working

I'm using the script below as a countdown for my game start. The script is from Gourav Nayyar's YouTube video and works great the first time it is called. However, once the game goes through the reset process and the script is called again, I only see 5 rather than 5 - 4 - 3 - 2 - 1 - GO!. If I remove one of the calls from my script then it works fine, either in the reset func or when the GameScene loads.
Here are the two calls in GameScene.swift:
override func didMoveToView(view: SKView) {
var gamelaunchTimerView:TimerView = TimerView.loadingCountDownTimerViewInView(self.view!)
gamelaunchTimerView.startTimer()
}
func resetScene() {
//code removed from here
return countdown()
}
func countdown() {
var gamelaunchTimerView:TimerView = TimerView.loadingCountDownTimerViewInView(self.view!)
gamelaunchTimerView.startTimer()
}
Here is the timer code in GameLaunchTimer.swift. As this is set up, the countdown only works when first called and hangs on the second call.
//
// TimerView.swift
// GameLaunchTimer
//
// Created by Gourav Nayyar on 7/3/14.
// Copyright (c) 2014 Gourav Nayyar. All rights reserved.
//
let VIEW_ALPHA:CGFloat = 0.5
let TIMERVIEW_RADIUS:CGFloat = 50
let TIMER_LABEL_INITIAL_VAL:Int = 5
let BORDER_WIDTH:CGFloat = 2
var timerVal:Int = TIMER_LABEL_INITIAL_VAL;
var timer:NSTimer!
import UIKit
import QuartzCore
class TimerView :UIView {
struct Stored {
static var timerLbl:UILabel!
}
class func loadingCountDownTimerViewInView (_superView:UIView)-> TimerView
{
var timerView:TimerView = TimerView(frame:_superView.frame)
// timerView.backgroundColor = UIColor.blackColor().colorWithAlphaComponent(VIEW_ALPHA)
_superView.addSubview(timerView)
/* add a custom Circle view */
let refFrame:CGRect = CGRectMake(_superView.center.x-TIMERVIEW_RADIUS, _superView.center.y-TIMERVIEW_RADIUS, 2*TIMERVIEW_RADIUS, 2*TIMERVIEW_RADIUS)
var circleView:UIView = UIView(frame:refFrame)
circleView.layer.cornerRadius = TIMERVIEW_RADIUS
circleView.layer.borderColor = UIColor.whiteColor().CGColor
circleView.layer.borderWidth = BORDER_WIDTH
/* add a custom Label */
Stored.timerLbl = UILabel(frame:circleView.bounds)
Stored.timerLbl.text = "\(TIMER_LABEL_INITIAL_VAL)"
Stored.timerLbl.textColor = UIColor.whiteColor()
Stored.timerLbl.font = UIFont(name: "MarkerFelt-Thin", size: 40)
Stored.timerLbl.textAlignment = NSTextAlignment.Center
circleView.addSubview(Stored.timerLbl)
timerView.addSubview(circleView)
return timerView
}
func startTimer()
{
timer = NSTimer.scheduledTimerWithTimeInterval(1.0
, target: self, selector: Selector("updateTimer:"), userInfo: nil, repeats: true)
}
func updateTimer(dt:NSTimer)
{
timerVal--
if timerVal==0{
Stored.timerLbl.text = "GO!"
}else if timerVal<0{
timer.invalidate()
removeCountDownTimerView()
} else{
Stored.timerLbl.text = "\(timerVal)"
}
}
func removeCountDownTimerView()
{
var mySuperView:UIView = self.superview!
mySuperView.userInteractionEnabled = true
super.removeFromSuperview()
}
}
Define your variables inside the class body:
import UIKit
import QuartzCore
class TimerView :UIView {
let VIEW_ALPHA:CGFloat = 0.5
let TIMERVIEW_RADIUS:CGFloat = 50
let TIMER_LABEL_INITIAL_VAL:Int = 5
let BORDER_WIDTH:CGFloat = 2
var timerVal:Int = TIMER_LABEL_INITIAL_VAL;
var timer:NSTimer!
... // other code
or, alternatively:
let VIEW_ALPHA:CGFloat = 0.5
let TIMERVIEW_RADIUS:CGFloat = 50
let TIMER_LABEL_INITIAL_VAL:Int = 5
let BORDER_WIDTH:CGFloat = 2
import UIKit
import QuartzCore
class TimerView :UIView {
var timerVal:Int = TIMER_LABEL_INITIAL_VAL;
var timer:NSTimer!
... //other code
Also assign nil to the timer after invalidating it: even though you invalidate it, the timer object's state is still kept, resulting in state conflicts when you create a new instance of the timer, since it runs on a different thread.
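Putting both suggestions together, updateTimer might end up looking something like this (just a sketch; resetting timerVal here is one way, beyond what the answer states, to make a second countdown start from 5 again):
func updateTimer(dt: NSTimer) {
    timerVal--
    if timerVal == 0 {
        Stored.timerLbl.text = "GO!"
    } else if timerVal < 0 {
        timer.invalidate()
        timer = nil                         // release the invalidated timer so a fresh one can be scheduled
        timerVal = TIMER_LABEL_INITIAL_VAL  // reset the count so the next run starts at 5 again
        removeCountDownTimerView()
    } else {
        Stored.timerLbl.text = "\(timerVal)"
    }
}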

Resources