Glitch when stopping AKOperationGenerator - audiokit

When I stop an AKOperationGenerator I hear a glitch.
binauralGenerator = AKOperationGenerator(numberOfChannels: 2) { _ in
    let bin = SoundEngineer.binaural(left: Binaural.leftFrequency,
                                     right: Binaural.rightFrequency,
                                     amplitude: Isochronic.amplitude * 0.67)
    return bin
}
AudioKit.output = binauralGenerator
AudioKit.start()

// This is how I stop it:
binauralGenerator.stop()
AudioKit 4.0.4 running on an iPhone 6.
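One way to avoid the click is to fade the generator out before stopping it, for example by routing it through an AKBooster and ramping its gain to zero so the stop doesn't cut the waveform mid-cycle. A rough sketch, assuming AKBooster's rampTime property as in this era of AudioKit 4 (renamed rampDuration in later versions); the fade length and delay below are arbitrary illustration values:

// Hedged sketch: fade out through an AKBooster before stopping, to avoid the hard cut.
let fader = AKBooster(binauralGenerator)   // assumed extra node, not in the original code
fader.rampTime = 0.1                       // ~100 ms fade (arbitrary)
AudioKit.output = fader
AudioKit.start()
binauralGenerator.start()

// Later, when stopping:
fader.gain = 0                             // ramps down over rampTime
DispatchQueue.main.asyncAfter(deadline: .now() + 0.15) {
    binauralGenerator.stop()               // stop only after the fade has finished
}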

Related

Audiokit AKSampler not playing sounds

I'm currently trying to get my AKSampler to play the notes I send it, but I'm not having much luck getting any audio output. My AKMIDICallbackInstrument is properly logging the notes as they play (although I'm seeing the print for each note twice), but the call to my sampler is not producing any audio and I can't figure out why.
class Sequencer {
    var sampler: AKSampler
    var sequencer: AKAppleSequencer
    var mixer: AKMixer

    init() {
        sampler = AKSampler()
        sequencer = AKAppleSequencer()
        mixer = AKMixer(sampler)
        let midicallback = AKMIDICallbackInstrument()
        let url = Bundle.main.url(forResource: "UprightPianoKW-20190703", withExtension: "sfz")!
        let track = sequencer.newTrack()
        track?.setMIDIOutput(midicallback.midiIn)
        sampler.loadSFZ(url: url)
        // generate some notes and add them to the track
        generateSequence()
        midicallback >>> mixer
        AudioKit.output = mixer
        AKSettings.playbackWhileMuted = true
        AKSettings.audioInputEnabled = true
        midicallback.callback = { status, note, vel in
            guard let status = AKMIDIStatus(byte: status),
                  let type = status.type,
                  type == .noteOn else { return print("note off: \(note)") }
            print("note on: \(note)")
            self.sampler.play(noteNumber: note, velocity: vel)
        }
    }

    func play() {
        try? AudioKit.start()
        sequencer.rewind()
        sequencer.play()
        try? AudioKit.stop()
    }

    func stop() {
        sequencer.stop()
    }
}
You need to connect your sampler to the mixer:
sampler >>> mixer
FWIW, midicallback >>> mixer isn't necessary with AKAppleSequencer/AKMIDICallbackInstrument, although it would be with AKSequencer/AKCallbackInstrument.
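For reference, a minimal sketch (based on the answer, using only names from the question) of how the routing in init() would look with that connection in place:

// Routing sketch: make sure the sampler actually feeds the mixer that AudioKit outputs.
sampler >>> mixer            // the connection the answer says is missing
AudioKit.output = mixer      // the mixer is what the engine renders
// (midicallback >>> mixer can be dropped when using AKAppleSequencer/AKMIDICallbackInstrument)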

Swift - AudioKit Sequencer with Oscillator (AKOscillatorBank). Frequencies won't play at higher range (MIDI note 120+)

I'm learning how to use AudioKit. I'm trying to play around with the sequencer and an oscillator. Everything is working fine, but I noticed that when I feed a higher note to an oscillator that is in a sequencer track, it renders the same audio for that MIDI note and for all higher ones. If I pass the same frequencies to the oscillator directly, I can hear the variance.
My initial setup:
let oscillator = AKOscillatorBank()
let oscillatorTrackIndex = 0
let sequencer = AKAppleSequencer()
let midi = AKMIDI()
var scale: [Int] = []
let sequenceLength = AKDuration(beats: 8.0)

func setupTracks() {
    let midiNode = AKMIDINode(node: oscillator)
    _ = sequencer.newTrack()
    sequencer.setLength(trueLength)
    AudioKit.output = midiNode
    try! AudioKit.start()
    midiNode.enableMIDI(midi.client, name: "midiNode midi in")
    sequencer.setTempo(currentTempo)
    sequencer.enableLooping()
    sequencer.play()
}
My method:
func generateSequence(_ stepSize: Float = 1/4, clear: Bool = true) {
    if clear { sequencer.tracks[oscillatorTrackIndex].clear() }
    let numberOfSteps = Int(Float(sequenceLength.beats) / stepSize)
    for i in 0 ..< numberOfSteps {
        if i % 4 == 0 {
            sequencer.tracks[0].add(noteNumber: 140, velocity: 127, position: AKDuration(beats: Double(i)), duration: AKDuration(beats: 0.5))
        } else {
            sequencer.tracks[0].add(noteNumber: 200, velocity: 127, position: AKDuration(beats: Double(i)), duration: AKDuration(beats: 0.5))
        }
    }
}
As you can see, I'm using note numbers 140 and 200. When the sequencer plays these notes, they render the same audio. If I use .midiNoteToFrequency() and feed those frequencies to the oscillator by itself, I can hear the difference.
Thanks!
In the MIDI spec, there are only 7 bits for the note number, allowing values between 0 and 127. Presumably (and this might be happening internally in Apple's MusicSequence, since I don't think that AKAppleSequencer or AKMusicTrack do this explicitly), values outside of this range are clamped into it to avoid unexpected crashes.
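A minimal sketch of keeping sequenced notes inside the valid MIDI range yourself, so two different out-of-range values don't collapse onto the same pitch (the explicit 0...127 clamp is my own addition; the add(...) call mirrors the question's generateSequence):

// Clamp the note number into the 7-bit MIDI range before adding it to the track.
let desiredNote = 140
let clampedNote = MIDINoteNumber(min(max(desiredNote, 0), 127))
sequencer.tracks[0].add(noteNumber: clampedNote,
                        velocity: 127,
                        position: AKDuration(beats: 0),
                        duration: AKDuration(beats: 0.5))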

Synchronizing AKPlayer with AKSamplerMetronome

I am trying to use AKSamplerMetronome as a master clock in my (sort of) multi-audiofile playback project. I want the AKPlayers to start in sync with the metronome's downbeat. Mixing AKPlayer and AKSamplerMetronome into AudioKit.output was successful; however, I am struggling to connect AKPlayer.start with AKSamplerMetronome.beatTime (or something else I haven't figured out) so that playback starts in sync with the metronome's downbeat (and repeats every time the metronome hits a downbeat). Here's what I've written:
class ViewController: UIViewController {
    let metronome = AKSamplerMetronome()
    let player = AKPlayer(audioFile: try! AKAudioFile(readFileName: "loop.wav"))
    let mixer = AKMixer()

    func startAudioEngine() {
        do {
            try AudioKit.start()
        } catch {
            print(error)
            fatalError()
        }
    }

    func makeConnections() {
        player >>> mixer
        metronome >>> mixer
        AudioKit.output = mixer
    }

    func startMetronome() {
        metronome.tempo = 120.0
        metronome.beatVolume = 1.0
        metronome.play()
    }

    func preparePlayer() {
        player.isLooping = true
        player.buffering = .always
        player.prepare()
        // I wanted AKPlayer to repeat based on the metronome's downbeat.
    }

    func startPlayer() {
        let startTime = AVAudioTime.now() + 0.25
        player.start(at: startTime)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        makeConnections()
        startAudioEngine()
        preparePlayer()
        startPlayer()
        startMetronome()
    }
}
My problem is that AKPlayer's start(at:) doesn't accept AKSamplerMetronome's properties, maybe because they're not compatible with AVAudioTime? I tried something like:
let startTime = metronome.beatTime + 0.25
player.start(at: startTime)
But this doesn't work ("cannot convert value of type 'Double' to expected argument type 'AVAudioTime?'"). It would be extremely helpful if someone could help me explore Swift/AudioKit. <3
You are calling the AVAudioTime-based playback function with a Double parameter, which is incorrect. If you want to start the AKPlayer with a seconds parameter, use player.play(when: time).
In general, you're close. This is how you do it:
let startTime: Double = 1
let hostTime = mach_absolute_time()
let now = AVAudioTime(hostTime: hostTime)
let avTime = now.offset(seconds: startTime)
metronome.setBeatTime(0, at: avTime)
player.play(at: avTime)
Basically you need to give a common clock to each unit (mach_absolute_time()), then use AVAudioTime to start them at the exact same time. metronome.setBeatTime is telling the metronome to reset its 0 point to the passed-in avTime.
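A small follow-up sketch building on the same approach, showing how you might bring the player in on a later downbeat instead of on beat 0; the tempo value and the one-second lead-in are assumptions for illustration:

// Sketch: anchor both units to the same host-time clock, then offset by whole beats.
let tempo = 120.0
let secondsPerBeat = 60.0 / tempo
let now = AVAudioTime(hostTime: mach_absolute_time())
let beatZero = now.offset(seconds: 1.0)                        // give the engine a little lead time
metronome.setBeatTime(0, at: beatZero)                         // metronome's beat 0 lands here
player.play(at: beatZero.offset(seconds: 4 * secondsPerBeat))  // player enters exactly on beat 4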

AudioKit Draw line for volume level in AKNodeOutputPlot

I am using AudioKit to display the recording level with AKNodeOutputPlot. I need to display a line that indicates the proper audio level for recording.
I want to draw the line at -10 dB. How can I achieve that? This is my sample code (Swift 4):
AKSettings.defaultToSpeaker = true

// Patching
outputPlot.node = mic
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)

// Microphone monitoring is muted
micBooster.gain = 0 // Silent

recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder?.audioFile {
    player = try? AKAudioPlayer(file: file)
}
player?.completionHandler = playingEnded

moogLadder = AKMoogLadder(player)
mainMixer = AKMixer(moogLadder, micBooster)
AudioKit.output = mainMixer

if isMicPresent {
    AudioKit.stop()
    AudioKit.start()
}

currentSeconds = 0
timer?.invalidate()
timer = nil
The output currently looks like the attached screenshot, whereas I want the graph to show a line at -10 dB like in the second screenshot.
How can I draw a line at -10 dB over the waveform-like graph?
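One approach, sketched here under the assumption that the plot maps amplitude -1...1 linearly onto its vertical bounds and that a plain UIView overlay is acceptable: convert -10 dB to a linear amplitude and place a thin line view at the corresponding height above the plot's centre.

// Sketch: overlay a 1-pt line at the height corresponding to -10 dB.
let thresholdDB: Double = -10
let amplitude = pow(10.0, thresholdDB / 20.0)          // ≈ 0.316 linear
let lineY = outputPlot.bounds.midY - CGFloat(amplitude) * outputPlot.bounds.midY
let lineView = UIView(frame: CGRect(x: 0, y: lineY, width: outputPlot.bounds.width, height: 1))
lineView.backgroundColor = .red
outputPlot.addSubview(lineView)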

AudioKit - Play sound files at specific position using sequencer

I'd like to use the AudioKit framework to generate a small sound sequence of high and low sounds.
I'm starting with a message that could look like this: "1100011010"
Every character should be looped through, and if its value is "1" AudioKit should play a (short) high-frequency sound, otherwise a (short) lower-frequency sound.
Because a simple timer loop that triggers the .play() function every 0.15 s to run a 0.1 s sound (high/low) doesn't seem to be very accurate, I decided to use the AudioKit Sequencer:
(o) audiokit:
enum Sequence: Int {
    case snareDrum
}

var snareDrum = AKSynthSnare()
var sequencer = AKSequencer()
var pumper: AKCompressor?
var mixer = AKMixer()

public init() {
    snareDrum >>> mixer
    pumper = AKCompressor(mixer)
    AudioKit.output = pumper
    AudioKit.start()
}

func setupTracks() {
    _ = sequencer.newTrack()
    sequencer.tracks[Sequence.snareDrum.rawValue].setMIDIOutput(snareDrum.midiIn)
    generateMessageSequence()
    sequencer.enableLooping()
    sequencer.setTempo(2000)
    sequencer.play()
}
(o) play:
var message = "1100011010"
var counter = 0.0
for i in message {
    counter += 0.15
    if i == "1" {
        // play high sound at specific position
    } else {
        // play low sound at specific position
    }
}
(o) play low sound at specific position:
sequencer.tracks[Sequence.snareDrum.rawValue].add(noteNumber: 20,
                                                  velocity: 10,
                                                  position: AKDuration(beats: counter),
                                                  duration: AKDuration(beats: 1))
My question: How can I play local sound files at specific positions (using position: AKDuration(beats: counter) as in the code above) instead of using default instruments like AKSynthSnare()?
You could create two tracks, each with an AKMIDISampler. One plays a 'low' sample, and the other plays a 'high' sample. Assign the high notes to the high track, and low notes to the low track.
let sequencer = AKSequencer()
let lowTrack = sequencer.newTrack()
let lowSampler = AKMIDISampler()
try! lowSampler.loadWav("myLowSoundFile")
lowTrack?.setMIDIOutput(lowSampler.midiIn)
let highTrack = sequencer.newTrack()
let highSampler = AKMIDISampler()
try! highSampler.loadWav("myHighSoundFile")
highTrack?.setMIDIOutput(highSampler.midiIn)
sequencer.setLength(AKDuration(beats: 4.0))
sequencer.enableLooping()
Then assign the high and low notes to each track:
let message = "1100011010"
let dur = 4.0 / Double(message.count)
var position: Double = 0
for i in message {
    if i == "1" {
        highTrack?.add(noteNumber: 60, velocity: 100, position: AKDuration(beats: position), duration: AKDuration(beats: dur * (2/3)))
    } else {
        lowTrack?.add(noteNumber: 60, velocity: 100, position: AKDuration(beats: position), duration: AKDuration(beats: dur * (2/3)))
    }
    position += dur
}
(I haven't run the code, but something like this should work)
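One detail the snippet doesn't show is routing the samplers to the output and kicking off playback; a minimal sketch of that step (the AKMixer routing and engine start are my assumption, mirroring the other examples on this page):

// Route both samplers to the output, then start the engine and the sequencer.
let mixer = AKMixer(lowSampler, highSampler)
AudioKit.output = mixer
try? AudioKit.start()
sequencer.play()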
