Apply amplitude modulation to pink noise operation - audiokit

AudioKit's documentation shows how to create panned white and pink noise as follows:
let generator = AKOperationGenerator { _ in
    let white = AKOperation.whiteNoise()
    let pink = AKOperation.pinkNoise()
    let lfo = AKOperation.sineWave(frequency: 0.3)
    let balance = lfo.scale(minimum: 0, maximum: 1)
    let noise = mixer(white, pink, balance: balance)
    return noise.pan(lfo)
}
However, rather than panning, I'm looking to change the amplitude with the following parameters (from SoundForge Pro):
// AmplitudeModulation -> Sine
// 0.15 (s) -> Modulation frequency
// Minimum amplitude: Up to -30.0
// Stereo pan: Up to 20
// Dry out: -30 dB
Is this possible using AudioKit?

You could use AKTremolo.
class ViewController: UIViewController {

    let whiteNoise = AKWhiteNoise()
    let tremolo = AKTremolo()
    let mixer = AKMixer()

    override func viewDidLoad() {
        AudioKit.output = mixer
        AudioKit.start()
        whiteNoise >>> tremolo >>> mixer
        tremolo.frequency = 0
        whiteNoise.start()

        let slider = AKSlider(property: "Tremolo") { value in
            self.tremolo.frequency = 100 * value
        }
        slider.frame = CGRect(x: 0, y: 100, width: view.bounds.width, height: 100)
        view.addSubview(slider)
    }
}

You can do amplitude modulation by using AKOperationEffect. For example:
let Amplfo = AKOperation.sineWave(frequency: freq, amplitude: 1.0)
let Output = AKOperationEffect(generator) { generator, _ in
    let lfo = max(Amplfo, 0)
    return generator * lfo
}
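
Combining the two snippets, here is a rough sketch (assuming the same AudioKit 4 operation API used above) of pink noise amplitude-modulated by a sine LFO whose period roughly matches the 0.15 s setting from the question; the depth, stereo pan, and dry level from the SoundForge preset would still need to be dialed in separately:

import AudioKit

// A minimal sketch, not a drop-in reproduction of the SoundForge preset.
let generator = AKOperationGenerator { _ in
    let pink = AKOperation.pinkNoise()
    // A 0.15 s modulation period is about 1 / 0.15 ≈ 6.7 Hz.
    let lfo = AKOperation.sineWave(frequency: 1.0 / 0.15)
    // Rescale the LFO from -1...1 to 0...1 so the amplitude never goes negative.
    let depth = lfo.scale(minimum: 0, maximum: 1)
    return pink * depth
}

AudioKit.output = generator
try AudioKit.start()
generator.start()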

Related

Create a flickering/variable SKLightNode in SpriteKit - simulate campfire lighting

I have an animated campfire using a texture atlas in SpriteKit, and I am trying to simulate the variable lighting that a fire would produce. I was able to achieve a flicker by varying the falloff, passing in a random number from 0...1.5. It works but is a little too crazy - I'm looking for a suggestion on smoothing it out to be more subtle and realistic - maybe pass an array of set values through? I'm not sure how I would do that. Or some sort of easing?
func buildCampfire() {
    let campfireAtlas = SKTextureAtlas(named: "Campfire")
    var fireFrames: [SKTexture] = []
    let numImages = campfireAtlas.textureNames.count
    for i in 1...numImages {
        let fireTextureName = "campfire\(i)"
        fireFrames.append(campfireAtlas.textureNamed(fireTextureName))
    }
    animatedCampfire = fireFrames
    let firstFrameTexture = animatedCampfire[0]
    campfire = SKSpriteNode(texture: firstFrameTexture)
    campfire.size.height = 300
    campfire.size.width = 300
    campfire.position = CGPoint(x: 108, y: -188)
    addChild(campfire)
}

func animateCampfire() {
    campfire.run(SKAction.repeatForever(SKAction.animate(with: animatedCampfire, timePerFrame: 0.1, resize: false, restore: true)), withKey: "campfireAnimated")
}

func flickerCampfire() {
    if let campfireLight = self.childNode(withName: "//campfireLight") as? SKLightNode {
        campfireLight.falloff = CGFloat.random(in: 0..<1.5)
    } else {
        print("cannot find light node")
    }
}

override func update(_ currentTime: TimeInterval) {
    flickerCampfire()
}
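
One way to make the flicker subtler - a sketch of the easing idea from the question, not code from the original post - is to pick a new random target only occasionally and ease the falloff toward it on every update:

var currentFalloff: CGFloat = 1.0
var targetFalloff: CGFloat = 1.0

func flickerCampfire() {
    guard let campfireLight = self.childNode(withName: "//campfireLight") as? SKLightNode else {
        print("cannot find light node")
        return
    }
    // Occasionally pick a new target from a narrower range than 0..<1.5.
    if CGFloat.random(in: 0...1) < 0.05 {
        targetFalloff = CGFloat.random(in: 0.8...1.2)
    }
    // Move a small fraction of the way toward the target each frame (simple easing).
    currentFalloff += (targetFalloff - currentFalloff) * 0.1
    campfireLight.falloff = currentFalloff
}

Tuning the 0.05 probability and the 0.1 easing factor trades smoothness against how lively the fire looks.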

Set left and right headphone volume using two different sliders

I am generating a wave sound for different frequencies, and the user should hear this sound through headphones only; he/she will set the left and right headphone volumes using two different sliders. To generate the wave sound I wrote the code below, which works perfectly.
The problem is: for the last 5 days I have been trying to set the volume for the left and right headphones separately, but with no luck.
class Synth {

    // MARK: Properties

    public static let shared = Synth()

    public var volume: Float {
        set {
            audioEngine.mainMixerNode.outputVolume = newValue
        }
        get {
            audioEngine.mainMixerNode.outputVolume
        }
    }

    public var frequencyRampValue: Float = 0

    public var frequency: Float = 440 {
        didSet {
            if oldValue != 0 {
                frequencyRampValue = frequency - oldValue
            } else {
                frequencyRampValue = 0
            }
        }
    }

    private var audioEngine: AVAudioEngine

    private lazy var sourceNode = AVAudioSourceNode { _, _, frameCount, audioBufferList in
        let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
        let localRampValue = self.frequencyRampValue
        let localFrequency = self.frequency - localRampValue
        let period = 1 / localFrequency
        for frame in 0..<Int(frameCount) {
            let percentComplete = self.time / period
            let sampleVal = self.signal(localFrequency + localRampValue * percentComplete, self.time)
            self.time += self.deltaTime
            self.time = fmod(self.time, period)
            for buffer in ablPointer {
                let buf: UnsafeMutableBufferPointer<Float> = UnsafeMutableBufferPointer(buffer)
                buf[frame] = sampleVal
            }
        }
        self.frequencyRampValue = 0
        return noErr
    }

    private var time: Float = 0
    private let sampleRate: Double
    private let deltaTime: Float
    private var signal: Signal

    // MARK: Init

    init(signal: @escaping Signal = Oscillator.square) {
        audioEngine = AVAudioEngine()
        let mainMixer = audioEngine.mainMixerNode
        let outputNode = audioEngine.outputNode
        let format = outputNode.inputFormat(forBus: 0)
        sampleRate = format.sampleRate
        deltaTime = 1 / Float(sampleRate)
        self.signal = signal
        let inputFormat = AVAudioFormat(commonFormat: format.commonFormat,
                                        sampleRate: format.sampleRate,
                                        channels: 1,
                                        interleaved: format.isInterleaved)
        audioEngine.attach(sourceNode)
        audioEngine.connect(sourceNode, to: mainMixer, format: inputFormat)
        audioEngine.connect(mainMixer, to: outputNode, format: nil)
        mainMixer.outputVolume = 0
        audioEngine.mainMixerNode.pan = 100 // this does not work
        //audioEngine.mainMixerNode.pan = 1.0 // this also does not work
        do {
            try audioEngine.start()
        } catch {
            print("Could not start engine: \(error.localizedDescription)")
        }
    }

    // This function will be called in the view controller to generate sound
    public func setWaveformTo(_ signal: @escaping Signal) {
        self.signal = signal
    }
}
With the above code I can hear the wave sound as normal in the left and right headphones.
I tried to use audioEngine.mainMixerNode.pan with the values 100 and -100, and also -1.0 and 1.0, but this did not make any change.
The allowable range for the pan value is -1.0 to 1.0. The values that you say you used are outside that range, so it's not surprising that they had no effect. Try 0.75 or -0.75 instead.
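
For completeness, here is a minimal sketch of driving that from a slider; the pan computed property and the UISlider callback below are additions for illustration, not part of the original Synth class:

// Inside Synth: expose pan the same way volume is already exposed,
// clamped to the valid -1.0 ... 1.0 range.
public var pan: Float {
    set { audioEngine.mainMixerNode.pan = max(-1.0, min(1.0, newValue)) }
    get { audioEngine.mainMixerNode.pan }
}

// Hypothetical slider callback in the view controller: a 0...1 UISlider
// mapped onto -1.0 ... 1.0, where -1 is hard left and +1 is hard right.
@IBAction func balanceSliderChanged(_ sender: UISlider) {
    Synth.shared.pan = sender.value * 2.0 - 1.0
}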

AudioKit playback cracks

I want to analyze the microphone input frequency and then play the correct note, which is near the frequency that was determined. I did that with the help of AudioKit.
This is working right now, but since I implemented AudioKit to get the frequency feature, the sound which plays after the frequency detection sometimes cracks during playback. That happened after I implemented AudioKit. Everything was fine before that...
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!

func initFrequencyTracker() {
    AKSettings.channelCount = 2
    AKSettings.audioInputEnabled = true
    AKSettings.defaultToSpeaker = true
    AKSettings.allowAirPlay = true
    AKSettings.useBluetooth = true
    AKSettings.allowHapticsAndSystemSoundsDuringRecording = true
    mic = AKMicrophone()
    tracker = AKFrequencyTracker(mic)
    silence = AKBooster(tracker, gain: 0)
}

func deinitFrequencyTracker() {
    AKSettings.audioInputEnabled = false
    plotTimer.invalidate()
    do {
        try AudioKit.stop()
        AudioKit.output = nil
    } catch {
        print(error)
    }
}

func initPlotTimer() {
    AudioKit.output = silence
    do {
        try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
        try AudioKit.start()
    } catch {
        AKLog("AudioKit did not start!")
    }
    setupPlot()
    plotTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updatePlotUI), userInfo: nil, repeats: true)
}

func setupPlot() {
    let plot = AKNodeOutputPlot(mic, frame: audioInputPlot.bounds)
    plot.translatesAutoresizingMaskIntoConstraints = false
    plot.alpha = 0.3
    plot.plotType = .rolling
    plot.shouldFill = true
    plot.shouldCenterYAxis = false
    plot.shouldMirror = true
    plot.color = UIColor(named: uiFarbe)
    audioInputPlot.addSubview(plot)
    // Pin the AKNodeOutputPlot to the audioInputPlot
    var constraints = [plot.leadingAnchor.constraint(equalTo: audioInputPlot.leadingAnchor)]
    constraints.append(plot.trailingAnchor.constraint(equalTo: audioInputPlot.trailingAnchor))
    constraints.append(plot.topAnchor.constraint(equalTo: audioInputPlot.topAnchor))
    constraints.append(plot.bottomAnchor.constraint(equalTo: audioInputPlot.bottomAnchor))
    constraints.forEach { $0.isActive = true }
}

@objc func updatePlotUI() {
    if tracker.amplitude > 0.3 {
        let trackerFrequency = Float(tracker.frequency)
        guard trackerFrequency < 7_000 else {
            // This is a bit of a hack because modern MacBooks give super high frequencies
            return
        }
        var frequency = trackerFrequency
        while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
            frequency /= 2.0
        }
        while frequency < Float(noteFrequencies[0]) {
            frequency *= 2.0
        }
        var minDistance: Float = 10_000.0
        var index = 0
        for i in 0..<noteFrequencies.count {
            let distance = fabsf(Float(noteFrequencies[i]) - frequency)
            if distance < minDistance {
                index = i
                minDistance = distance
            }
            print(minDistance, distance)
        }
        // let octave = Int(log2f(trackerFrequency / frequency))
        frequencyLabel.text = String(format: "%0.1f", tracker.frequency)
        if frequencyTranspose(note: notesToTanspose[index]) != droneLabel.text {
            momentaneNote = frequencyTranspose(note: notesToTanspose[index])
            droneLabel.text = momentaneNote
            stopSinglePlayer()
            DispatchQueue.main.asyncAfter(deadline: .now() + 0.03, execute: {
                self.prepareSinglePlayerFirstForStart(note: self.momentaneNote)
                self.startSinglePlayer()
            })
        }
    }
}

func frequencyTranspose(note: String) -> String {
    var indexNote = notesToTanspose.firstIndex(of: note)!
    let chosenInstrument = UserDefaults.standard.object(forKey: "whichInstrument") as! String
    if chosenInstrument == "Bb" {
        if indexNote + 2 >= notesToTanspose.count {
            indexNote -= 12
        }
        return notesToTanspose[indexNote + 2]
    } else if chosenInstrument == "Eb" {
        if indexNote - 3 < 0 {
            indexNote += 12
        }
        return notesToTanspose[indexNote - 3]
    } else {
        return note
    }
}
It appears that your implementation can be improved slightly by putting the multithreading principles of iOS into practice. Now, I'm not an expert in the subject, but let's look at the statement: "the sound which plays after the frequency detection cracks sometimes during playback".
I'd like to point out that the "frequency" of the "crack" is random or unpredictable, and that it happens during computation.
So, move code that doesn't need to run on the main thread to a background thread (https://developer.apple.com/documentation/DISPATCH).
While refactoring, you can stress-test your implementation by increasing the frequency of the Timer callback, i.e. reducing its interval to 0.05 for example; conversely, if you increase the interval to, say, 0.2, you'll probably hear fewer random crackles.
Now, this is easier said than done where concurrency is concerned, but that's what you need to improve.
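
Concretely, a minimal sketch of that split, reusing the names from the question's code; the analysisQueue and the nearestNoteIndex(to:) / handleDetectedNote(at:) helpers are hypothetical additions, and only UI and player work hops back to the main thread:

let analysisQueue = DispatchQueue(label: "frequency.analysis", qos: .userInitiated)

@objc func updatePlotUI() {
    // Read the tracker values on the main thread, then hand the math off.
    let amplitude = tracker.amplitude
    let trackerFrequency = Float(tracker.frequency)
    analysisQueue.async {
        guard amplitude > 0.3, trackerFrequency < 7_000 else { return }
        // Hypothetical helper wrapping the existing while/for loops that fold
        // the frequency into range and find the nearest note.
        let index = self.nearestNoteIndex(to: trackerFrequency)
        DispatchQueue.main.async {
            self.frequencyLabel.text = String(format: "%0.1f", trackerFrequency)
            // Hypothetical helper containing the droneLabel / single-player logic.
            self.handleDetectedNote(at: index)
        }
    }
}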

How can I make an iOS device play music programmatically?

I'm trying to make my iPhone play a tune without using prerecorded files. What are my options here? AVAudioEngine, AudioKit? I've looked at them, but the learning curve is relatively steep for something I'm hoping is easy. They also seem like tools for creating sound effects given a PCM buffer window.
I'd like to be able to do something like
pitchCreator.play(["C4", "E4", "G4"], durations: [1, 1, 1])
Preferably sounding like an instrument, or at least not like a pure sine wave.
EDIT: The code below has since been replaced by AudioKit.
To anyone wondering: I did make it work (kind of) using code similar to the one below.
class PitchCreator {

    var engine: AVAudioEngine
    var player: AVAudioPlayerNode
    var mixer: AVAudioMixerNode
    var buffer: AVAudioPCMBuffer

    init() {
        engine = AVAudioEngine()
        player = AVAudioPlayerNode()
        mixer = engine.mainMixerNode
        buffer = AVAudioPCMBuffer(PCMFormat: player.outputFormatForBus(0), frameCapacity: 100)
        buffer.frameLength = 4096
        engine.attachNode(player)
        engine.connect(player, to: mixer, format: player.outputFormatForBus(0))
    }

    func play(frequency: Float) {
        let signal = self.createSignal(frequency, amplitudes: [1.0, 0.5, 0.3, 0.1], bufferSize: Int(buffer.frameLength), sampleRate: Float(mixer.outputFormatForBus(0).sampleRate))
        for i in 0 ..< signal.count {
            buffer.floatChannelData.memory[i] = 0.5 * signal[i]
        }
        do {
            try engine.start()
            player.play()
            player.scheduleBuffer(buffer, atTime: nil, options: .Loops, completionHandler: nil)
        } catch {}
    }

    func stop() {
        engine.stop()
        player.stop()
    }

    func createSignal(frequency: Float, amplitudes: [Float], bufferSize: Int, sampleRate: Float) -> [Float] {
        let π = Float(M_PI)
        let T = sampleRate / frequency
        var x = [Float](count: bufferSize, repeatedValue: 0.0)
        for k in 0 ..< x.count {
            for h in 0 ..< amplitudes.count {
                x[k] += amplitudes[h] * sin(2.0 * π * Float(h + 1) * Float(k) / T)
            }
        }
        return x
    }
}
But it doesn't sound good enough, so I've gone with sampling the notes I need and just using AVAudioPlayer to play them instead.
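
If you'd rather stay fully programmatic, another option (a hedged sketch, not part of the original answer) is AVAudioUnitSampler, which takes MIDI note numbers and can sound far more instrument-like than a raw sine wave once a SoundFont/EXS instrument is loaded; without one it just produces a plain default tone:

import AVFoundation

class PitchPlayer {

    private let engine = AVAudioEngine()
    private let sampler = AVAudioUnitSampler()

    init() throws {
        engine.attach(sampler)
        engine.connect(sampler, to: engine.mainMixerNode, format: nil)
        try engine.start()
    }

    // Very small note-name parser covering natural notes like "C4" (C4 == MIDI 60).
    private func midiNote(for name: String) -> UInt8? {
        let semitones = ["C": 0, "D": 2, "E": 4, "F": 5, "G": 7, "A": 9, "B": 11]
        guard let letter = name.first,
              let semitone = semitones[String(letter)],
              let octave = Int(name.dropFirst()) else { return nil }
        return UInt8((octave + 1) * 12 + semitone)
    }

    func play(_ notes: [String], durations: [Double]) {
        var delay: Double = 0
        for (note, duration) in zip(notes, durations) {
            guard let midi = midiNote(for: note) else { continue }
            DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
                self.sampler.startNote(midi, withVelocity: 90, onChannel: 0)
            }
            DispatchQueue.main.asyncAfter(deadline: .now() + delay + duration) {
                self.sampler.stopNote(midi, onChannel: 0)
            }
            delay += duration
        }
    }
}

// Usage: let player = try PitchPlayer()
//        player.play(["C4", "E4", "G4"], durations: [1, 1, 1])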

How to lower Renderer Utilization?

I am having severe performance issues with a SceneKit game. The total vertex count is 4,000. Renderer Utilization is 100% on an iPad 3 and the frame rate is 16 fps, even though Tiler Utilization is only about 8%. This is a screenshot from the OpenGL ES analysis.
This is the entire code used to specify what the object should look like. As you can see, there are no custom shaders - just a box that falls on the floor because of the physics applied to it. Textures and 2 lights are applied in the .scn file. Everything is simple and basic, but the performance is terrible. How do I lower Renderer Utilization on the device? I am guessing some default settings that SceneKit uses aren't suitable for my app and cause severe performance problems. Which ones should I double-check?
// called in viewDidLoad:
func setupScene() {
    scene = SCNScene(named: "GameScene.scn")
    sceneView.scene = scene
    sceneView.delegate = self
    scene.physicsWorld.contactDelegate = self
    scene.physicsWorld.gravity = SCNVector3Make(0, 0, 0)
    scene.physicsWorld.speed = 1.8
    sceneView.jitteringEnabled = true
    sceneView.autoenablesDefaultLighting = false
    sceneView.antialiasingMode = SCNAntialiasingMode.None
    let valueVector: NSValue = NSValue(SCNVector3: SCNVector3Make(0.7, 0.7, 0.7))
    objectNode = scene.rootNode.childNodeWithName("object", recursively: true)!
    let physicsShape = SCNPhysicsShape(node: objectNode, options: [SCNPhysicsShapeTypeKey: SCNPhysicsShapeTypeBoundingBox, SCNPhysicsShapeScaleKey: valueVector])
    objectNode.physicsBody = SCNPhysicsBody(type: SCNPhysicsBodyType.Dynamic, shape: physicsShape)
    let floorNode = scene.rootNode.childNodeWithName("floor", recursively: true)
    floorNode?.physicsBody = SCNPhysicsBody(type: SCNPhysicsBodyType.Static, shape: nil)
    objectNode.categoryBitMask = 1
    floorNode?.categoryBitMask = 1
    objectNode.physicsBody?.categoryBitMask = 1
    floorNode?.physicsBody?.categoryBitMask = 1
    objectNode.physicsBody?.collisionBitMask = 1
    floorNode?.physicsBody?.collisionBitMask = 1
    floorNode?.physicsBody?.restitution = 0.7
    objectNode.physicsBody?.restitution = 0.7
}

func speed(velocity: SCNVector3) -> Float {
    let dx = Float(velocity.x)
    let dy = Float(velocity.y)
    let dz = Float(velocity.z)
    return sqrtf(dx*dx + dy*dy + dz*dz)
}

func angularSpeed(angularSpeed: SCNVector4) -> Float {
    let x = angularSpeed.x
    let y = angularSpeed.y
    let z = angularSpeed.z
    return sqrtf(x*x + y*y + z*z)
}

func nearlyAtRest(node: SCNNode) -> Bool {
    return speed((node.physicsBody?.velocity)!) < 0.05 && (self.resting == false) && (angularSpeed((node.physicsBody?.angularVelocity)!) < 1)
}

// SCNSceneRendererDelegate method, called after each physics step
func renderer(renderer: SCNSceneRenderer, didSimulatePhysicsAtTime time: NSTimeInterval) {
    if nearlyAtRest(objectNode) && speed((objectNode.physicsBody?.velocity)!) != 0 {
        print("it stopped")
    }
}
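
Without the actual scene it's hard to say which setting is responsible, but a couple of common first checks (a hedged sketch in the same Swift 2 style as the code above, not a confirmed fix for this particular scene) are the on-screen statistics overlay and dynamic shadows, which are expensive on an iPad 3 class GPU:

sceneView.showsStatistics = true   // overlays FPS, draw-call and triangle counts

// Shadow-casting lights are a frequent cause of high Renderer Utilization on
// older GPUs; try disabling shadows on the two lights set up in the .scn file.
for lightNode in scene.rootNode.childNodesPassingTest({ node, _ in node.light != nil }) {
    lightNode.light?.castsShadow = false
}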
