How to modify the Engine's output in AudioKit on iOS

I want to let the listener choose different sound effects while playing music.
This is my code:
class EffectConductor: ObservableObject, ProcessesPlayerInput {
    var engine = AudioEngine()
    var player = AudioPlayer()
    // var dryWetMixer : DryWetMixer
    var isPlaying = false

    init(path: String) {
        let mediaUrl = URL(fileURLWithPath: path)
        let file = try! AVAudioFile(forReading: mediaUrl)
        try! player.load(file: file, buffered: true)
        engine.output = player
    }

    @Published var effectType: Int = 0 {
        didSet {
            var node: Node? = nil
            switch effectType {
            case 1:
                node = AutoPanner(player)
            case 2:
                node = AutoWah(player)
            case 3:
                node = Compressor(player)
            case 4:
                node = DynamicRangeCompressor(player)
            case 5:
                node = Expander(player)
            case 6:
                node = Phaser(player)
            case 7:
                node = StringResonator(player)
            default:
                node = nil
            }
            if node == nil {
                print("effect nil")
                engine.output = player
            } else {
                engine.output = DryWetMixer(player, node!)
            }
        }
    }
}
When I call engine.start() and player.start(), the music plays. But when I tap a button and set effectType = 2 (or any other value) to change engine.output, playback stops.
I tried this when the button is tapped:
let progress = player.getCurrentTime()
if isPlaying {
    player.stop()
}
self.effectType = 3
if isPlaying {
    player.seek(time: progress)
    player.play()
}
But there is still an audible pause during playback.
How can I solve this problem?
Thanks, everyone!
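One direction I am considering (a sketch only, assuming the same AudioKit 5 types used above, with a single fixed effect for brevity): build the signal chain once at init time and switch the effect by crossfading DryWetMixer.balance, instead of reassigning engine.output while the engine is running.

// Sketch only: the graph is built once; the effect is toggled via
// DryWetMixer.balance rather than by swapping engine.output mid-playback.
class CrossfadeConductor {
    let engine = AudioEngine()
    let player: AudioPlayer
    let effect: Node
    let mixer: DryWetMixer

    init(path: String) {
        let newPlayer = AudioPlayer()
        let wet = Phaser(newPlayer)        // pick/replace the effect here
        player = newPlayer
        effect = wet
        mixer = DryWetMixer(newPlayer, wet)

        mixer.balance = 0                  // start fully dry
        engine.output = mixer              // the output node is never reassigned after this

        let file = try! AVAudioFile(forReading: URL(fileURLWithPath: path))
        try! player.load(file: file, buffered: true)
    }

    // Crossfade between dry and wet without stopping the engine or the player.
    func setEffectEnabled(_ enabled: Bool) {
        mixer.balance = enabled ? 1 : 0
    }
}

Supporting the full list of selectable effects from the question would mean either pre-building one wet chain per effect behind a Mixer, or pausing the engine while the chain is rebuilt; the point of the sketch is only that the output node stays fixed while audio is running.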

Related

AudioKit / AVAudioEngine play nodes in sequence

I have some audio playing with AudioKit / AVAudioEngine and I would like to be able to play one node after another without any gaps. Here is what I have so far.
var file: AKAudioFile!
var node: AKPlayer!
var node1: AKPlayer!
var mixer: AKMixer!

func playInSequence() {
    do {
        file = try AKAudioFile(readFileName: "Song.m4a", baseDir: .resources)

        node = AKPlayer(audioFile: file)
        node.startTime = 10
        node.endTime = 20
        node.buffering = .dynamic
        node.prepare()
        node.completionHandler = {
            print("node complete")
        }

        node1 = AKPlayer(audioFile: file)
        node1.startTime = 20
        node1.endTime = 30
        node1.buffering = .dynamic
        node1.prepare()
        node1.completionHandler = {
            print("node1 complete")
        }

        mixer = AKMixer(node, node1)
        AudioKit.output = mixer
        try AudioKit.start()

        let future = AVAudioTime.now()
        node.play(at: future)
        let future2 = AVAudioTime.now() + 10
        node1.play(at: future2)
    } catch {
    }
}
Please forgive me if I'm doing it all wrong. I really don't know what I'm doing.
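One detail that may matter (a sketch under the assumption that AudioKit 4's AVAudioTime + seconds helper behaves as used above): compute both start times from a single reference instead of calling AVAudioTime.now() twice, and give the engine a little scheduling headroom, so the second segment is butted exactly against the first.

// Sketch: one shared reference time for both players, so node1 is scheduled
// to begin exactly when node's 10-second segment (10...20) ends.
let startTime = AVAudioTime.now() + 0.5   // small lead time for scheduling
node.play(at: startTime)                   // plays seconds 10...20 of the file
node1.play(at: startTime + 10.0)           // begins right as node finishes

Calling now() twice means the second deadline drifts by however long the first call and setup took; using one shared reference keeps the two segments back to back.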

Clicks / Distortion in AudioKit

When I add a bunch (20-40) of samples playing and overlapping each other simultaneously, it sometimes starts getting distorted, and then some waving, oscillating, and clicking begins to happen. A similar sound happens when samples are playing and the app crashes - it sounds like an abrupt, crunchy halt.
Notice the waviness begins between 0:05 and 0:10; nasty clicks start around 0:15.
How can I make it smoother? I am spawning AKPlayer objects (from AudioKit 4.1) that play 4-8 second .wav files. Those go into AKBoosters, which go into AKMixers, which go into the final AKMixer for output.
Edit:
Many PenAudioNodes get plugged into the mixer of the AudioReceiver singleton.
Here's my AudioReceiver singleton:
class AudioReceiver {
    static var sharedInstance = AudioReceiver()

    private var audioNodes = [UUID: AudioNode]()
    private let mixer = AKMixer()
    private let queue = DispatchQueue(label: "audio-queue")

    //MARK: - Setup & Teardown

    init() {
        AudioKit.output = mixer //peakLimiter
        AudioKit.start()
    }

    //MARK: - Public

    func audioNodeBegan(_ message: AudioNodeMessage) {
        queue.async {
            var audioNode: AudioNode?
            switch message.senderType {
            case .pen:
                audioNode = PenAudioNode()
            case .home:
                audioNode = LoopingAudioNode(with: AudioHelper.homeLoopFile())
            default:
                break
            }
            if let audioNode = audioNode {
                self.audioNodes[message.senderId] = audioNode
                self.mixer.connect(input: audioNode.output)
                audioNode.start(message)
            }
        }
    }

    func audioNodeMoved(_ message: AudioNodeMessage) {
        queue.async {
            if let audioNode = self.audioNodes[message.senderId] {
                audioNode.update(message)
            }
        }
    }

    func audioNodeEnded(_ message: AudioNodeMessage) {
        queue.async {
            if let audioNode = self.audioNodes[message.senderId] {
                audioNode.stop(message)
            }
            self.audioNodes[message.senderId] = nil
        }
    }
}
Here's my PenAudioNode:
class PenAudioNode {
    fileprivate var mixer: AKMixer?
    fileprivate var playersBoosters = [AKPlayer: AKBooster]()
    fileprivate var finalOutput: AKNode?
    fileprivate let file: AKAudioFile = AudioHelper.randomBellSampleFile()

    //MARK: - Setup & Teardown

    init() {
        mixer = AKMixer()
        finalOutput = mixer!
    }
}

extension PenAudioNode: AudioNode {
    var output: AKNode {
        return finalOutput!
    }

    func start(_ message: AudioNodeMessage) {
    }

    func update(_ message: AudioNodeMessage) {
        if let velocity = message.velocity {
            let newVolume = Swift.min((velocity / 50) + 0.1, 1)
            mixer!.volume = newVolume
        }
        if let isClimactic = message.isClimactic, isClimactic {
            let player = AKPlayer(audioFile: file)
            player.completionHandler = { [weak self] in
                self?.playerCompleted(player)
            }
            let booster = AKBooster(player)
            playersBoosters[player] = booster
            booster.rampTime = 1
            booster.gain = 0
            mixer!.connect(input: booster)
            player.play()
            booster.gain = 1
        }
    }

    func stop(_ message: AudioNodeMessage) {
        for (_, booster) in playersBoosters {
            booster.gain = 0
        }
        DispatchQueue.global().asyncAfter(deadline: DispatchTime.now() + 1) {
            self.mixer!.stop()
            self.output.disconnectOutput()
        }
    }

    private func playerCompleted(_ player: AKPlayer) {
        playersBoosters.removeValue(forKey: player)
    }
}
This sounds like you are not releasing objects, and you are eventually overloading the audio engine with too many instances of processing nodes connected in the graph. In particular, not releasing AKBoosters will cause an issue like this. I can't really tell what your code is doing, but if you are spawning objects without releasing them properly, it will lead to garbled audio.
You want to conserve objects as much as possible and make sure you are using the absolute minimum number of AKNode-based processing nodes.
There are various ways to debug this, but you can start by printing out the current state of the AVAudioEngine:
AudioKit.engine.description
That will show how many nodes you have connected in the graph at any given moment.
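As a concrete illustration of that advice (a sketch against the PenAudioNode code above, not a drop-in fix): when a player finishes, actually disconnect its booster from the graph rather than only removing it from the dictionary, and dump the engine state so you can watch the node count.

// Sketch: detach the booster from the graph on completion, then print the
// engine description; the node count in the dump should stay roughly constant
// over time if objects are being released correctly.
private func playerCompleted(_ player: AKPlayer) {
    if let booster = playersBoosters[player] {
        booster.disconnectOutput()      // remove the node from the engine graph
    }
    playersBoosters.removeValue(forKey: player)
    print(AudioKit.engine.description)
}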

Allowing background audio with Swift not working

I want to allow background audio while the app is not in focus. I currently have this code, which should allow that:
do {
    try AKSettings.setSession(category: .playback, with: .mixWithOthers)
} catch {
    print("error")
}
AKSettings.playbackWhileMuted = true
I also have the 'Audio, AirPlay, and Picture in Picture' background mode enabled in the Capabilities settings. However, when I press the home button on my device, the audio doesn't keep playing. What am I doing wrong? I am using AudioKit to produce sounds, if that matters.
I am using a singleton to house all of the AudioKit components which I named AudioPlayer.swift. Here is what I have in my AudioPlayer.swift singleton file:
class AudioPlayer: NSObject {
    var currentFrequency = String()
    var soundIsPlaying = false
    var leftOscillator = AKOscillator()
    var rightOscillator = AKOscillator()
    var rain = try! AKAudioFile()
    var rainPlayer: AKAudioPlayer!
    var envelope = AKAmplitudeEnvelope()

    override init() {
        super.init()
        do {
            try AKSettings.setSession(category: .playback, with: .mixWithOthers)
        } catch {
            print("error")
        }
        AKSettings.playbackWhileMuted = true
        AudioKit.output = envelope
        AudioKit.start()
    }

    func setupFrequency(left: AKOscillator, right: AKOscillator, frequency: String) {
        currentFrequency = frequency
        leftOscillator = left
        rightOscillator = right

        let leftPanner = AKPanner(leftOscillator)
        leftPanner.pan = -1
        let rightPanner = AKPanner(rightOscillator)
        rightPanner.pan = 1

        //Set up rain and rainPlayer
        do {
            rain = try AKAudioFile(readFileName: "rain.wav")
            rainPlayer = try AKAudioPlayer(file: rain, looping: true, deferBuffering: false, completionHandler: nil)
        } catch { print(error) }

        let mixer = AKMixer(leftPanner, rightPanner, rainPlayer)

        //Put mixer in sound envelope
        envelope = AKAmplitudeEnvelope(mixer)
        envelope.attackDuration = 2.0
        envelope.decayDuration = 0
        envelope.sustainLevel = 1
        envelope.releaseDuration = 0.2

        //Start AudioKit stuff
        AudioKit.output = envelope
        AudioKit.start()
        leftOscillator.start()
        rightOscillator.start()
        rainPlayer.start()
        envelope.start()
        soundIsPlaying = true
    }
}
And here is an example of one of my sound effect view controllers, which references the AudioKit singleton to send it a certain frequency (I have about a dozen of these view controllers, each with its own frequency settings):
class CalmView: UIViewController {
    let leftOscillator = AKOscillator()
    let rightOscillator = AKOscillator()

    override func viewDidLoad() {
        super.viewDidLoad()
        leftOscillator.amplitude = 0.3
        leftOscillator.frequency = 220
        rightOscillator.amplitude = 0.3
        rightOscillator.frequency = 230
    }

    @IBAction func playSound(_ sender: Any) {
        if shared.soundIsPlaying == false {
            AudioKit.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else if shared.soundIsPlaying == true && shared.currentFrequency != "Calm" {
            AudioKit.stop()
            shared.leftOscillator.stop()
            shared.rightOscillator.stop()
            shared.rainPlayer.stop()
            shared.envelope.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else {
            shared.soundIsPlaying = false
            shared.envelope.stop()
        }
    }
}
I instantiated the AudioPlayer singleton in my ViewController.swift file.
It depends on when you are doing your configuration in relation to when AudioKit is started. If you're using AudioKit, you should be using its AKSettings to manage your session category. Basically, you want not only the playback category but also mixWithOthers. By default, AKSettings.setSession does this:
/// Set the audio session type
@objc open static func setSession(category: SessionCategory,
                                  with options: AVAudioSessionCategoryOptions = [.mixWithOthers]) throws {
So you'd do something like this in your ViewController:
do {
    if #available(iOS 10.0, *) {
        try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
    } else {
        // Fallback on earlier versions
    }
} catch {
    print("Errored setting category.")
}
So I think it's a matter of getting that straight. It might also help to have inter-app audio set up. If you still have trouble and provide more information, I can help more, but this is as good an answer as I can muster based on the info you've given so far.
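For background playback specifically, the ordering matters: configure the session category before AudioKit is started, and for an app that only plays audio in the background the plain .playback category (plus the 'Audio, AirPlay, and Picture in Picture' background mode) is what keeps the engine alive. A minimal sketch along those lines, reusing the names from the singleton above:

// Sketch: configure the session first, then build and start AudioKit.
// Uses the AudioKit 4 API shown in the question; `envelope` is the
// AKAmplitudeEnvelope from the AudioPlayer singleton above.
do {
    try AKSettings.setSession(category: .playback, with: [.mixWithOthers])
} catch {
    print("Error setting session: \(error)")
}
AKSettings.playbackWhileMuted = true

AudioKit.output = envelope
AudioKit.start()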

How to loop AVPlayer from 4 seconds to 8 seconds in Swift 3?

I have an AVPlayer in Swift 3 that plays video. The problem is that I want to loop from A seconds to B seconds (for example, from 4 to 8 seconds). Here is my code for the loop, but it didn't work:
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: self.Player.currentItem, queue: nil, using: { (_) in
    DispatchQueue.main.async {
        self.Player.seek(to: kCMTimeZero)
        self.Player.play()
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 4.0) {
            // check if player is still playing
            if self.Player.rate != 0 {
                print("OK")
                print("Player reached 4.0 seconds")
                let timeScale = self.Player.currentItem?.asset.duration.timescale
                // let seconds = kCMTimeZero
                let time = CMTimeMakeWithSeconds(8.0, timeScale!)
                self.Player.seek(to: time, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
                self.Player.play()
            }
        }
    }
})
The problem is that this loop doesn't work: because the observer fires on AVPlayerItemDidPlayToEndTime, the print("OK") won't run until the player has finished the whole movie.
There are a few options:
If you want gapless playback, you can start off by using:
Pre-iOS 10: https://developer.apple.com/library/content/samplecode/avloopplayer/Introduction/Intro.html
iOS 10+: https://developer.apple.com/documentation/avfoundation/avplayerlooper
The pre-iOS 10 "solution" from Apple does work, and is the only way I have gotten gapless looping, since I target iOS 9.
If you are using that solution, you also need to either feed it an AVPlayerItem of the right length or extend the solution to cut the asset up as you send it to the player.
For that, you can do something like how I changed Apple's code (sorry if it's a bit sparse - just trying to show the main changes). Basically, I added passing in the track and the chunk of time to use, then made that an AVMutableCompositionTrack (I got rid of all the stuff for video - you will want to keep that in):
class myClass: someClass {
    var loopPlayer: QueuePlayerLooper!
    var avAssetLength: Int64!
    var avAssetTimescale: CMTimeScale!
    var avAssetTimeRange: CMTimeRange!
    let composition = AVMutableComposition()
    var playerItem: AVPlayerItem!
    var avAssetTrack: AVAssetTrack!
    var compAudioTrack: AVMutableCompositionTrack!
    var uurl: URL!
    var avAsset: AVURLAsset!

    func createCMTimeRange(start: TimeInterval, end: TimeInterval) -> CMTimeRange {
        avAssetTimescale = avAssetTrack.naturalTimeScale
        let a: CMTime = CMTime(seconds: start, preferredTimescale: avAssetTimescale)
        let b: CMTime = CMTime(seconds: end, preferredTimescale: avAssetTimescale)
        return CMTimeRange(start: a, end: b)
    }

    func startLoopingSection() {
        loopPlayer = QueuePlayerLooper(videoURL: uurl, loopCount: -1, timeRange: createCMTimeRange(start: a_playbackPosition, end: b_playbackPosition))
        loopPlayer.start()
    }
}
//--==--==--==--==--==--==--==--==--
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample’s licensing information
Abstract:
An object that uses AVQueuePlayer to loop a video.
*/
// Marked changed code with ++
class QueuePlayerLooper: NSObject, Looper {
    // MARK: Types

    private struct ObserverContexts {
        static var playerStatus = 0
        static var playerStatusKey = "status"
        static var currentItem = 0
        static var currentItemKey = "currentItem"
        static var currentItemStatus = 0
        static var currentItemStatusKey = "currentItem.status"
        static var urlAssetDurationKey = "duration"
        static var urlAssetPlayableKey = "playable"
    }

    // MARK: Properties

    private var player: AVQueuePlayer?
    private var playerLayer: AVPlayerLayer?
    private var isObserving = false
    private var numberOfTimesPlayed = 0
    private let numberOfTimesToPlay: Int
    private let videoURL: URL
    ++var assetTimeRange: CMTimeRange!
    ++let composition = AVMutableComposition()
    ++var currentTrack: AVAssetTrack!

    // MARK: Looper

    required init(videoURL: URL, loopCount: Int, ++timeRange: CMTimeRange) {
        self.videoURL = videoURL
        self.numberOfTimesToPlay = loopCount
        ++self.assetTimeRange = timeRange
        super.init()
    }

    func start(in parentLayer: CALayer) {
        stop()
        player = AVQueuePlayer()
        playerLayer = AVPlayerLayer(player: player)
        guard let playerLayer = playerLayer else { fatalError("Error creating player layer") }
        playerLayer.frame = parentLayer.bounds
        parentLayer.addSublayer(playerLayer)

        let videoAsset = AVURLAsset(url: videoURL)
        ++let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        ++currentTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        ++try! compositionTrack.insertTimeRange(assetTimeRange, of: currentTrack, at: CMTimeMake(0, 1))

        videoAsset.loadValuesAsynchronously(forKeys: [ObserverContexts.urlAssetDurationKey, ObserverContexts.urlAssetPlayableKey]) {
            /*
                The asset invokes its completion handler on an arbitrary queue
                when loading is complete. Because we want to access our AVQueuePlayer
                in our ensuing set-up, we must dispatch our handler to the main
                queue.
            */
            DispatchQueue.main.async(execute: {
                var durationError: NSError?
                let durationStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetDurationKey, error: &durationError)
                guard durationStatus == .loaded else { fatalError("Failed to load duration property with error: \(durationError)") }

                var playableError: NSError?
                let playableStatus = videoAsset.statusOfValue(forKey: ObserverContexts.urlAssetPlayableKey, error: &playableError)
                guard playableStatus == .loaded else { fatalError("Failed to read playable duration property with error: \(playableError)") }

                guard videoAsset.isPlayable else {
                    print("Can't loop since asset is not playable")
                    return
                }
                guard CMTimeCompare(videoAsset.duration, CMTime(value: 1, timescale: 100)) >= 0 else {
                    print("Can't loop since asset duration too short. Duration is (\(CMTimeGetSeconds(videoAsset.duration)) seconds")
                    return
                }

                /*
                    Based on the duration of the asset, we decide the number of player
                    items to add to demonstrate gapless playback of the same asset.
                */
                let numberOfPlayerItems = (Int)(1.0 / CMTimeGetSeconds(videoAsset.duration)) + 2
                for _ in 1...numberOfPlayerItems {
                    let loopItem = AVPlayerItem(asset: ++self.composition)
                    self.player?.insert(loopItem, after: nil)
                }

                self.startObserving()
                self.numberOfTimesPlayed = 0
                self.player?.play()
            })
        }
    }
}
You can add a periodic time observer to monitor the current time:
let timeObserverToken = player.addPeriodicTimeObserver(forInterval: someInterval, queue: DispatchQueue.main) { [unowned self] time in
    let seconds = CMTimeGetSeconds(time)
    if seconds >= 8.0 {
        // jump back to 4 seconds
        // do stuff
    }
}
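Putting that together for the 4-to-8-second loop in the question, a minimal sketch (assuming `player` is the AVPlayer instance, with an illustrative 0.1-second polling interval):

// Sketch: poll playback time and jump back to 4 s whenever it passes 8 s.
let interval = CMTime(seconds: 0.1, preferredTimescale: 600)
let loopStart = CMTime(seconds: 4.0, preferredTimescale: 600)

let token = player.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main) { [weak player] time in
    if CMTimeGetSeconds(time) >= 8.0 {
        // Seek back to the start of the A-B section with tight tolerances.
        player?.seek(to: loopStart, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
    }
}
// Keep `token` alive and call player.removeTimeObserver(token) when the loop is no longer needed.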

AudioServicesPlaySystemSound sound volume dependency

I'm making an iOS keyboard extension.
To play the click sound, I used AudioServicesPlaySystemSound.
However, some users reported that the click sound sometimes follows the ringer volume (bell icon) and sometimes the media volume (speaker icon).
I tested with Apple's Memo app and found cases of the same inconsistent dependency.
Here is my initialization code:
func initTypeSound(soundIndex: Int) {
    let bundle = NSBundle.mainBundle()
    for var i = 0; i < MAX_TYPE_SOUND_COUNT; i++ {
        if let url = bundle.URLForResource(NSString(format: "type%d_%d", soundIndex, i) as String, withExtension: "wav") {
            // file exists
            var soundID: SystemSoundID = 0
            AudioServicesCreateSystemSoundID(url, &soundID)
            mTypeSoundIDs.insert(soundID, atIndex: i)
        } else {
            // no file
        }
    }
}
and the code to play a sound:
func play(soundType: KKSoundType) {
    if (!mHasPermission || !mIsSound) {
        return
    }
    let session = AVAudioSession.sharedInstance()
    let systemVolume = session.outputVolume
    if (systemVolume == 0) {
        return
    }
    var soundId: SystemSoundID!
    switch (soundType) {
    case .Type:
        let rand = Int(arc4random_uniform(UInt32(mTypeSoundIDs.count)))
        soundId = mTypeSoundIDs[rand]
        break
    case .Space:
        soundId = mSpaceSoundID
        break
    default:
        return
    }
    if mIsSound {
        AudioServicesPlaySystemSound(soundId)
    }
}
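As a debugging step (not a fix, and only an assumption about where the inconsistency comes from): a keyboard extension runs inside many different host apps, so the audio session category in effect can differ from host to host, which may be why the click sometimes tracks the ringer volume and sometimes the media volume. Logging the session state each time a click is played would help confirm or rule that out.

// Diagnostic sketch (Swift 2 era API, matching the code above): log the host
// app's audio session state to correlate it with the observed volume behaviour.
func logSessionState() {
    let session = AVAudioSession.sharedInstance()
    print("category: \(session.category), otherAudioPlaying: \(session.otherAudioPlaying), outputVolume: \(session.outputVolume)")
}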
