Here is the method inside a class:
import UIKit
import Foundation

class notMoving {
    var drumPlayerObject = drumPlayer()
    var fileManagerObject = fileManager1()
    let drumStrength = 1
    var bassStrength = 1
    var synthStrength = 1
    var indexToPlay: Int = 0

    // here we start the drum player.
    func startToPlay() {
        fileManagerObject.clearPlayedListDrum(drumStrength, KeyNoteOfInstDrum: "C")
        if let indexToPlay = fileManager1().randomizeTheNextInstrument(fileManager1().drums, Strength: drumStrength, KeyNote: "C") {
            fileManager1().drums[indexToPlay].4 = true
            self.indexToPlay = indexToPlay
        }
        let instrument = fileManager1().drums[self.indexToPlay].0
        let name = fileManager1().drums[self.indexToPlay].1
        let length = fileManager1().drums[self.indexToPlay].2
        let power = fileManager1().drums[self.indexToPlay].3
        let ifplayed = fileManager1().drums[self.indexToPlay].4
        let tempo = Double(fileManager1().drums[self.indexToPlay].5)
        let bridge: Bool = false
        let extention = fileManagerObject.extentionOfFile
        let loops = fileManager1().drumNumberOfLoops()
        drumPlayerObject.play(instrument, name: name, extentionOfFile: extention,
                              length: length, power: power, ifplayed: ifplayed,
                              tempo: tempo, loops: loops, bridge: bridge)
        fileManager1().clearPlayedListDrum(drumStrength, KeyNoteOfInstDrum: "C")
    }
}
And here is the AVAudioPlayerDelegate extension for the drumPlayer class:
extension drumPlayer: AVAudioPlayerDelegate {
    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        println("finished playing \(flag)")
        var notMovingObject = notMoving()
        notMovingObject.startToPlay()
    }

    func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer!, error: NSError!) {
        println("\(error.localizedDescription)")
    }
}
But audioPlayerDidFinishPlaying doesn't call the startToPlay method after the file finishes playing. It only prints "finished playing true".
What am I doing wrong?
Your notMovingObject is not retained anywhere, so the object is deallocated as soon as audioPlayerDidFinishPlaying() returns.
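A minimal sketch of that fix, assuming the stored property is added to the drumPlayer class body itself (extensions cannot add stored properties); only the retention pattern is shown, not the asker's full class:
class drumPlayer: NSObject, AVAudioPlayerDelegate {
    // Strong reference: keeps the object alive after the delegate method returns.
    var notMovingObject: notMoving?

    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        println("finished playing \(flag)")
        notMovingObject = notMoving()      // retained by self instead of a local variable
        notMovingObject?.startToPlay()
    }
}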
I found a solution by creating the instance with self.notMovingObject = notMoving() inside the "func play" of my player, so the instance is created only after the player starts.
When I have a bunch (20-40) of samples playing and overlapping each other simultaneously, it sometimes starts getting distorted, and then some waving, oscillating, and clicking begins to happen. A similar sound happens when the samples are playing and the app crashes: an abrupt, crunchy halt.
Notice the waviness begins between 0:05 and 0:10; nasty clicks start around 0:15.
Listen Here
How can I make it smoother? I am spawning AKPlayer objects (from AudioKit 4.1) that play 4-8 second .wav files. Those go into AKBoosters, which go into AKMixers, which go into the final AKMixer for output.
Edit:
Many PenAudioNodes get plugged into the mixer of the AudioReceiver singleton.
Here's my AudioReceiver singleton:
class AudioReceiver {

    static var sharedInstance = AudioReceiver()

    private var audioNodes = [UUID : AudioNode]()
    private let mixer = AKMixer()
    private let queue = DispatchQueue(label: "audio-queue")

    //MARK: - Setup & Teardown

    init() {
        AudioKit.output = mixer //peakLimiter
        AudioKit.start()
    }

    //MARK: - Public

    func audioNodeBegan(_ message : AudioNodeMessage) {
        queue.async {
            var audioNode: AudioNode?
            switch message.senderType {
            case .pen:
                audioNode = PenAudioNode()
            case .home:
                audioNode = LoopingAudioNode(with: AudioHelper.homeLoopFile())
            default:
                break
            }
            if let audioNode = audioNode {
                self.audioNodes[message.senderId] = audioNode
                self.mixer.connect(input: audioNode.output)
                audioNode.start(message)
            }
        }
    }

    func audioNodeMoved(_ message : AudioNodeMessage) {
        queue.async {
            if let audioNode = self.audioNodes[message.senderId] {
                audioNode.update(message)
            }
        }
    }

    func audioNodeEnded(_ message : AudioNodeMessage) {
        queue.async {
            if let audioNode = self.audioNodes[message.senderId] {
                audioNode.stop(message)
            }
            self.audioNodes[message.senderId] = nil
        }
    }
}
Here's my PenAudioNode:
class PenAudioNode {

    fileprivate var mixer: AKMixer?
    fileprivate var playersBoosters = [AKPlayer : AKBooster]()
    fileprivate var finalOutput: AKNode?
    fileprivate let file: AKAudioFile = AudioHelper.randomBellSampleFile()

    //MARK: - Setup & Teardown

    init() {
        mixer = AKMixer()
        finalOutput = mixer!
    }
}

extension PenAudioNode: AudioNode {

    var output: AKNode {
        return finalOutput!
    }

    func start(_ message: AudioNodeMessage) {
    }

    func update(_ message: AudioNodeMessage) {
        if let velocity = message.velocity {
            let newVolume = Swift.min((velocity / 50) + 0.1, 1)
            mixer!.volume = newVolume
        }
        if let isClimactic = message.isClimactic, isClimactic {
            let player = AKPlayer(audioFile: file)
            player.completionHandler = { [weak self] in
                self?.playerCompleted(player)
            }
            let booster = AKBooster(player)
            playersBoosters[player] = booster
            booster.rampTime = 1
            booster.gain = 0
            mixer!.connect(input: booster)
            player.play()
            booster.gain = 1
        }
    }

    func stop(_ message: AudioNodeMessage) {
        for (_, booster) in playersBoosters {
            booster.gain = 0
        }
        DispatchQueue.global().asyncAfter(deadline: DispatchTime.now() + 1) {
            self.mixer!.stop()
            self.output.disconnectOutput()
        }
    }

    private func playerCompleted(_ player: AKPlayer) {
        playersBoosters.removeValue(forKey: player)
    }
}
This sounds like you are not releasing objects and you are eventually overloading the audio engine with too many instances of processing nodes connected in the graph. In particular not releasing AKBoosters will cause an issue like this. I can't really tell what your code is doing, but if you are spawning objects without releasing them properly, it will lead to garbled audio.
You want to conserve objects as much as possible and make sure you are using the absolute minimum amount of AKNode-based processing.
There are various ways to debug this, but you can start by printing out the current state of the AVAudioEngine:
AudioKit.engine.description
That will show how many nodes you have connected in the graph at any given moment.
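As a rough sketch of both suggestions combined (not a drop-in fix, and the exact teardown calls vary between AudioKit versions), the completion handler in PenAudioNode could release the finished player's nodes with the same disconnectOutput() call the question's stop(_:) already uses, and log the engine graph to confirm the node count stops growing:
private func playerCompleted(_ player: AKPlayer) {
    if let booster = playersBoosters[player] {
        booster.disconnectOutput()          // detach the booster from the mixer
    }
    player.disconnectOutput()               // detach the finished player as well
    playersBoosters.removeValue(forKey: player)

    // Optional: inspect the graph to verify nodes are actually going away.
    print(AudioKit.engine.description)
}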
I want to allow background audio while the app is not in focus. I currently have this code, which should allow that:
do {
    try AKSettings.setSession(category: .playback, with: .mixWithOthers)
} catch {
    print("error")
}
AKSettings.playbackWhileMuted = true
I also have the 'Audio, AirPlay and Picture in Picture' background mode enabled in the Capabilities settings. However, when I press the home button on my device, the audio doesn't keep playing. What am I doing wrong? I am using AudioKit to produce sounds, if that matters.
I am using a singleton, named AudioPlayer, to house all of the AudioKit components. Here is what I have in AudioPlayer.swift:
class AudioPlayer: NSObject {

    var currentFrequency = String()
    var soundIsPlaying = false
    var leftOscillator = AKOscillator()
    var rightOscillator = AKOscillator()
    var rain = try! AKAudioFile()
    var rainPlayer: AKAudioPlayer!
    var envelope = AKAmplitudeEnvelope()

    override init() {
        super.init()

        do {
            try AKSettings.setSession(category: .playback, with: .mixWithOthers)
        } catch {
            print("error")
        }

        AKSettings.playbackWhileMuted = true

        AudioKit.output = envelope
        AudioKit.start()
    }

    func setupFrequency(left: AKOscillator, right: AKOscillator, frequency: String) {
        currentFrequency = frequency
        leftOscillator = left
        rightOscillator = right

        let leftPanner = AKPanner(leftOscillator)
        leftPanner.pan = -1

        let rightPanner = AKPanner(rightOscillator)
        rightPanner.pan = 1

        //Set up rain and rainPlayer
        do {
            rain = try AKAudioFile(readFileName: "rain.wav")
            rainPlayer = try AKAudioPlayer(file: rain, looping: true, deferBuffering: false, completionHandler: nil)
        } catch { print(error) }

        let mixer = AKMixer(leftPanner, rightPanner, rainPlayer)

        //Put mixer in sound envelope
        envelope = AKAmplitudeEnvelope(mixer)
        envelope.attackDuration = 2.0
        envelope.decayDuration = 0
        envelope.sustainLevel = 1
        envelope.releaseDuration = 0.2

        //Start AudioKit stuff
        AudioKit.output = envelope
        AudioKit.start()

        leftOscillator.start()
        rightOscillator.start()
        rainPlayer.start()
        envelope.start()

        soundIsPlaying = true
    }
}
And here is an example of one of my sound effect view controllers, which references the AudioKit singleton to send it a certain frequency (I have about a dozen of these view controllers, each with its own frequency settings):
class CalmView: UIViewController {

    let leftOscillator = AKOscillator()
    let rightOscillator = AKOscillator()

    override func viewDidLoad() {
        super.viewDidLoad()

        leftOscillator.amplitude = 0.3
        leftOscillator.frequency = 220
        rightOscillator.amplitude = 0.3
        rightOscillator.frequency = 230
    }

    @IBAction func playSound(_ sender: Any) {
        if shared.soundIsPlaying == false {
            AudioKit.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else if shared.soundIsPlaying == true && shared.currentFrequency != "Calm" {
            AudioKit.stop()
            shared.leftOscillator.stop()
            shared.rightOscillator.stop()
            shared.rainPlayer.stop()
            shared.envelope.stop()
            shared.setupFrequency(left: leftOscillator, right: rightOscillator, frequency: "Calm")
        } else {
            shared.soundIsPlaying = false
            shared.envelope.stop()
        }
    }
}
I instantiated the AudioPlayer singleton in my ViewController.swift file.
It depends on when you are doing your configuration in relation to when AudioKit is started. If you're using AudioKit, you should be using its AKSettings to manage your session category, which covers not only the playback category but also mixWithOthers. By default, setSession does this:
/// Set the audio session type
@objc open static func setSession(category: SessionCategory,
                                  with options: AVAudioSessionCategoryOptions = [.mixWithOthers]) throws {
So you'd do something like this in your ViewController:
do {
    if #available(iOS 10.0, *) {
        try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
    } else {
        // Fallback on earlier versions
    }
} catch {
    print("Errored setting category.")
}
So I think it's a matter of getting that straight. It might also help to have inter-app audio set up. If you still have trouble and provide more information, I can help more, but this is as good an answer as I can muster based on the info you've given so far.
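For the original question (audio continuing after the home button), a minimal sketch of that ordering, assuming the 'Audio, AirPlay and Picture in Picture' background mode stays enabled and that envelope is the final output node as in the posted singleton: configure the session once, before AudioKit.start().
do {
    AKSettings.playbackWhileMuted = true
    // .playback (rather than .playAndRecord) is the category that keeps playback alive in the background.
    try AKSettings.setSession(category: .playback, with: [.mixWithOthers])
} catch {
    print("Could not configure the audio session: \(error)")
}

AudioKit.output = envelope   // final node from the AudioPlayer singleton
AudioKit.start()             // start only after the session is configured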
I'm creating an app which needs an offline map. I'm testing with Mapbox, which added support for offline maps today (yay!). The code I have now seems to work for downloading the map, but the delegate that should report on progress never fires, and I don't have a clue why.
I have this class for my mapView:
import UIKit
import Mapbox

class MapController: UIViewController, MGLMapViewDelegate, UIPopoverPresentationControllerDelegate {

    @IBOutlet var mapView: MGLMapView!

    override func viewDidLoad() {
        super.viewDidLoad()
        downloadIfNeeded()
        mapView.maximumZoomLevel = 18
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func downloadIfNeeded() {
        MGLOfflineStorage.sharedOfflineStorage().getPacksWithCompletionHandler { (packs, error) in
            guard error == nil else {
                return
            }
            for pack in packs {
                let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
                if userInfo["name"] == "London" {
                    // already downloaded
                    return
                }
            }

            // define the download region
            let sw = CLLocationCoordinate2DMake(51.212120, 4.415906)
            let ne = CLLocationCoordinate2DMake(51.223781, 4.442401)
            let bounds = MGLCoordinateBounds(sw: sw, ne: ne)
            let region = MGLTilePyramidOfflineRegion(styleURL: MGLStyle.streetsStyleURL(), bounds: bounds, fromZoomLevel: 10, toZoomLevel: 12)
            let userInfo = ["name": "London"]
            let context = NSKeyedArchiver.archivedDataWithRootObject(userInfo)

            MGLOfflineStorage.sharedOfflineStorage().addPackForRegion(region, withContext: context) { (pack, error) in
                guard error == nil else {
                    return
                }

                // create popup window with delegate
                let storyboard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
                let downloadProgress: MapDownloadController = storyboard.instantiateViewControllerWithIdentifier("MapDownloadController") as! MapDownloadController
                downloadProgress.modalPresentationStyle = .Popover
                downloadProgress.preferredContentSize = CGSizeMake(300, 150)

                let popoverMapDownloadController = downloadProgress.popoverPresentationController
                popoverMapDownloadController?.permittedArrowDirections = .Any
                popoverMapDownloadController?.delegate = self
                popoverMapDownloadController?.sourceView = self.mapView
                popoverMapDownloadController?.sourceRect = CGRect(x: self.mapView.frame.midX, y: self.mapView.frame.midY, width: 1, height: 1)

                self.presentViewController(downloadProgress, animated: true, completion: nil)

                // set popup as delegate <----
                pack!.delegate = downloadProgress

                // start downloading
                pack!.resume()
            }
        }
    }
}
And the MapDownloadController is a view controller that is displayed as a popover (see the code above) and acts as the MGLOfflinePackDelegate:
import UIKit
import Mapbox

class MapDownloadController: UIViewController, MGLOfflinePackDelegate {

    @IBOutlet var progress: UIProgressView!
    @IBOutlet var progressText: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func offlinePack(pack: MGLOfflinePack, progressDidChange progress: MGLOfflinePackProgress) {
        // this function is never called, but why? <----
        let completed = progress.countOfResourcesCompleted
        let expected = progress.countOfResourcesExpected
        let bytes = progress.countOfBytesCompleted
        let MB = bytes / 1024
        let str: String = "\(completed)/\(expected) voltooid (\(MB)MB)"
        progressText.text = str
        self.progress.setProgress(Float(completed) / Float(expected), animated: true)
    }

    func offlinePack(pack: MGLOfflinePack, didReceiveError error: NSError) {
        // neither is this one... <----
        let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
        let strError = error.localizedFailureReason
    }

    func offlinePack(pack: MGLOfflinePack, didReceiveMaximumAllowedMapboxTiles maximumCount: UInt64) {
        // .. or this one <----
        let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
    }
}
This is all pretty much taken from the documentation, so why are the delegate's functions (func offlinePack) never called? I tested with breakpoints, so I am sure they are not. Still, the popup is shown and the region gets downloaded. (Checked by observing network traffic and with other code which lists the offline packs.)
Here’s an extremely simple implementation of Minh’s answer, using the current v3.2.0b1 example code. Expect this answer to become outdated quickly, as we’re still working on the v3.2.0 release.
import UIKit
import Mapbox

class ViewController: UIViewController, UIPopoverPresentationControllerDelegate, MGLOfflinePackDelegate {

    @IBOutlet var mapView: MGLMapView!

    // Array of offline packs for the delegate work around (and your UI, potentially)
    var offlinePacks = [MGLOfflinePack]()

    override func viewDidLoad() {
        super.viewDidLoad()
        mapView.maximumZoomLevel = 2
        downloadOffline()
    }

    func downloadOffline() {
        // Create a region that includes the current viewport and any tiles needed to view it when zoomed further in.
        let region = MGLTilePyramidOfflineRegion(styleURL: mapView.styleURL, bounds: mapView.visibleCoordinateBounds, fromZoomLevel: mapView.zoomLevel, toZoomLevel: mapView.maximumZoomLevel)

        // Store some data for identification purposes alongside the downloaded resources.
        let userInfo = ["name": "My Offline Pack"]
        let context = NSKeyedArchiver.archivedDataWithRootObject(userInfo)

        // Create and register an offline pack with the shared offline storage object.
        MGLOfflineStorage.sharedOfflineStorage().addPackForRegion(region, withContext: context) { (pack, error) in
            guard error == nil else {
                print("The pack couldn’t be created for some reason.")
                return
            }

            // Set the pack’s delegate (assuming self conforms to the MGLOfflinePackDelegate protocol).
            pack!.delegate = self

            // Start downloading.
            pack!.resume()

            // Retain reference to pack to work around it being lost and not sending delegate messages
            self.offlinePacks.append(pack!)
        }
    }

    func offlinePack(pack: MGLOfflinePack, progressDidChange progress: MGLOfflinePackProgress) {
        let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
        let completed = progress.countOfResourcesCompleted
        let expected = progress.countOfResourcesExpected
        print("Offline pack “\(userInfo["name"])” has downloaded \(completed) of \(expected) resources.")
    }

    func offlinePack(pack: MGLOfflinePack, didReceiveError error: NSError) {
        let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
        print("Offline pack “\(userInfo["name"])” received error: \(error.localizedFailureReason)")
    }

    func offlinePack(pack: MGLOfflinePack, didReceiveMaximumAllowedMapboxTiles maximumCount: UInt64) {
        let userInfo = NSKeyedUnarchiver.unarchiveObjectWithData(pack.context) as! [String: String]
        print("Offline pack “\(userInfo["name"])” reached limit of \(maximumCount) tiles.")
    }
}
(Cross-posted from this GitHub issue.)
This is a bug in the SDK. The workaround is for the completion handler to assign the passed-in MGLOfflinePack object to an ivar or other strong reference in the surrounding MapDownloadController class (example).
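Applied to the classes in the question, a sketch of that workaround could look like this (the pack property is illustrative, not part of the SDK):
// In MapDownloadController: hold the pack strongly so it keeps delivering delegate messages.
class MapDownloadController: UIViewController, MGLOfflinePackDelegate {
    var pack: MGLOfflinePack?
    // ... delegate methods exactly as in the question ...
}

// In MapController's completion handler, after presenting the popover:
pack!.delegate = downloadProgress
downloadProgress.pack = pack   // strong reference added for the workaround
pack!.resume()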
I'm using the script below as a countdown for my game start. The script is from Gourav Nayyar's YouTube video and works great the first time it is called. However, once the game goes through the reset process and the script is called again, I only see 5 rather than 5 - 4 - 3 - 2 - 1 - GO!. If I remove one of the calls from my script, then it works fine either in the reset func or when the GameScene loads.
Here are the two calls in GameScene.swift:
override func didMoveToView(view: SKView) {
    var gamelaunchTimerView: TimerView = TimerView.loadingCountDownTimerViewInView(self.view!)
    gamelaunchTimerView.startTimer()
}

func resetScene() {
    //code removed from here
    return countdown()
}

func countdown() {
    var gamelaunchTimerView: TimerView = TimerView.loadingCountDownTimerViewInView(self.view!)
    gamelaunchTimerView.startTimer()
}
Here is the timer code in GameLaunchTimer.swift. As it is set up, the countdown only works when first called and hangs on the second call.
//
//  TimerView.swift
//  GameLaunchTimer
//
//  Created by Gourav Nayyar on 7/3/14.
//  Copyright (c) 2014 Gourav Nayyar. All rights reserved.
//

let VIEW_ALPHA: CGFloat = 0.5
let TIMERVIEW_RADIUS: CGFloat = 50
let TIMER_LABEL_INITIAL_VAL: Int = 5
let BORDER_WIDTH: CGFloat = 2

var timerVal: Int = TIMER_LABEL_INITIAL_VAL
var timer: NSTimer!

import UIKit
import QuartzCore

class TimerView: UIView {

    struct Stored {
        static var timerLbl: UILabel!
    }

    class func loadingCountDownTimerViewInView(_superView: UIView) -> TimerView {
        var timerView: TimerView = TimerView(frame: _superView.frame)
        // timerView.backgroundColor = UIColor.blackColor().colorWithAlphaComponent(VIEW_ALPHA)
        _superView.addSubview(timerView)

        /* add a custom Circle view */
        let refFrame: CGRect = CGRectMake(_superView.center.x - TIMERVIEW_RADIUS, _superView.center.y - TIMERVIEW_RADIUS, 2 * TIMERVIEW_RADIUS, 2 * TIMERVIEW_RADIUS)
        var circleView: UIView = UIView(frame: refFrame)
        circleView.layer.cornerRadius = TIMERVIEW_RADIUS
        circleView.layer.borderColor = UIColor.whiteColor().CGColor
        circleView.layer.borderWidth = BORDER_WIDTH

        /* add a custom Label */
        Stored.timerLbl = UILabel(frame: circleView.bounds)
        Stored.timerLbl.text = "\(TIMER_LABEL_INITIAL_VAL)"
        Stored.timerLbl.textColor = UIColor.whiteColor()
        Stored.timerLbl.font = UIFont(name: "MarkerFelt-Thin", size: 40)
        Stored.timerLbl.textAlignment = NSTextAlignment.Center
        circleView.addSubview(Stored.timerLbl)

        timerView.addSubview(circleView)
        return timerView
    }

    func startTimer() {
        timer = NSTimer.scheduledTimerWithTimeInterval(1.0,
            target: self, selector: Selector("updateTimer:"), userInfo: nil, repeats: true)
    }

    func updateTimer(dt: NSTimer) {
        timerVal--
        if timerVal == 0 {
            Stored.timerLbl.text = "GO!"
        } else if timerVal < 0 {
            timer.invalidate()
            removeCountDownTimerView()
        } else {
            Stored.timerLbl.text = "\(timerVal)"
        }
    }

    func removeCountDownTimerView() {
        var mySuperView: UIView = self.superview!
        mySuperView.userInteractionEnabled = true
        super.removeFromSuperview()
    }
}
Define your variables inside the class body:
import UIKit
import QuartzCore

class TimerView: UIView {

    let VIEW_ALPHA: CGFloat = 0.5
    let TIMERVIEW_RADIUS: CGFloat = 50
    let TIMER_LABEL_INITIAL_VAL: Int = 5
    let BORDER_WIDTH: CGFloat = 2

    var timerVal: Int = TIMER_LABEL_INITIAL_VAL
    var timer: NSTimer!

    ... // other code
or maybe:
let VIEW_ALPHA: CGFloat = 0.5
let TIMERVIEW_RADIUS: CGFloat = 50
let TIMER_LABEL_INITIAL_VAL: Int = 5
let BORDER_WIDTH: CGFloat = 2

import UIKit
import QuartzCore

class TimerView: UIView {

    var timerVal: Int = TIMER_LABEL_INITIAL_VAL
    var timer: NSTimer!

    ... //other code
Assign nil to the timer after invalidating it: even though you invalidate it, the object's state is still kept, which results in state conflicts when you create a new instance of the timer, since it runs on a different thread.
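A minimal sketch of that suggestion applied to the posted updateTimer(_:) branch (names as in the question):
} else if timerVal < 0 {
    timer.invalidate()
    timer = nil          // release the invalidated timer so the next startTimer() begins from a clean state
    removeCountDownTimerView()
}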
Here is my playground code:
import AVFoundation

var speechsynth: AVSpeechSynthesizer = AVSpeechSynthesizer()

let wordsToSpeak = ["word one", "word two", "word three", "word four"]
let endTime = NSDate().dateByAddingTimeInterval(10)

while endTime.timeIntervalSinceNow > 0 {
    //workaround for iOS8 bug
    var beforeSpeechString: String = " "
    var beforeSpeech: AVSpeechUtterance = AVSpeechUtterance(string: beforeSpeechString)
    speechsynth.speakUtterance(beforeSpeech)

    //real string to speak
    var speechString: String = wordsToSpeak[0]
    var nextSpeech: AVSpeechUtterance = AVSpeechUtterance(string: speechString)
    nextSpeech.voice = AVSpeechSynthesisVoice(language: "en-US")
    nextSpeech.rate = AVSpeechUtteranceMinimumSpeechRate
    speechsynth.speakUtterance(nextSpeech)
}
Currently, the speaking starts AFTER the while loop completes.
How can I make it speak DURING each iteration and finish speaking before moving on to the next iteration of the loop?
Treat each word as a task and trigger the next task in the delegate's didFinishSpeechUtterance method.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVSpeechSynthesizerDelegate {

    var queue: dispatch_queue_t = dispatch_queue_create("com.test.whatever.queue", DISPATCH_QUEUE_SERIAL)
    var index: Int = 0
    let words: [String] = ["word one", "word two", "word three", "word four"]
    var speechsynth: AVSpeechSynthesizer = AVSpeechSynthesizer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        speechsynth.delegate = self
        self.speechCurrentWord()
    }

    func speechCurrentWord() {
        dispatch_async(queue, { () -> Void in
            var beforeSpeechString: String = " "
            var beforeSpeech: AVSpeechUtterance = AVSpeechUtterance(string: beforeSpeechString)
            self.speechsynth.speakUtterance(beforeSpeech)

            var nextSpeech: AVSpeechUtterance = AVSpeechUtterance(string: self.words[self.index])
            nextSpeech.voice = AVSpeechSynthesisVoice(language: "en-US")
            nextSpeech.rate = AVSpeechUtteranceMinimumSpeechRate
            self.speechsynth.speakUtterance(nextSpeech)
        })
    }

    func speechSynthesizer(synthesizer: AVSpeechSynthesizer!, didFinishSpeechUtterance utterance: AVSpeechUtterance!) {
        index++
        if index < self.words.count {
            self.speechCurrentWord()
        }
    }
}