I have background music in my game. I tried to make a method to play and pause the music.
My app crashes when I press on the play/pause music button.
I don't understand why it's not working.
The method in the Main Scene: (Edited)
var SoundOnOff = SKSpriteNode(imageNamed: "MusicOn.png")

if (SoundOnOff.containsPoint(location)) {
    if BackgroundMusic.sharedHelper.isMuted() {
        //BackgroundMusic.sharedHelper.mute()
        BackgroundMusic.sharedHelper.pause()
        self.SoundOnOff.texture = SKTexture(imageNamed: "MusicOff.png")
        print("Music Off!")
    }
    else {
        //BackgroundMusic.sharedHelper.unmute()
        BackgroundMusic.sharedHelper.resume()
        self.SoundOnOff.texture = SKTexture(imageNamed: "MusicOn.png")
        print("Music On!")
    }
}
BackgroundMusic Class (Edited)
import AVFoundation

class BackgroundMusic: NSObject {

    internal let localDefaults = NSUserDefaults.standardUserDefaults()
    static let sharedHelper = BackgroundMusic()

    var BgMusic: AVAudioPlayer?

    /// Keys
    internal struct Key {
        static let muted = "MusicMuteState"
    }

    override init() {
        super.init()
        print("Music helper init")
        playBackgroundMusic()
        if isMuted() {
            mute()
        }
    }

    func playBackgroundMusic() {
        let aSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("Secrets of the Schoolyard", ofType: "mp3")!)
        do {
            BgMusic = try AVAudioPlayer(contentsOfURL: aSound)
            BgMusic!.numberOfLoops = -1
            BgMusic!.prepareToPlay()
            BgMusic!.play()
        } catch {
            print("Cannot play the file")
        }
    }

    func mute() {
        BgMusic!.volume = 0
        localDefaults.setBool(true, forKey: Key.muted)
    }

    /// Unmute
    func unmute() {
        BgMusic!.volume = 1
        localDefaults.setBool(false, forKey: Key.muted)
    }

    // Check mute state
    func isMuted() -> Bool {
        if localDefaults.boolForKey(Key.muted) {
            return true
        } else {
            return false
        }
    }
}
Your class is a subclass of AVAudioPlayer, but you're not actually using your own instance to play the music. When you use BackgroundMusic.sharedHelper.playing, you're referencing an instance of your class, not the BgMusic player that is actually playing. Since your class hasn't been initialized with any sound file, I presume it cannot properly handle .playing. (See the sketch after the links below.)
I recently answered a similar question. I am also using my own music helper, which is similar to yours. There are a few things you should do differently in your helper.
On and off music/sfx for sprite kit
GitHub: https://github.com/crashoverride777/Swift2-SpriteKit-Music-Helper
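For illustration only, here is a minimal sketch (in the Swift 2 syntax of the question) of forwarding the playing check to the actual BgMusic player instead of to the helper object itself; this is an assumption about one possible fix, not the API of the linked helper:

extension BackgroundMusic {
    /// Whether the underlying AVAudioPlayer is actually playing,
    /// as opposed to any property of the helper object itself.
    var isPlayingMusic: Bool {
        return BgMusic?.playing ?? false
    }
}

// In the scene, check the real player and use the mute/unmute
// methods that the helper actually defines:
if BackgroundMusic.sharedHelper.isPlayingMusic {
    BackgroundMusic.sharedHelper.mute()
} else {
    BackgroundMusic.sharedHelper.unmute()
}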
Related
The environment for this is iOS 13.6 and Swift 5. I have a very simple app that successfully plays an MP3 file in the foreground or background. I added MPRemoteCommandCenter play and pause command handlers to it. I play the sound file in the foreground and then pause it.
When I tap the play button from the lock screen, my code calls audioPlayer.play(), which returns true. I hear the sound start playing again, but the currentTime of the player does not advance. After that, the play and pause buttons on the lock screen do nothing. When I foreground the app again, the play button plays from where it was before I went to the lock screen.
Here is my AudioPlayer class:
import AVFoundation
import MediaPlayer

class AudioPlayer: RemoteAudioCommandDelegate {

    var audioPlayer = AVAudioPlayer()
    let remoteCommandHandler = RemoteAudioCommandHandler()
    var timer: Timer!

    func play(title: String) {
        let path = Bundle.main.path(forResource: title, ofType: "mp3")!
        let url = URL(fileURLWithPath: path)
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
            try AVAudioSession.sharedInstance().setActive(true)
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            remoteCommandHandler.delegate = self
            remoteCommandHandler.enableDisableRemoteCommands(true)
            timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateNowPlayingInfo), userInfo: nil, repeats: true)
        } catch let error as NSError {
            print("error = \(error)")
        }
    }

    func play() {
        print("audioPlayer.play() returned \(audioPlayer.play())")
    }

    func pause() {
        audioPlayer.pause()
    }

    func stop() {
        audioPlayer.stop()
    }

    func currentTime() -> TimeInterval {
        return audioPlayer.currentTime
    }

    func setCurrentTime(_ time: TimeInterval) {
        audioPlayer.currentTime = time
    }

    @objc func updateNowPlayingInfo() {
        // Hard-code the nowPlayingInfo since this is a simple test app
        var nowPlayingDict =
            [MPMediaItemPropertyTitle: "Tin Man",
             MPMediaItemPropertyAlbumTitle: "The Complete Greatest Hits",
             MPMediaItemPropertyAlbumTrackNumber: NSNumber(value: UInt(10) as UInt),
             MPMediaItemPropertyArtist: "America",
             MPMediaItemPropertyPlaybackDuration: 208,
             MPNowPlayingInfoPropertyPlaybackRate: NSNumber(value: 1.0 as Float)] as [String: Any]
        nowPlayingDict[MPNowPlayingInfoPropertyElapsedPlaybackTime] = NSNumber(value: audioPlayer.currentTime as Double)
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingDict
    }
}
Here is my RemoteCommandHandler class:
import Foundation
import MediaPlayer

protocol RemoteAudioCommandDelegate: class {
    func play()
    func pause()
}

class RemoteAudioCommandHandler: NSObject {

    weak var delegate: RemoteAudioCommandDelegate?

    var remoteCommandCenter = MPRemoteCommandCenter.shared()
    var playTarget: Any? = nil
    var pauseTarget: Any? = nil

    func enableDisableRemoteCommands(_ enabled: Bool) {
        print("Called with enabled = \(enabled)")
        remoteCommandCenter.playCommand.isEnabled = enabled
        remoteCommandCenter.pauseCommand.isEnabled = enabled
        if enabled {
            addRemoteCommandHandlers()
        } else {
            removeRemoteCommandHandlers()
        }
    }

    fileprivate func addRemoteCommandHandlers() {
        print("Entered")
        if playTarget == nil {
            print("Adding playTarget")
            playTarget = remoteCommandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
                print("addRemoteCommandHandlers calling delegate play")
                self.delegate?.play()
                return .success
            }
        }
        if pauseTarget == nil {
            print("Adding pauseTarget")
            pauseTarget = remoteCommandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
                print("addRemoteCommandHandlers calling delegate pause")
                self.delegate?.pause()
                return .success
            }
        }
    }

    fileprivate func removeRemoteCommandHandlers() {
        print("Entered")
        if playTarget != nil {
            print("Removing playTarget")
            remoteCommandCenter.playCommand.removeTarget(playTarget)
            playTarget = nil
        }
        if pauseTarget != nil {
            print("Removing pauseTarget")
            remoteCommandCenter.pauseCommand.removeTarget(pauseTarget)
            pauseTarget = nil
        }
    }
}
I will gladly supply further required info, because I'm baffled at why this relatively straightforward (in my mind) code doesn't work.
Assistance is much appreciated!
After some more debugging, I found that the AVAudioPlayer started to play the sound from the lock screen, but stopped again shortly after.
I mitigated the problem by adding a Timer. The timer checks whether the last command from the user was play while the sound is not playing, and restarts playback if so. I also update that status when the user selects pause or when the song stops playing at its natural end.
I am still at a loss for an actual fix for this problem.
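For reference, here is a rough sketch of that workaround inside the AudioPlayer class above; the lastCommandWasPlay flag, the watchdogTimer property, and the startPlaybackWatchdog name are hypothetical additions, not part of the original code:

// Hypothetical watchdog: if the user's last command was play but the player
// has silently stopped (as happens after the lock-screen play), restart it.
var lastCommandWasPlay = false
var watchdogTimer: Timer?

func startPlaybackWatchdog() {
    watchdogTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
        guard let self = self else { return }
        if self.lastCommandWasPlay && !self.audioPlayer.isPlaying {
            self.audioPlayer.play()
        }
    }
}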
I want to know how to get the state of my player (AVPlayer) (buffering, playing, stopped, error) and update the UI according to those states, including the player on the lock screen. How exactly should I do it?
I have a label that may contain:
"Buffering...", "Playing", "Stopped" or "Error".
Basically, I have the following:
MediaPlayer:
import Foundation
import AVFoundation

class MediaPlayer {

    static let sharedInstance = MediaPlayer()

    fileprivate var player = AVPlayer(url: URL(string: "my_hls_stream_url_here")!)
    fileprivate var isPlaying = false

    func play() {
        player.play()
        isPlaying = true
    }

    func pause() {
        player.pause()
        isPlaying = false
    }

    func toggle() {
        if isPlaying == true {
            pause()
        } else {
            play()
        }
    }

    func currentlyPlaying() -> Bool {
        return isPlaying
    }
}
PlayerViewController:
class PlayerViewController: UIViewController {

    @IBOutlet weak var label: UILabel!

    @IBAction func playStopButtonAction(_ sender: UIButton) {
        MediaPlayer.sharedInstance.toggle()
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        label.text = "Disconnected"

        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
            try AVAudioSession.sharedInstance().setActive(true)
            print("Audio session ok\n")
        } catch {
            print("Error: Audio session.\n")
        }

        // Show only play/pause button on the lock screen
        if #available(iOS 9.1, *) {
            let center = MPRemoteCommandCenter.shared()

            [center.previousTrackCommand, center.nextTrackCommand, center.seekForwardCommand, center.seekBackwardCommand, center.skipForwardCommand, center.skipBackwardCommand, center.ratingCommand, center.changePlaybackRateCommand, center.likeCommand, center.dislikeCommand, center.bookmarkCommand, center.changePlaybackPositionCommand].forEach {
                $0.isEnabled = false
            }

            center.togglePlayPauseCommand.addTarget { (commandEvent) -> MPRemoteCommandHandlerStatus in
                MediaPlayer.sharedInstance.toggle()
                return MPRemoteCommandHandlerStatus.success
            }

            center.playCommand.addTarget { (commandEvent) -> MPRemoteCommandHandlerStatus in
                MediaPlayer.sharedInstance.play()
                return MPRemoteCommandHandlerStatus.success
            }

            center.pauseCommand.addTarget { (commandEvent) -> MPRemoteCommandHandlerStatus in
                MediaPlayer.sharedInstance.pause()
                return MPRemoteCommandHandlerStatus.success
            }
        } else {
            // Fallback on earlier versions
            print("Error (MPRemoteCommandCenter)")
        }
    }

    override func remoteControlReceived(with event: UIEvent?) {
        guard let event = event else {
            print("No event\n")
            return
        }
        guard event.type == UIEventType.remoteControl else {
            print("Another event received\n")
            return
        }
        switch event.subtype {
        case UIEventSubtype.remoteControlPlay:
            print("'Play' event received\n")
        case UIEventSubtype.remoteControlPause:
            print("'Pause' event received\n")
        case UIEventSubtype.remoteControlTogglePlayPause:
            print("'Toggle' event received\n")
        default:
            print("\(event.subtype)\n")
        }
    }
}
I think you could use the timeControlStatus property of AVPlayer. According to the docs it can be .paused, .waitingToPlayAtSpecifiedRate (which is basically what you call buffering), or .playing.
If you really need the error state, you could observe the error property or check whether the status property is set to .failed.
A simple KVO observer on these properties would do the trick.
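A minimal sketch of such an observer, using block-based KVO in current Swift; updating the label from the question is just an assumption about how the UI is wired up:

private var stateObservation: NSKeyValueObservation?

func observeState(of player: AVPlayer) {
    stateObservation = player.observe(\.timeControlStatus, options: [.initial, .new]) { [weak self] player, _ in
        switch player.timeControlStatus {
        case .waitingToPlayAtSpecifiedRate:
            self?.label.text = "Buffering..."
        case .playing:
            self?.label.text = "Playing"
        case .paused:
            self?.label.text = "Stopped"
        @unknown default:
            break
        }
    }
}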
A place to start could be the AVPlayer status property. It is an enumeration that contains the following values (taken directly from the documentation):
'unknown': Indicates that the status of the player is not yet known because it has not tried to load new media resources for playback.
'readyToPlay': Indicates that the player is ready to play AVPlayerItem instances.
'failed': Indicates that the player can no longer play AVPlayerItem instances because of an error.
As to how you could tell whether the content is actually playing, you could use boolean checks, as you have already partially implemented. For pausing and stopping, you could keep the item loaded for pause and release it for stop; that way you can differentiate the two.
For buffering, if the status is not yet readyToPlay (and has not failed), that theoretically means an item is attached but is not quite ready to play, i.e. buffering.
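And a similar sketch for observing status and the associated error; again, the label handling is only an assumption about the UI:

private var statusObservation: NSKeyValueObservation?

func observeStatus(of player: AVPlayer) {
    statusObservation = player.observe(\.status, options: [.initial, .new]) { [weak self] player, _ in
        switch player.status {
        case .unknown:
            self?.label.text = "Buffering..."
        case .readyToPlay:
            self?.label.text = "Playing"
        case .failed:
            self?.label.text = "Error"
            print(player.error?.localizedDescription ?? "Unknown error")
        @unknown default:
            break
        }
    }
}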
How can I stop two different sounds from playing at the same time? When music is playing and another button is pressed, its audio plays at the same time. Sorry for my English.
NoiseMaker:
import AVFoundation

class NoiseMaker {

    let audioFileNames = ["guitar", "applause", "monster", "bubbles"]
    let players: [AVAudioPlayer?]

    init() {
        players = audioFileNames.map { filename in
            if let url = NSBundle.mainBundle().URLForResource(filename, withExtension: "wav") {
                return try? AVAudioPlayer(contentsOfURL: url)
            } else {
                return nil
            }
        }
    }

    func play(index: Int) {
        if !players.isEmpty && index >= 0 && index < players.count {
            players[index]?.play()
        }
    }
}
ViewController:
import UIKit

class ViewController: UIViewController {

    let noiseMaker = NoiseMaker()

    @IBAction func playSound(sender: UIButton) {
        noiseMaker.play(sender.tag)
    }
}
Add an instance variable for the currently playing sound. When you tap a button, send a stop message to the currently playing sound, assign the new sound to the instance variable, and then start that sound playing.
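For example, a brief sketch of that idea applied to the NoiseMaker class above; the currentPlayer property is an illustrative addition, not part of the original code:

// Keep a reference to whichever player was started last.
private var currentPlayer: AVAudioPlayer?

func play(index: Int) {
    if index >= 0 && index < players.count {
        currentPlayer?.stop()           // stop whatever is still playing
        currentPlayer = players[index]
        currentPlayer?.currentTime = 0  // rewind in case it was stopped mid-sound
        currentPlayer?.play()
    }
}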
I'm trying to get multiple sound files to play through an AVAudioPlayer instance; however, when one sound plays, the other stops. I can't get more than one sound to play at a time. Here is my code:
import AVFoundation

class GSAudio {

    static var instance: GSAudio!

    var soundFileNameURL: NSURL = NSURL()
    var soundFileName = ""
    var soundPlay = AVAudioPlayer()

    func playSound(soundFile: String) {
        GSAudio.instance = self

        soundFileName = soundFile
        soundFileNameURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(soundFileName, ofType: "aif", inDirectory: "Sounds")!)

        do {
            try soundPlay = AVAudioPlayer(contentsOfURL: soundFileNameURL)
        } catch {
            print("Could not play sound file!")
        }
        soundPlay.prepareToPlay()
        soundPlay.play()
    }
}
Can anyone please help me by telling me how to get more than one sound file to play at a time? Any help is much appreciated.
Many thanks,
Kai
The reason the audio stops is that you only have one AVAudioPlayer set up, so when you ask the class to play another sound you replace the old instance of AVAudioPlayer with a new one. You are basically overwriting it.
You can either create two instances of the GSAudio class and call playSound on each of them, or make the class a generic audio manager that uses a dictionary of audio players.
I much prefer the latter option, as it allows for cleaner code and is also more efficient. For example, you can check whether you have already made a player for a sound, rather than making a new player every time.
Anyway, I re-made your class for you so that it will play multiple sounds at once. It can also play the same sound over itself (it doesn't replace the previous instance of the sound). Hope it helps!
The class is a singleton, so to access the class use:
GSAudio.sharedInstance
for example, to play a sound you would call:
GSAudio.sharedInstance.playSound("AudioFileName")
and to play a number of sounds at once:
GSAudio.sharedInstance.playSounds("AudioFileName1", "AudioFileName2")
or you could load up the sounds in an array somewhere and call the playSounds function that accepts an array:
let sounds = ["AudioFileName1", "AudioFileName2"]
GSAudio.sharedInstance.playSounds(sounds)
I also added a playSounds function that allows you to delay each sound being played in a cascade kind of format. So:
let soundFileNames = ["SoundFileName1", "SoundFileName2", "SoundFileName3"]
GSAudio.sharedInstance.playSounds(soundFileNames, withDelay: 1.0)
would play sound2 a second after sound1, then sound3 would play a second after sound2 etc.
Here is the class:
class GSAudio: NSObject, AVAudioPlayerDelegate {

    static let sharedInstance = GSAudio()

    private override init() {}

    var players = [NSURL: AVAudioPlayer]()
    var duplicatePlayers = [AVAudioPlayer]()

    func playSound(soundFileName: String) {
        let soundFileNameURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(soundFileName, ofType: "aif", inDirectory: "Sounds")!)

        if let player = players[soundFileNameURL] { // player for sound has been found
            if player.playing == false { // player is not in use, so use that one
                player.prepareToPlay()
                player.play()
            } else { // player is in use, create a new, duplicate, player and use that instead
                // use 'try!' because we know the URL worked before
                let duplicatePlayer = try! AVAudioPlayer(contentsOfURL: soundFileNameURL)
                // assign delegate for duplicatePlayer so delegate can remove the duplicate once it's stopped playing
                duplicatePlayer.delegate = self
                // add duplicate to array so it doesn't get removed from memory before finishing
                duplicatePlayers.append(duplicatePlayer)
                duplicatePlayer.prepareToPlay()
                duplicatePlayer.play()
            }
        } else { // player has not been found, create a new player with the URL if possible
            do {
                let player = try AVAudioPlayer(contentsOfURL: soundFileNameURL)
                players[soundFileNameURL] = player
                player.prepareToPlay()
                player.play()
            } catch {
                print("Could not play sound file!")
            }
        }
    }

    func playSounds(soundFileNames: [String]) {
        for soundFileName in soundFileNames {
            playSound(soundFileName)
        }
    }

    func playSounds(soundFileNames: String...) {
        for soundFileName in soundFileNames {
            playSound(soundFileName)
        }
    }

    func playSounds(soundFileNames: [String], withDelay: Double) { // withDelay is in seconds
        for (index, soundFileName) in soundFileNames.enumerate() {
            let delay = withDelay * Double(index)
            let _ = NSTimer.scheduledTimerWithTimeInterval(delay, target: self, selector: #selector(playSoundNotification(_:)), userInfo: ["fileName": soundFileName], repeats: false)
        }
    }
    // Note: NSTimer passes itself to the selector, so the file name is read from the timer's userInfo.
    func playSoundNotification(timer: NSTimer) {
        if let soundFileName = (timer.userInfo as? [String: String])?["fileName"] {
            playSound(soundFileName)
        }
    }
    func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
        // Remove the duplicate player once it is done
        duplicatePlayers.removeAtIndex(duplicatePlayers.indexOf(player)!)
    }
}
Here's a Swift 4 version of @Oliver Wilkinson's code with some safety checks and improved code formatting:
import Foundation
import AVFoundation
class GSAudio: NSObject, AVAudioPlayerDelegate {

    static let sharedInstance = GSAudio()

    private override init() { }

    var players: [URL: AVAudioPlayer] = [:]
    var duplicatePlayers: [AVAudioPlayer] = []

    func playSound(soundFileName: String) {
        guard let bundle = Bundle.main.path(forResource: soundFileName, ofType: "aac") else { return }
        let soundFileNameURL = URL(fileURLWithPath: bundle)

        if let player = players[soundFileNameURL] { // player for sound has been found
            if !player.isPlaying { // player is not in use, so use that one
                player.prepareToPlay()
                player.play()
            } else { // player is in use, create a new, duplicate, player and use that instead
                do {
                    let duplicatePlayer = try AVAudioPlayer(contentsOf: soundFileNameURL)
                    // assign delegate for duplicatePlayer so delegate can remove the duplicate once it's stopped playing
                    duplicatePlayer.delegate = self
                    // add duplicate to array so it doesn't get removed from memory before finishing
                    duplicatePlayers.append(duplicatePlayer)
                    duplicatePlayer.prepareToPlay()
                    duplicatePlayer.play()
                } catch let error {
                    print(error.localizedDescription)
                }
            }
        } else { // player has not been found, create a new player with the URL if possible
            do {
                let player = try AVAudioPlayer(contentsOf: soundFileNameURL)
                players[soundFileNameURL] = player
                player.prepareToPlay()
                player.play()
            } catch let error {
                print(error.localizedDescription)
            }
        }
    }

    func playSounds(soundFileNames: [String]) {
        for soundFileName in soundFileNames {
            playSound(soundFileName: soundFileName)
        }
    }

    func playSounds(soundFileNames: String...) {
        for soundFileName in soundFileNames {
            playSound(soundFileName: soundFileName)
        }
    }

    func playSounds(soundFileNames: [String], withDelay: Double) { // withDelay is in seconds
        for (index, soundFileName) in soundFileNames.enumerated() {
            let delay = withDelay * Double(index)
            let _ = Timer.scheduledTimer(timeInterval: delay, target: self, selector: #selector(playSoundNotification(_:)), userInfo: ["fileName": soundFileName], repeats: false)
        }
    }
    // Note: Timer passes itself to the selector, so the file name is read from the timer's userInfo.
    @objc func playSoundNotification(_ timer: Timer) {
        if let soundFileName = (timer.userInfo as? [String: String])?["fileName"] {
            playSound(soundFileName: soundFileName)
        }
    }
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        if let index = duplicatePlayers.index(of: player) {
            duplicatePlayers.remove(at: index)
        }
    }
}
I have created a helper library that simplifies playing sounds in Swift. It creates multiple instances of AVAudioPlayer to allow playing the same sound multiple times concurrently. You can download it from GitHub or import it with CocoaPods.
Here is the link: SwiftySound
The usage is as simple as it can be:
Sound.play(file: "sound.mp3")
All answers are posting pages of code; it doesn't need to be that complicated.
// Create a new player for the sound; it doesn't matter which sound file this is.
// (Assumes a soundPlayers: [AVAudioPlayer] array declared elsewhere to keep the players alive.)
let soundPlayer = try AVAudioPlayer(contentsOf: url)
soundPlayer.numberOfLoops = 0
soundPlayer.volume = 1
soundPlayer.play()
soundPlayers.append(soundPlayer)

// In a timer-based loop or other callback, such as a display link, prune out players that are done, thus deallocating them.
checkSfx: for player in soundPlayers {
    if player.isPlaying { continue } else {
        if let index = soundPlayers.index(of: player) {
            soundPlayers.remove(at: index)
            break checkSfx
        }
    }
}
Swift 5+
Compiling some of the previous answers, with improved code style and reusability:
I usually avoid loose strings throughout my projects and instead use custom protocols for objects that will hold those string properties.
I prefer this to the enum approach simply because enumerations tend to couple your project together quite quickly. Every time you add a new case you must edit the same file containing the enumeration, somewhat breaking the Open/Closed principle from SOLID and increasing the chance of error.
In this particular case, you could have a protocol that defines sounds:
protocol Sound {
    func getFileName() -> String
    func getFileExtension() -> String
    func getVolume() -> Float
    func isLoop() -> Bool
}

extension Sound {
    func getVolume() -> Float { 1 }
    func isLoop() -> Bool { false }
}
And when you need a new sound you can simply create a new structure or class that implements this protocol. (It will even be suggested by autocomplete if your IDE, such as Xcode, supports it, giving you benefits similar to those of an enumeration... and it works far better in medium to large multi-framework projects.)
(Usually I leave volume and other configurations with default implementations, as they are customized less frequently.)
For instance, you could have a coin drop sound:
struct CoinDropSound: Sound {
    func getFileName() -> String { "coin_drop" }
    func getFileExtension() -> String { "wav" }
}
Then you could use a singleton SoundManager that takes care of playing the audio files:
import AVFAudio

final class SoundManager: NSObject, AVAudioPlayerDelegate {

    static let shared = SoundManager()

    private var audioPlayers: [URL: AVAudioPlayer] = [:]
    private var duplicateAudioPlayers: [AVAudioPlayer] = []

    private override init() {}
    func play(sound: Sound) {
        let fileName = sound.getFileName()
        let fileExtension = sound.getFileExtension()
        guard let url = Bundle.main.url(forResource: fileName, withExtension: fileExtension),
              let player = getAudioPlayer(for: url) else { return }
        player.volume = sound.getVolume()
        player.numberOfLoops = sound.isLoop() ? -1 : 0
        player.prepareToPlay()
        player.play()
    }
    private func getAudioPlayer(for url: URL) -> AVAudioPlayer? {
        guard let player = audioPlayers[url] else {
            let player = try? AVAudioPlayer(contentsOf: url)
            audioPlayers[url] = player
            return player
        }
        guard player.isPlaying else { return player }
        guard let duplicatePlayer = try? AVAudioPlayer(contentsOf: url) else { return nil }
        duplicatePlayer.delegate = self
        duplicateAudioPlayers.append(duplicatePlayer)
        return duplicatePlayer
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        duplicateAudioPlayers.removeAll { $0 == player }
    }
}
Here I created a helper, getAudioPlayer, to be able to return early from code execution and make use of guard let.
Using guard let more often and preferring less nested code can, most of the time, greatly improve readability.
To use this SoundManager from anywhere in your project, simply access its shared instance and pass an object that conforms to Sound.
For example, given the previous CoinDropSound:
SoundManager.shared.play(sound: CoinDropSound())
You could also omit the sound argument label, which may improve readability:
class SoundManager {
    // ...
    func play(_ sound: Sound) {
        // ...
    }
    // ...
}
And then:
SoundManager.shared.play(CoinDropSound())
I am trying to create a game in iOS without using SpriteKit.
I am stuck on getting the sound effects to play in a timely manner. I've been using the following code, which I found online, and the background music plays great. However, when I use the "playSoundEffect" method it plays OK the first time but then starts to lag behind and becomes out of sync. I guess that happens because it initializes an AVAudioPlayer every time.
Does anyone have a good idea of how to play sound effects in a timely manner while also playing background music? Thanks!
import AVFoundation

public class SKTAudio: NSObject, AVAudioPlayerDelegate {

    public var backgroundMusicPlayer: AVAudioPlayer?
    public var soundEffectPlayer: AVAudioPlayer?

    private var mainLoopFileName: String! {
        let randomSong = Int(arc4random_uniform(3))
        switch randomSong {
        //case 0: return "Test.mp3"
        //case 1: return "Test2.mp3"
        case 0: return "SneakySnitch.mp3"
        case 1: return "FasterDoesIt.mp3"
        case 2: return "MonkeysSpinningMonkeys.mp3"
        default:
            break
        }
        return "SneakySnitch.mp3"
    }

    public class func sharedInstance() -> SKTAudio {
        return SKTAudioInstance
    }

    public func playBackgroundMusic() {
        let filename = mainLoopFileName
        let url = NSBundle.mainBundle().URLForResource(filename, withExtension: nil)
        if (url == nil) {
            println("Could not find file: \(filename)")
            return
        }

        var error: NSError? = nil
        backgroundMusicPlayer = AVAudioPlayer(contentsOfURL: url, error: &error)
        if let player = backgroundMusicPlayer {
            player.numberOfLoops = 0
            player.delegate = self
            player.prepareToPlay()
            player.play()
        } else {
            println("Could not create audio player: \(error!)")
        }
    }

    public func pauseBackgroundMusic() {
        if let player = backgroundMusicPlayer {
            if player.playing {
                player.pause()
            }
        }
    }

    public func resumeBackgroundMusic() {
        if let player = backgroundMusicPlayer {
            if !player.playing {
                player.play()
            }
        }
    }

    public func playSoundEffect(filename: String) {
        let url = NSBundle.mainBundle().URLForResource(filename, withExtension: nil)
        if (url == nil) {
            println("Could not find file: \(filename)")
            return
        }

        var error: NSError? = nil
        soundEffectPlayer = AVAudioPlayer(contentsOfURL: url, error: &error)
        if let player = soundEffectPlayer {
            player.numberOfLoops = 0
            player.prepareToPlay()
            player.play()
        } else {
            println("Could not create audio player: \(error!)")
        }
    }

    // MARK: AVAudioPlayerDelegate

    public func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        println("finished playing \(flag)")
        delay(5.0, {
            self.playBackgroundMusic()
        })
    }

    public func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer!, error: NSError!) {
        println("\(error.localizedDescription)")
    }
}
You could use AVPlayer to play your sound file. Keep one player, but swap its AVPlayerItem for a new item when you need to play a new sound. It might be faster than recreating the player every time.
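A rough sketch of that idea in current Swift syntax; the helper function and the file lookup are assumptions, not code from the question:

import AVFoundation

// One long-lived player; swap in a new item for each effect instead of
// creating a new player every time.
let effectPlayer = AVPlayer()

func playEffect(named name: String) {
    guard let url = Bundle.main.url(forResource: name, withExtension: "mp3") else { return }
    effectPlayer.replaceCurrentItem(with: AVPlayerItem(url: url))
    effectPlayer.play()
}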
While AVAudioPlayer/AVPlayer is the simplest option, it will not give you the shortest delay or perfect synchronization when playing audio files. You should look into Audio Queues or Audio Units within Core Audio for more accurate sound playback.
The problem is that you are playing the second sound effect before the first one is finished. You kill the reference to the first one when you assign the new AVAudioPlayer to your soundEffectPlayer variable, so the first one stops playing.
If you don't mind losing the ability to control the volume of the sound, you could use this:
import AudioToolbox

var mySound: SystemSoundID = 0
AudioServicesCreateSystemSoundID(url, &mySound)

// Play
AudioServicesPlaySystemSound(mySound)
Otherwise you can use AVAudioPlayer if you add each sound that you create into an array, keeping a reference to it. Then you can remove it when the sound is done by implementing the AVAudioPlayer delegate:
func playSound() {
    let path: NSString? = NSBundle.mainBundle().pathForResource("sound", ofType: "wav")!
    let url = NSURL(fileURLWithPath: path!)

    var error: NSError?
    let sound = AVAudioPlayer(contentsOfURL: url, error: &error)
    sound.delegate = self
    sound.volume = 0.5
    self.soundsEffectsArray.addObject(sound)
    sound.prepareToPlay()
    sound.play()
}

func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
    self.soundsEffectsArray.removeObject(player)
}