Pausing and Resuming a Timer in Swift - iOS

I am playing an audio file that can be paused, resumed, and stopped. I also have a slider that can be used to change the audio's current playback point, and a label that shows the duration of the audio. I want to pause the timer when the user pauses the audio. I read that to do this I can invalidate and nil the timer and then start it again, but the problem is that this replays the audio from the start. Is there a way to start the timer again from the point where it was paused?
func startTimer() {
if replayTimer == nil {
replayTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateSlider), userInfo: nil, repeats: true)
}
}
@objc func updateSlider() {
progressBar.value = Float(audio.audio!.currentTime)
}
@IBAction func playReceiverVoicenote(_ sender: Any) {
if replayTimer == nil {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.playAudio(filePath: filePath!)
startTimer()
receiverProgressBar.maximumValue = audio.getAudioDuration()
} else if audio.isAudioPlaying() {
audioBtn.setImage(#imageLiteral(resourceName: "playAudio"), for: .normal)
audio.pauseAudio()
replayTimer?.invalidate()
replayTimer = nil
} else {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.replayAudio()
startTimer()
}
}
func playAudio(filePath:URL){
do {
audio = try AVAudioPlayer(contentsOf: filePath)
audio!.delegate = self
audio!.prepareToPlay()
audio!.volume = 1.0
audio!.play()
} catch {
print(error.localizedDescription)
}
}
func pauseAudio() {
audio!.pause()
}
func replayAudio() {
audio!.play()
}
func stopAudio() {
audio!.stop()
}

When the audio is about to pause, save audio.currentTime in a variable and invalidate the timer.
When the audio is about to resume, restore the saved currentTime on the player, call play(), and restart the timer.
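A minimal sketch of that idea, assuming the same replayTimer, startTimer(), and audio wrapper used in the question (pausedTime is a new property introduced here just for illustration):

var pausedTime: TimeInterval = 0   // hypothetical property that remembers where playback stopped

func pausePlayback() {
    pausedTime = audio.audio?.currentTime ?? 0   // remember the position before pausing
    audio.pauseAudio()
    replayTimer?.invalidate()
    replayTimer = nil
}

func resumePlayback() {
    audio.audio?.currentTime = pausedTime   // pause() already preserves currentTime; restoring it explicitly is just a safety net
    audio.replayAudio()                     // play() continues from currentTime, not from the start
    startTimer()                            // the timer only drives the slider, so a fresh one picks up at the right spot
}

The key point is that only the timer is recreated; the player itself is never reloaded, so playback continues from where it stopped.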

You can also use the AudioPlayer manager shown below, in this way:
let player = AudioPlayer()
player.loadAudio(url: URL(string: "myAudioUrl.mp3")!, name: "", img: "")
And in the button action, to play and pause:
if player.isPlaying {
player.pauseAudio()
} else {
player.playAudio { isFinish, player, currentTimeInSec, remainingTimeInSec in
if isFinish {
// Audio finish to play
}
}
}
The closure returns:
isFinish: Bool // Whether the audio has finished playing
player: AVAudioPlayer // The player instance
currentTimeInSec: Int // The current playback time of the audio, in seconds
remainingTimeInSec: Int // The remaining time, in seconds
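For example, wired to the slider and button from the original question (progressBar and audioBtn come from the question's code; durationLabel is an assumed extra label):

player.playAudio { isFinish, avPlayer, currentTimeInSec, remainingTimeInSec in
    self.progressBar.value = Float(avPlayer.currentTime)       // keep the slider in sync
    self.durationLabel.text = "\(remainingTimeInSec)s left"    // assumed label showing time remaining
    if isFinish {
        self.audioBtn.setImage(#imageLiteral(resourceName: "playAudio"), for: .normal)
    }
}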
AudioPlayer:
import Foundation
import AVFoundation
import MediaPlayer
class AudioPlayer {
var audioPlayer: AVAudioPlayer?
var hasBeenPaused = false
var songName = ""
var songImage = ""
var timer: Timer?
var isPlaying: Bool {
return audioPlayer?.isPlaying ?? false
}
public func loadAudio(url: URL, name: String, img: String) {
songName = name
songImage = img
setupRemoteTransportControls()
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
audioPlayer?.prepareToPlay()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback, mode: .default)
} catch let sessionError {
print(sessionError)
}
} catch let songPlayerError {
print(songPlayerError)
}
}
public func playAudio(completion: ((_ isFinish: Bool, _ player: AVAudioPlayer, _ currentTimeInSec: Int, _ restTimeInSec: Int) -> ())? = nil) {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.play()
setupNowPlaying()
if timer == nil {
timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
let currentTime = Int(audioPlayer.currentTime)
let remainingTime = Int(audioPlayer.duration) - Int(currentTime)
if remainingTime == 0 {
completion?(true, audioPlayer, currentTime, remainingTime)
if self.timer != nil {
self.timer!.invalidate()
self.timer = nil
}
} else {
completion?(false, audioPlayer, currentTime, remainingTime)
}
}
}
}
public func pauseAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying {
audioPlayer.pause()
hasBeenPaused = true
} else {
hasBeenPaused = false
}
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
public func replayAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying || hasBeenPaused {
audioPlayer.stop()
audioPlayer.currentTime = 0
audioPlayer.play()
} else {
audioPlayer.play()
}
setupNowPlaying()
}
public func stopAudio() {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.stop()
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.previousTrackCommand.isEnabled = false
commandCenter.nextTrackCommand.isEnabled = false
commandCenter.skipBackwardCommand.isEnabled = false
commandCenter.skipForwardCommand.isEnabled = false
commandCenter.playCommand.isEnabled = true
commandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the play command
self.playAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
commandCenter.pauseCommand.isEnabled = true
commandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the pause command
self.pauseAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
}
func setupNowPlaying() {
guard let audioPlayer = audioPlayer else { return }
var nowPlayingInfo = [String: Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = songName
if let image = UIImage(named: songImage) {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = audioPlayer.currentTime
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = audioPlayer.duration
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = audioPlayer.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}

Related

In Swift 5 how would you add Audio Controls to Control Center via background audio?

I'm playing audio from a JSON source that I built to feed the audio URL and its metadata (such as title, description, and cover art URL) to my app. The audio is a radio station feed. Audio works, play/stop controls work, cover art works, and background audio works. I'm stumped on adding controls to Control Center to play/stop the audio while outside the app. I've taken a look at Apple's documentation on this and it's very straightforward. I've also enabled the build settings for background fetch, background audio, and Bluetooth. But it doesn't seem to work on my iPhone attached to Xcode via USB-C (I'm assuming the iOS Simulator doesn't support Control Center). Below is my working code. Any thoughts on how to get this working? Do I need to pass the audio on to setupRemoteTransportControls()? I assume the title and cover art will need to be passed in from the data source, but right now Control Center doesn't even recognize that the app is playing audio.
ViewController.swift
import UIKit
import MediaPlayer
import AVFoundation
import Foundation
class ViewController: UIViewController {
var player: AVPlayer!
var playerItem: AVPlayerItem!
var audioCheck: Timer?
var timer: Timer?
var passText: String? = "Test"
@IBOutlet weak var trackTitle: UILabel!
@IBOutlet weak var togglePlay: UIButton!
@IBOutlet weak var coverPhoto: UIImageView!
func loadAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
}
let minutes = 60
func playAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
player.play()
}
@IBAction func OpenPlayer(_ sender: Any) {
performSegue(withIdentifier: "PlayerSegue", sender: self)
}
override func viewDidLoad() {
super.viewDidLoad()
loadAudio()
// Audio in the background
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback)
} catch {
print("Error: Audio is not paying in the background.")
}
// Timer checking if other audio sources are running
audioCheck = Timer.scheduledTimer(timeInterval: 0, target: self, selector: #selector(timedAudioCheck), userInfo: nil, repeats: true)
struct currentTrack: Codable {
let title: String
let artwork_url: String
}
struct getTrack: Codable {
// let name: String
// let status: String
let current_track: currentTrack
}
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
_ = Timer.scheduledTimer(withTimeInterval: 10.0, repeats: true) { timer in
// print("Data updated!")
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
// Apple's Documentation on playing audio in control center
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
// Along with timer, checks if other audio sources are playing and resets the audio in the app
@objc func timedAudioCheck() {
if (AVAudioSession.sharedInstance().secondaryAudioShouldBeSilencedHint) {
togglePlay.setTitle("Play", for: .normal)
} else if (player.rate == 0) {
togglePlay.setTitle("Play", for: .normal)
} else {
togglePlay.setTitle("Pause", for: .normal)
}
}
@IBAction func togglePlay(_ sender: UIButton) {
if player.rate == 0 {
// Plays the audio stream
sender.setTitle("Pause", for: .normal)
playAudio()
} else {
sender.setTitle("Play", for: .normal)
player.pause()
}
}
}
Apple's suggestion to add Control Center Support
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
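One observation on the posted code, offered as a guess rather than an accepted answer: setupRemoteTransportControls() and setupNowPlaying() are defined but never called, and the audio session is never activated, so Control Center has no reason to show the app. A sketch of wiring them in, reusing the properties and the AUDIO_URL_GOES_HERE placeholder from the question:

override func viewDidLoad() {
    super.viewDidLoad()
    loadAudio()
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(AVAudioSession.Category.playback)
        try audioSession.setActive(true)   // an inactive session keeps the app out of Control Center
    } catch {
        print("Error: Audio is not playing in the background.")
    }
    setupRemoteTransportControls()         // register the play/pause handlers once
    // ... the existing JSON fetch and timers stay as they are ...
}

func playAudio() {
    let audioURL = URL(string: "AUDIO_URL_GOES_HERE")
    player = AVPlayer(url: audioURL!)
    playerItem = player.currentItem        // setupNowPlaying() reads playerItem, so keep it assigned
    player.play()
    setupNowPlaying()                      // publish title, artwork, and rate after playback starts
}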

Media Player not moving seek bar when fast forwarding

So I am having this small issue, where the seek bar is not being moved forward when the user taps on the fast forward or rewind buttons on the native iOS controller.
See video
https://youtu.be/CJP131GpSYI
The section of code that makes it possible is printed below.
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
Full Media Player code:
import SwiftUI
//import Foundation
import AVFoundation
import MediaPlayer
import AVKit
struct NowPlayingData: Codable , Identifiable {
var id,artist,song,cover:String
private enum CodingKeys : String, CodingKey {
case id = "_id", artist , song , cover
}
}
class MusicPlayer {
static let shared = MusicPlayer()
static var mediatype = ""
static var artist = ""
static var song = ""
static var cover = ""
static var urls = ""
static var dur = 0
static var uuid = UIDevice.current.identifierForVendor?.uuidString
var player: AVPlayer?
let playerViewController = AVPlayerViewController()
func gettype(completion: @escaping (String) -> Void){
completion(MusicPlayer.mediatype)
}
func getPodCastPlayerNP(completion: @escaping (NowPlayingData) -> ()) {
// Timer.scheduledTimer(withTimeInterval: 15, repeats: true) { (timer) in
let songdata = "{\"_id\": \"1\",\"song\": \"\(MusicPlayer.song)\",\"artist\": \"\(MusicPlayer.artist)\", \"cover\": \"\(MusicPlayer.cover)\"}"
let data: Foundation.Data = songdata.data(using: .utf8)!
let podcast = try! JSONDecoder().decode(NowPlayingData.self, from: data)
//print(data!)
// let episode = podcast.programs
DispatchQueue.main.async{
// The array is stored under programs now
//print(podcast)
completion(podcast)
}
// }
}
func startBackgroundMusic(url: String, type:String) {
MusicPlayer.mediatype = String(type)
//let urlString = "http://stream.radiomedia.com.au:8003/stream"
let urlString = url+"?uuid="+MusicPlayer.uuid!
print(urlString)
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func startBackgroundMusicTwo() {
let urlString = "http://stream.radiomedia.com.au:8003/stream"
//let urlString = url
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String, duration: Int){
if(duration == 0){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
}
else{
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: false,
MPMediaItemPropertyPlaybackDuration: duration,
MPNowPlayingInfoPropertyPlaybackRate: 1.0,
MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds((self.player?.currentTime())!)
]
}
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle: "Some name",
MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String, urls: String, duration: Int) {
// MusicPlayer.JN = "[{'artist': \(artist), 'song':\(song), 'cover': \(cover)}]"
MusicPlayer.artist = artist
MusicPlayer.song = song
MusicPlayer.cover = cover
MusicPlayer.urls = urls
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song, duration: duration)
}
}
func stopBackgroundMusic() {
guard let player = player else { return }
player.pause()
}
}
Full GitHub code:
https://github.com/redimongo/iOS-Radio-App
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
//self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds - 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds + 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
Replace these two functions with the code above and let me know what you run into.
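If the Control Center seek bar still doesn't jump after seeking, it is usually because the bar is drawn from MPNowPlayingInfoCenter metadata rather than from the player itself, so refreshing the elapsed time once the seek completes tends to fix it. A hedged sketch (the function name is mine; call it from the changePlaybackPosition and skip completion handlers, reusing the existing player):

func refreshNowPlayingElapsedTime() {
    // Push the new position to Control Center so its seek bar follows the player
    var info = MPNowPlayingInfoCenter.default().nowPlayingInfo ?? [:]
    info[MPNowPlayingInfoPropertyElapsedPlaybackTime] = self.player?.currentTime().seconds ?? 0
    info[MPNowPlayingInfoPropertyPlaybackRate] = self.player?.rate ?? 0
    MPNowPlayingInfoCenter.default().nowPlayingInfo = info
}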

Problems resuming recording in the background using AVAudioRecorder in Swift

To put it in context, I'm developing an app that records for long stretches of time, both in the foreground and in the background. I'm facing two problems using AVAudioRecorder:
Recording resumes fine when, for example, I play an audio message in WhatsApp or a video in Instagram. But when I play a song in Apple Music or play back a voice note, the recording doesn't resume.
If I'm in a phone call and I start recording, my app crashes and outputs the following:
(OSStatus error 561017449.)
I think I also need to handle the case where the input/output device changes, so my app doesn't crash there either.
The code for the class where I'm recording (which also plays back the recording so I can check that it works) is this:
class RecordViewController: UIViewController, AVAudioPlayerDelegate , AVAudioRecorderDelegate {
@IBOutlet weak var principalLabel: UILabel!
@IBOutlet weak var loginLabel: UILabel!
@IBOutlet weak var recordBTN: UIButton!
@IBOutlet weak var playBTN: UIButton!
var audioRecorder : AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
check_record_permission()
checkActivateMicrophone()
NotificationCenter.default.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: audioRecorder)
let tapRegister: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.goToLogin))
loginLabel.addGestureRecognizer(tapRegister)
loginLabel.isUserInteractionEnabled = true
}
@objc func goToLogin() {
self.performSegue(withIdentifier: "goToLogin", sender: self)
}
@objc func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeInt = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeInt) else {
return
}
switch type {
case .began:
if isRecording {
print("OOOOOOO")
audioRecorder.pause()
}
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
audioRecorder.record()
print("AAAAAAA")
} else {
print("III")
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
self.audioRecorder.record()
}
}
default: ()
}
}
func check_record_permission()
{
switch AVAudioSession.sharedInstance().recordPermission {
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
default:
break
}
}
func checkActivateMicrophone() {
if !isAudioRecordingGranted {
playBTN.isEnabled = false
playBTN.alpha = 0.5
recordBTN.isEnabled = false
recordBTN.alpha = 0.5
principalLabel.text = "Activa el micrófono desde ajustes"
} else {
playBTN.isEnabled = true
playBTN.alpha = 1
recordBTN.isEnabled = true
recordBTN.alpha = 1
principalLabel.text = "¡Prueba las grabaciones!"
}
}
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
func getFileUrl() -> URL
{
let filename = "myRecording.m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
func setup_recorder()
{
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
print(error.localizedDescription)
}
}
else
{
print("AAAAA")
}
}
@IBAction func recordAct(_ sender: Any) {
if(isRecording) {
finishAudioRecording(success: true)
recordBTN.setTitle("Record", for: .normal)
playBTN.isEnabled = true
isRecording = false
}
else
{
setup_recorder()
audioRecorder.record()//aaaaaa
recordBTN.setTitle("Stop", for: .normal)
playBTN.isEnabled = false
isRecording = true
}
}
func finishAudioRecording(success: Bool)
{
if success
{
audioRecorder.stop()
audioRecorder = nil
print("recorded successfully.")
}
else
{
print("Recording failed.")
}
}
func prepare_play()
{
do
{
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
}
catch{
print("Error")
}
}
@IBAction func playAct(_ sender: Any) {
if(isPlaying)
{
audioPlayer.stop()
recordBTN.isEnabled = true
playBTN.setTitle("Play", for: .normal)
isPlaying = false
}
else
{
if FileManager.default.fileExists(atPath: getFileUrl().path)
{
recordBTN.isEnabled = false
playBTN.setTitle("pause", for: .normal)
prepare_play()
audioPlayer.play()
isPlaying = true
}
else
{
print("Audio file is missing.")
}
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
print("UUU")
finishAudioRecording(success: false)
}
playBTN.isEnabled = true
}
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
recordBTN.isEnabled = true
}
}
I have enabled the "Audio, AirPlay, and Picture in Picture" Background Mode capability.
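For the device-change part of the question, a common approach (a sketch, not an accepted answer) is to also observe AVAudioSession.routeChangeNotification next to the interruption observer and pause the recorder when the current input disappears:

// Registered in viewDidLoad alongside the interruption observer
NotificationCenter.default.addObserver(self,
    selector: #selector(handleRouteChange(notification:)),
    name: AVAudioSession.routeChangeNotification,
    object: nil)

@objc func handleRouteChange(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
          let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else { return }
    switch reason {
    case .oldDeviceUnavailable, .newDeviceAvailable:
        // The input/output route changed (e.g. a headset was unplugged); pause instead of letting the recorder fail
        if isRecording {
            audioRecorder?.pause()
        }
    default:
        break
    }
}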

How to automatically go to the next track when the one is currently playing ends?

I'm quite new to Swift and I want to implement a piece of code that plays the next song when the one currently playing ends.
I tried to copy the code from my "@IBAction func nextAction" (which works fine):
@IBAction func nextAction(sender: AnyObject) {
self.nextTrack()
}
func nextTrack() {
if trackId == 0 || trackId < 4 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId += 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
And I tried to put it inside an if condition like this (inside viewDidLoad):
if audioPlayer.currentTime >= audioPlayer.duration {
self.nextTrack()
}
I don't get any errors, but at runtime this check never fires and the song ends without the next one starting.
To make the situation clearer, here's my controller:
import UIKit
import AVFoundation
class PlayerViewController: UIViewController {
@IBOutlet weak var coverImageView: UIImageView!
@IBOutlet weak var progressView: UIProgressView!
@IBOutlet weak var songTitleLabel: UILabel!
@IBOutlet weak var artistLabel: UILabel!
@IBOutlet weak var shuffle: UISwitch!
var trackId: Int = 0
var library = MusicLibrary().library
var audioPlayer: AVAudioPlayer!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
override func viewWillDisappear(animated: Bool) {
audioPlayer.stop()
}
func updateProgressView(){
if audioPlayer.playing {
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: true)
}
}
@IBAction func playAction(sender: AnyObject) {
if !audioPlayer.playing {
audioPlayer.play()
}
}
@IBAction func stopAction(sender: AnyObject) {
audioPlayer.stop()
audioPlayer.currentTime = 0
progressView.progress = 0
}
@IBAction func pauseAction(sender: AnyObject) {
audioPlayer.pause()
}
@IBAction func fastForwardAction(sender: AnyObject) {
var time: NSTimeInterval = audioPlayer.currentTime
time += 5.0
if time > audioPlayer.duration {
stopAction(self)
}else {
audioPlayer.currentTime = time
}
}
@IBAction func rewindAction(sender: AnyObject) {
var time: NSTimeInterval = audioPlayer.currentTime
time -= 5.0
if time < 0 {
stopAction(self)
}else {
audioPlayer.currentTime = time
}
}
@IBAction func previousAction(sender: AnyObject) {
if trackId != 0 || trackId > 0 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId -= 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
@IBAction func swipeDownAction(sender: AnyObject) {
self.close()
}
@IBAction func closeAction(sender: AnyObject) {
self.close()
}
@IBAction func nextAction(sender: AnyObject) {
self.nextTrack()
}
func close() {
self.dismissViewControllerAnimated(true, completion: nil)
}
func nextTrack() {
if trackId == 0 || trackId < 4 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId += 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
}
All the code is written in Xcode 7.3.1
You should use the AVAudioPlayer delegate method audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool), which is called when the audio player finishes playing a sound.
Make your PlayerViewController conform to the AVAudioPlayerDelegate protocol like this:
class PlayerViewController: UIViewController, AVAudioPlayerDelegate {
Make sure to set self as the delegate of the audioPlayer you create. To do that, in your viewDidLoad, previousAction, and nextTrack methods you need to add
audioPlayer.delegate = self
after this line:
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
Now you can use the delegate method to know when the audio has finished playing and move on to the next track; just add this inside your class:
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
if flag {
self.nextTrack()
} else {
// did not finish successfully
}
}

How to buffer audio using AVPlayer in iOS?

I want to play streaming audio from the Internet. I wrote code that plays the stream, but it has no buffering, so if the signal is weak the application stops playing audio. This is my code:
import UIKit
import AVFoundation
import MediaPlayer
import AudioToolbox
class ViewController: UIViewController {
var playerItem:AVPlayerItem?
var player:AVPlayer?
@IBOutlet weak var PlayButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
var buffer = AVAudioBuffer ()
let url = NSURL (string: "http://radio.afera.com.pl/afera64.aac")
playerItem = AVPlayerItem(URL: url!)
player = AVPlayer(playerItem: playerItem!)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func PlayButtonTapped(sender: AnyObject)
{
if ((player!.rate != 0) && (player!.error == nil))
{
player!.pause()
PlayButton.setImage(UIImage(named:"Play"), forState: UIControlState.Normal)
}
else
{
player!.play()
PlayButton.setImage(UIImage(named:"Stop"), forState:UIControlState.Normal)
}
}
}
I have no idea how to buffer this stream. I searched Apple's documentation but couldn't find anything for Swift.
I found something called AVAudioBuffer, but I don't know how to use it or whether it's even the right tool for this problem.
P.S. C# has documentation on MSDN; is there something similar from Apple for Swift?
The answer is to create an error delegate that fires a selector every time the player stops (the error message changes when the network connection is interrupted or the stream didn't load properly):
Here are the delegate protocols, declared just outside my RadioPlayer class:
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
class:
import Foundation
import AVFoundation
import UIKit
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(URL: NSURL(string: Globals.radioURL)!)
private var playerItem = AVPlayerItem?()
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NSNotificationCenter.defaultCenter().addObserverForName(
AVPlayerItemFailedToPlayToEndTimeNotification,
object: nil,
queue: nil,
usingBlock: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> NSTimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.ReadyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = timeRangeArray.objectAtIndex(0).CMTimeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (NSTimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
RadioViewController:
import UIKit
import AVFoundation
class RadioViewController: UIViewController, errorMessageDelegate, sharedInstanceDelegate {
// MARK: Properties
var firstErrorSkip = true
var firstInstanceSkip = true
@IBOutlet weak var listenLabel: UILabel!
@IBOutlet weak var radioSwitch: UIImageView!
@IBAction func back(sender: AnyObject) {
print("Dismissing radio view")
if let navigationController = self.navigationController
{
navigationController.popViewControllerAnimated(true)
}
}
@IBAction func switched(sender: AnyObject) {
toggle()
}
override func viewDidLoad() {
super.viewDidLoad()
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
print("AVAudioSession Category Playback OK")
do {
try AVAudioSession.sharedInstance().setActive(true)
print("AVAudioSession is Active")
} catch let error as NSError {
print(error.localizedDescription)
}
} catch let error as NSError {
print(error.localizedDescription)
}
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.currentlyPlaying() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func toggle() {
if RadioPlayer.sharedInstance.currentlyPlaying() {
pauseRadio()
} else {
playRadio()
}
}
func playRadio() {
firstErrorSkip = false
firstInstanceSkip = false
if RadioPlayer.sharedInstance.errorMessage != "" || RadioPlayer.sharedInstance.bufferFull() {
resetStream()
} else {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
}
}
func pauseRadio() {
RadioPlayer.sharedInstance.pause()
radioSwitch.image = UIImage(named: "Radio_Switch_Inactive")
listenLabel.text = "Click to Play Radio Stream:"
}
func resetStream() {
print("Reloading interrupted stream");
RadioPlayer.sharedInstance.resetPlayer()
//RadioPlayer.sharedInstance = RadioPlayer();
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.bufferFull() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
} else {
playRadio()
}
}
func errorMessageChanged(newVal: String) {
if !firstErrorSkip {
print("Error changed to '\(newVal)'")
if RadioPlayer.sharedInstance.errorMessage != "" {
print("Showing Error Message")
let alertController = UIAlertController(title: "Stream Failure", message: RadioPlayer.sharedInstance.errorMessage, preferredStyle: UIAlertControllerStyle.Alert)
alertController.addAction(UIAlertAction(title: "Dismiss", style: UIAlertActionStyle.Default, handler: nil))
self.presentViewController(alertController, animated: true, completion: nil)
pauseRadio()
}
} else {
print("Skipping first init")
firstErrorSkip = false
}
}
func sharedInstanceChanged(newVal: Bool) {
if !firstInstanceSkip {
print("Detected New Instance")
if newVal {
RadioPlayer.sharedInstance.play()
}
} else {
firstInstanceSkip = false
}
}
}
Hope this will help :)
change
playerItem = AVPlayerItem?()
to
playerItem:AVPlayerItem?
