MPRemoteCommandCenter - Remote controls on lock screen does not show up - ios

I've implemented two functions in View controller (setupRemoteTransportControls() and setupNowPlaying()) and added one function to AppDelegate, but I'm still unable to see background audio controls of my app on the lock screen and also audio interruption function isn't working. This is the live stream from url, as you can spot on in the code. In the general settings I have added background playing:
What I would like to do is to print on the Remote Command Center artist, title and albumArt labes and UIImage (labels an UIImage are taken from my station API) , but i was stuck just displaying the command center. Here is my code:
import UIKit
import AVKit
import MediaPlayer
class ViewController: UIViewController, AVAudioPlayerDelegate {
#IBAction func buttonPressed(_ sender: UIButton){
if isPlaying {
player.pause()
sender.setImage(playImage, for: .normal)
} else {
let url = "https://myradio.com/radio.mp3"
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, options: [])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
player = AVPlayer(url: URL(string: url)!)
player.volume = 1.0
player.rate = 1.0
player.play()
sender.setImage(pauseImage, for: .normal)
}
isPlaying.toggle()
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
overrideUserInterfaceStyle = .light
setupRemoteTransportControls()
requestNowPlaying()
setupNowPlaying()
}
// Here is the API data downloading part, so i skipped it
//Command Center audio controls
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Test"
if let image = UIImage(named: "Default_albumart") {
nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyIsLiveStream] = true
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
func updateNowPlaying(isPause: Bool) {
// Define Now Playing Info
let nowPlayingInfo = MPNowPlayingInfoCenter.default().nowPlayingInfo!
//nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player.currentTime
//nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPause ? 0 : 1
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
//audio interruption
#objc func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
// Switch over the interruption type.
switch type {
case .began:
print("Interruption began")
case .ended:
// An interruption ended. Resume playback, if appropriate.
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
player.play()
} else {
// An interruption ended. Don't resume playback.
}
default: ()
}
}
}
Here's what I've added in My AppDelegate.swift:
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
application.beginReceivingRemoteControlEvents()
// Override point for customization after application launch.
return true
}

Remove .mixWithOthers from your category options.
I think the reasoning is that only the primary iOS audio app can control the remote screen. .mixWithOthers is for secondary audio apps.

Identify yourself as .longForm audio content provider with this code:
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, mode: AVAudioSessionModeDefault, routeSharingPolicy: .longForm)
For whole implementation of AirPlay2 check this Apple WWDC presentation: https://developer.apple.com/videos/play/wwdc2017/509/

Related

Swift - How to handle audio session interruptions while recording video

I'm trying to fix a behavior of my iOS application. There is a camera plug-in in the application that allows the user to record videos and take pictures. While recording a video, if an interruption occurs such as an incoming call or an alarm sound, the camera frezees and the video is not saved. I would like to handle this situation interrupting the video and saving it or removing the audio while it keeps recording.
But after the interruption starts and the event is recognized, the variables regarding the video already recorded are reinitialized, causing the app not to save anything.
Any help would be appreciated.
†
var captureSession: AVCaptureSession?
public func fileOutput(\_: AVCaptureFileOutput, didStartRecordingTo \_: URL, from \_: \[AVCaptureConnection\]) {
NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionBegin), name: .AVCaptureSessionWasInterrupted, object: captureSession)
NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnd),name: .AVCaptureSessionInterruptionEnded, object: captureSession)
NotificationCenter.default.addObserver(self, selector: #selector(audioSessionInterrupted), name: AVAudioSession.interruptionNotification, object: AVAudioSession.sharedInstance)
print("STO PASSANDO DA AVCaptureFileOutputRecordingDelegate")
captureSession?.beginConfiguration()
if flashMode != .off {
_updateIlluminationMode(flashMode)
}
captureSession?.commitConfiguration() //at this point captureSession starts collecting data about the video
}
extension CameraManager {
#objc func sessionInterruptionBegin(notification: Notification) {
print("Capture Session Interruption begin Notification!")
guard let reasonNumber = notification.userInfo?\ [AVCaptureSessionInterruptionReasonKey\] as? NSNumber else {
return
}
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
switch reason {
case .audioDeviceInUseByAnotherClient:
removeAudioInput()
default:
break
}
}
func addAudioInput() throws {
if audioDeviceInput != nil {
return
}
removeAudioInput()
print("Adding audio input...")
captureSession?.beginConfiguration()
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw NSError()
}
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard captureSession!.canAddInput(audioDeviceInput!) else {
throw NSError()
}
captureSession?.addInput(audioDeviceInput!)
captureSession?.automaticallyConfiguresApplicationAudioSession = false
captureSession?.commitConfiguration()
}
func removeAudioInput() {
//when the code reaches this point audioDeviceInput is reinitialized so the audio session is not removed from the recording
//captureSession is not filled with the data about the video recorded
guard let audioInput = audioDeviceInput else {
return
}
captureSession?.beginConfiguration()
captureSession?.removeInput(audioInput)
audioDeviceInput = nil
captureSession?.commitConfiguration()
}
#objc func sessionInterruptionEnd(notification: Notification) {
print("Capture Session Interruption end Notification!")
guard let reasonNumber = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? NSNumber else {
return
}
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
switch reason {
case .audioDeviceInUseByAnotherClient:
// add audio again because we removed it when we received the interruption.
configureAudioSession()
default:
// don't do anything, iOS will automatically resume session
break
}
}
#objc func audioSessionInterrupted(notification: Notification) {
print("Audio Session Interruption Notification!")
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
print("The Audio Session was interrupted!")
removeAudioInput()
case .ended:
print("The Audio Session interruption has ended.")
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
print("Resuming interrupted Audio Session...")
// restart audio session because interruption is over
configureAudioSession()
} else {
print("Cannot resume interrupted Audio Session!")
}
#unknown default: ()
}
}
func configureAudioSession() {
let start = DispatchTime.now()
do {
try self.addAudioInput()
let audioSession = AVAudioSession.sharedInstance()
if audioSession.category != .playAndRecord {
// allow background music playback
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
}
// activate current audio session because camera is active
try audioSession.setActive(true)
} catch let error as NSError {
switch error.code {
case 561_017_449:
print(error.description)
default:
print(error.description)
}
self.removeAudioInput()
}
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
print("Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
}
}

swift start music after user phone call

I am not quite sure if I am missing a function or something, but when a users phone rings and or they ask siri or anything that stops my app audio from playing. It does not re-start my app playing when the user has finished their task.
I am wondering is there a function I am missing, or can Apple iOS apps not do this?
I thought it would be something to do with:
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
// self.nowplaying(artist: "Anna", song: "test")
}
I have found that I need to add this part but how do I call it?
func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let interruptionTypeRawValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeRawValue) else {
return
}
switch interruptionType {
case .began:
print("interruption began")
case .ended:
print("interruption ended")
default:
print("UNKNOWN")
}
}
You need to set your audio session to AVAudioSessionCategoryPlayback. If you don't set this mode, you will have the default mode AVAudioSessionCategorySoloAmbient.
You can set the mode in didFinishLaunching.
e.g.
func application(_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Get the singleton instance.
let audioSession = AVAudioSession.sharedInstance()
do {
// Set the audio session category, mode, and options.
try audioSession.setCategory(.playback, mode: .default, options: [])
} catch {
print("Failed to set audio session category.")
}
// Other post-launch configuration.
return true
}
You will also need to implement interruption observation
func setupNotifications() {
// Get the default notification center instance.
let nc = NotificationCenter.default
nc.addObserver(self,
selector: #selector(handleInterruption),
name: AVAudioSession.interruptionNotification,
object: nil)
}
#objc func handleInterruption(notification: Notification) {
// To be implemented.
}

MPRemoteCommandCenter Not displaying in lock screen

May be it's a duplicate question, but none of the solutions working for me. I have tried out almost everything.
The now playing info is not getting updated in lock screen.
Swift Version : 5 &
iOS Version : 13
Here is my code
func setupRemoteCommandCenter() {
UIApplication.shared.beginReceivingRemoteControlEvents()
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.playCommand.addTarget { event in
return .success
}
commandCenter.pauseCommand.addTarget { event in
return .success
}
commandCenter.nextTrackCommand.addTarget { event in
return .success
}
commandCenter.previousTrackCommand.addTarget { event in
return .success
}
}
func updateLockScreen() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyArtist] = "Artist"
nowPlayingInfo[MPMediaItemPropertyTitle] = "title"
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
One method is getting called from viewDidLoad i.e
override func viewDidLoad() {
super.viewDidLoad()
AudioManager.shared.audioManageDelegate = self
// Do any additional setup after loading the view.
setupRemoteCommandCenter()
}
And other one is getting called from playbuttonAction method i.e
#IBAction func togglePlayPauseButton(sender: UIButton) {
//play pause button
sender.isSelected = !sender.isSelected
//updateLockScreen() //I checked it from calling here also
if sender.isSelected {
AudioManager.shared.playMusic()
} else {
AudioManager.shared.pauseMusic()
}
updateLockScreen()
}
My Appdelegate is here
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
setUPforbackground()
return true
}
func setUPforbackground() {
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
}
Calling the updateLockScreen() before calling playMusic() also no result. Do I miss anything here?
After analysing code carefully I have found that
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
Throwing error So I have changed it to below
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
Hope it will help someone. Thank you.

Remote Transport Control does not show up for radio stream SwiftUI app with AVPlayer

I am developing simple app in SwiftUI for one internet radio. It uses AVPlayer for play the stream available at given url. And that works perfectly. I have also set up AVSession in AppDelegate, so the app plays in background, stops playing while the call is incoming and resumes playing after the call. This all works fine. However, I wasn't able neither to bring the remote control on lock screen nor showing app in Player tile in Control Center.
The app is written using SwiftUI, I am also moving from traditional completion blocks and targets into Combine. I have created separate class Player, which is ObservableObject (and observed by ContentView), where I set up AVPlayer, AVPlayerItem (with given URL for stream). And all works fine. App updates the state on change of player state. I am not using AVPlayerViewController, since I don't need one. On initialization of that Player object I am also setting up Remote Transport Controls using this method (I moved from setting targets to publishers).
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
Either I use the original version of that method provided by Apple, or my own version (as shown above) the Remote Control doesn't show up, and the Control Center tile player is not updated.
Of course I use the method provided by Apple for updating NowPlaying
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
I don't know where is the problem. Is it the way I set up Remote Transport Controls? The flow is like this:
Observable Player object with AVPlayer and setup for Remote Transport Controls and NowPlaying -> observed by -> Content View.
Here is full listing for Player class:
import Foundation
import AVKit
import Combine
import MediaPlayer
class Player: ObservableObject {
private let streamURL = URL(string: "https://stream.rcs.revma.com/ypqt40u0x1zuv")!
#Published var status: Player.Status = .stopped
#Published var isPlaying = false
#Published var showError = false
#Published var isMuted = false
var player: AVPlayer?
var cancellables = Set<AnyCancellable>()
init() {
setupRemoteTransportControls()
}
func setupPlayer() {
let item = AVPlayerItem(url: streamURL)
player = AVPlayer(playerItem: item)
player?.allowsExternalPlayback = true
}
func play() {
handleInterruption()
handleRouteChange()
setupPlayer()
player?.play()
player?.currentItem?.publisher(for: \.status)
.sink(receiveValue: { status in
self.handle(status: status)
})
.store(in: &cancellables)
}
func stop() {
player?.pause()
player = nil
status = .stopped
}
func mute() {
player?.isMuted.toggle()
isMuted.toggle()
}
func handle(status: AVPlayerItem.Status) {
switch status {
case .unknown:
self.status = .waiting
self.isPlaying = false
case .readyToPlay:
self.status = .ready
self.isPlaying = true
self.setupNowPlaying()
case .failed:
self.status = .failed
self.isPlaying = false
self.showError = true
self.setupNowPlaying()
default:
self.status = .stopped
self.isPlaying = false
self.setupNowPlaying()
}
}
func handleInterruption() {
NotificationCenter.default.publisher(for: AVAudioSession.interruptionNotification)
.map(\.userInfo)
.compactMap {
$0?[AVAudioSessionInterruptionTypeKey] as? UInt
}
.map { AVAudioSession.InterruptionType(rawValue: $0)}
.sink { (interruptionType) in
self.handle(interruptionType: interruptionType)
}
.store(in: &cancellables)
}
func handle(interruptionType: AVAudioSession.InterruptionType?) {
switch interruptionType {
case .began:
self.stop()
case .ended:
self.play()
default:
break
}
}
typealias UInfo = [AnyHashable: Any]
func handleRouteChange() {
NotificationCenter.default.publisher(for: AVAudioSession.routeChangeNotification)
.map(\.userInfo)
.compactMap({ (userInfo) -> (UInfo?, UInt?) in
(userInfo, userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt)
})
.compactMap({ (result) -> (UInfo?, AVAudioSession.RouteChangeReason?) in
(result.0, AVAudioSession.RouteChangeReason(rawValue: result.1 ?? 0))
})
.sink(receiveValue: { (result) in
self.handle(reason: result.1, userInfo: result.0)
})
.store(in: &cancellables)
}
func handle(reason: AVAudioSession.RouteChangeReason?, userInfo: UInfo?) {
switch reason {
case .newDeviceAvailable:
let session = AVAudioSession.sharedInstance()
for output in session.currentRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.async {
self.play()
}
}
case .oldDeviceUnavailable:
if let previousRoute = userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription {
for output in previousRoute.outputs where output.portType == AVAudioSession.Port.headphones {
DispatchQueue.main.sync {
self.stop()
}
break
}
}
default:
break
}
}
}
extension Player {
enum Status {
case waiting, ready, failed, stopped
}
}
extension Player {
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.publisher(for: \.playCommand)
.sink(receiveValue: {_ in self.play() })
.store(in: &cancellables)
commandCenter.publisher(for: \.stopCommand)
.sink(receiveValue: {_ in self.stop() })
.store(in: &cancellables)
}
func setupNowPlaying() {
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "Radio"
if let image = UIImage(systemName: "radio") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentItem?.currentTime().seconds ?? ""
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = player?.currentItem?.asset.duration.seconds ?? ""
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPlaying ? 1 : 0
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
It appeared that I need to add in AppDelegate in application(:didFinishLaunchWithOptions) one line of code:
UIApplication.shared.beginReceivingRemoteControlEvents()
That solved the problem. Now remote controller is visible on lock screen and it also works in Control Center.
Once additional fix. Changing targets to publisher in setupRemoteTransportControls() in my Player object didn't work. So I switched back to setting targets like this.
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { event in
self.play()
return .success
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { event in
self.stop()
return .success
}
}

Resuming AVPlayer after phone call

There appears to be many solutions on SO addressing this yet none of those solutions have worked for me. I'm currently using Swift 5. I have a AVPlayer playing an animation (that loops) in my ViewController. When a call comes in through CallKit, regardless of whether I answer or decline the call, the animation played by the AVPlayer does not resume after the call has been dealt with. The interruption handler seems to be called before an interruption but usually doesn't get called after the interruption.
override func viewDidLoad() {
super.viewDidLoad()
prepareBGVideo()
...
NotificationCenter.default.addObserver(
self,
selector: #selector(applicationWillEnterForeground(notification:)),
name: UIApplication.willEnterForegroundNotification,
object: nil)
...
}
func prepareBGVideo() {
guard let path = Bundle.main.path(forResource: "animation", ofType:"mp4") else {
print("video not found")
return
}
let item = AVPlayerItem(url: URL(fileURLWithPath: path))
avPlayer = AVPlayer(playerItem: item)
NotificationCenter.default.addObserver(self,
selector: #selector(loopVideoBG),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: item)
NotificationCenter.default.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: nil)
avPlayerLayer = AVPlayerLayer(player: avPlayer)
avPlayerLayer.backgroundColor = UIColor.black.cgColor
avPlayer.volume = 0
avPlayer.actionAtItemEnd = .none
avPlayer.play()
view.backgroundColor = .clear
avPlayerLayer.frame = view.layer.bounds
view.layer.insertSublayer(avPlayerLayer, at: 0)
avPlayerLayer.videoGravity = isIPAD ? AVLayerVideoGravity.resize : AVLayerVideoGravity.resizeAspectFill // Changed from AVLayerVideoGravity.resizeAspect to AVLayerVideoGravity.resize so that video fits iPad screen
NotificationCenter.default.addObserver(self,
selector: #selector(willEnterForeground),
name: UIApplication.willEnterForegroundNotification,
object: nil)
}
#objc func handleInterruption(notification: Notification) {
guard let info = notification.userInfo,
let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
if type == .began {
// Interruption began, take appropriate actions (save state, update user interface)
self.avPlayer.pause()
} else if type == .ended {
guard let optionsValue =
info[AVAudioSessionInterruptionOptionKey] as? UInt else {
return
}
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// Interruption Ended - playback should resume
self.avPlayer.play()
}
}
}
/// Resume video while app wake up from background
#objc func willEnterForeground() {
avPlayer.seek(to: CMTime.zero)
JPUtility.shared.performOperation(0.1) {
self.avPlayer.play()
}
}
#objc func loopVideoBG() {
avPlayer.seek(to: CMTime.zero)
avPlayer.play()
}
Here are all the solutions that I have tried:
Waiting two seconds before calling self.avPlayer.play() in if options.contains(.shouldResume){}
Setting AVAudioSession.sharedInstance().setActive to false when interruption begins and then setting it ot true when interruption ends. The issue with this approach is that the if interruption == .ended {} block doesn't always get invoked so setting setActive had no effect.
Setting AVAudioSession playback category to AVAudioSessionCategoryOptions.MixWithOthers. My animation doesn't have audio anyway.
I have seen mentions of resuming playback in applicationDidBecomeActive(_:) but some advised against this. Would this be considered good practice?
Is there a way to ensure that the else if type == .ended {} block gets executed? Or perhaps a workaround that works more reliably than observing AVAudioSession.interruptionNotification?
I solved this but creating a shared VideoPlayer class that contained references to all the screen that had animations.
import Foundation
import UIKit
import AVKit
class VideoPlayer: NSObject {
static var shared: VideoPlayer = VideoPlayer()
var avPlayer: AVPlayer!
var avPlayerLayer: AVPlayerLayer!
weak var vcForConnect:ConnectVC?
weak var vcForList:ListVC?
override init() {
super.init()
guard let path = Bundle.main.path(forResource: "animation", ofType:"mp4") else {
print("video not found")
return
}
avPlayer = AVPlayer(url: URL(fileURLWithPath: path))
avPlayerLayer = AVPlayerLayer(player: avPlayer)
avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
avPlayer.volume = 0
avPlayer.actionAtItemEnd = .none
loopVideo(videoPlayer: avPlayer)
avPlayer.play()
NotificationCenter.default.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: nil)
}
deinit {
avPlayer.pause()
}
#objc func handleInterruption(notification: Notification) {
guard let info = notification.userInfo,
let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
if type == .began {
// Interruption began, take appropriate actions (save state, update user interface)
self.avPlayer.pause()
} else if type == .ended {
guard let optionsValue =
info[AVAudioSessionInterruptionOptionKey] as? UInt else {
return
}
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// Interruption Ended - playback should resume
self.avPlayer.play()
}
}
}
func resumeAllAnimations() {
self.avPlayer.play()
if vcForList?.avPlayer != nil {
vcForList?.avPlayer.play()
}
if vcForConnect?.avPlayer != nil {
vcForConnect?.avPlayer.play()
}
if vcForConnect?.avPlayerBG != nil {
vcForConnect?.avPlayerBG.play()
}
}
...
}
I then resume the animations by calling resumeAllAnimations() in applicationDidBecomeActive(_:) in AppDelegate.swift like so:
func applicationDidBecomeActive(_ application: UIApplication) {
VideoPlayer.shared.resumeAllAnimations()
...
}

Resources