Swift: restart music after user phone call - iOS

I am not quite sure if I am missing a function or something, but when a user's phone rings, or they ask Siri something, or anything else stops my app's audio from playing, my app does not restart playback once the user has finished their task.
I am wondering: is there a function I am missing, or can Apple iOS apps not do this?
I thought it would be something to do with:
func setupRemoteTransportControls() {
    // Get the shared MPRemoteCommandCenter
    let commandCenter = MPRemoteCommandCenter.shared()
    // Add handler for Play Command
    commandCenter.playCommand.addTarget { [unowned self] event in
        if self.player?.rate == 0.0 {
            self.player?.play()
            return .success
        }
        return .commandFailed
    }
    // Add handler for Pause Command
    commandCenter.pauseCommand.addTarget { [unowned self] event in
        if self.player?.rate == 1.0 {
            self.player?.pause()
            return .success
        }
        return .commandFailed
    }
    // self.nowplaying(artist: "Anna", song: "test")
}
I have found that I need to add this part, but how do I call it?
func handleInterruption(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let interruptionTypeRawValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeRawValue) else {
        return
    }
    switch interruptionType {
    case .began:
        print("interruption began")
    case .ended:
        print("interruption ended")
    default:
        print("UNKNOWN")
    }
}

You need to set your audio session category to AVAudioSession.Category.playback. If you don't set this category, you will have the default, .soloAmbient (AVAudioSessionCategorySoloAmbient), which does not keep playing in the background.
You can set the category in application(_:didFinishLaunchingWithOptions:), e.g.
func application(_ application: UIApplication,
                 didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    // Get the singleton instance.
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Set the audio session category, mode, and options.
        try audioSession.setCategory(.playback, mode: .default, options: [])
    } catch {
        print("Failed to set audio session category.")
    }
    // Other post-launch configuration.
    return true
}
You will also need to implement interruption observation:
func setupNotifications() {
    // Get the default notification center instance.
    let nc = NotificationCenter.default
    nc.addObserver(self,
                   selector: #selector(handleInterruption),
                   name: AVAudioSession.interruptionNotification,
                   object: nil)
}

@objc func handleInterruption(notification: Notification) {
    // To be implemented.
}
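To answer the "how do I call it?" part: you don't call handleInterruption yourself; the notification center invokes it whenever the audio session is interrupted. As a minimal sketch of a completed handler (following Apple's documented interruption-handling pattern, and assuming player is the AVPlayer from the question's code), you resume only when the system passes the .shouldResume option:

@objc func handleInterruption(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
        return
    }
    switch type {
    case .began:
        // The system has already paused your audio; just update UI state here.
        print("interruption began")
    case .ended:
        // Resume only if the system says resuming is appropriate.
        guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
        let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
        if options.contains(.shouldResume) {
            self.player?.play() // player is assumed to be the question's AVPlayer
        }
    @unknown default:
        break
    }
}

In practice, .shouldResume is typically delivered after interruptions such as phone calls, when it is appropriate for your app to resume playback on its own.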

Related

Swift - How to handle audio session interruptions while recording video

I'm trying to fix a behavior of my iOS application. There is a camera plug-in in the application that allows the user to record videos and take pictures. While recording a video, if an interruption occurs, such as an incoming call or an alarm, the camera freezes and the video is not saved. I would like to handle this situation by either stopping the video and saving it, or by removing the audio while it keeps recording.
But after the interruption starts and the event is recognized, the variables regarding the video already recorded are reinitialized, causing the app not to save anything.
Any help would be appreciated.
var captureSession: AVCaptureSession?

public func fileOutput(_: AVCaptureFileOutput, didStartRecordingTo _: URL, from _: [AVCaptureConnection]) {
    NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionBegin), name: .AVCaptureSessionWasInterrupted, object: captureSession)
    NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnd), name: .AVCaptureSessionInterruptionEnded, object: captureSession)
    NotificationCenter.default.addObserver(self, selector: #selector(audioSessionInterrupted), name: AVAudioSession.interruptionNotification, object: AVAudioSession.sharedInstance())
    print("Passing through AVCaptureFileOutputRecordingDelegate")
    captureSession?.beginConfiguration()
    if flashMode != .off {
        _updateIlluminationMode(flashMode)
    }
    captureSession?.commitConfiguration() // at this point captureSession starts collecting data about the video
}
extension CameraManager {
    @objc func sessionInterruptionBegin(notification: Notification) {
        print("Capture Session Interruption begin Notification!")
        guard let reasonNumber = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? NSNumber else {
            return
        }
        let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
        switch reason {
        case .audioDeviceInUseByAnotherClient:
            removeAudioInput()
        default:
            break
        }
    }
    func addAudioInput() throws {
        if audioDeviceInput != nil {
            return
        }
        removeAudioInput()
        print("Adding audio input...")
        captureSession?.beginConfiguration()
        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
            throw NSError()
        }
        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        guard captureSession!.canAddInput(audioDeviceInput!) else {
            throw NSError()
        }
        captureSession?.addInput(audioDeviceInput!)
        captureSession?.automaticallyConfiguresApplicationAudioSession = false
        captureSession?.commitConfiguration()
    }
    func removeAudioInput() {
        // when the code reaches this point audioDeviceInput is reinitialized, so the audio session is not removed from the recording
        // captureSession is not filled with the data about the video recorded
        guard let audioInput = audioDeviceInput else {
            return
        }
        captureSession?.beginConfiguration()
        captureSession?.removeInput(audioInput)
        audioDeviceInput = nil
        captureSession?.commitConfiguration()
    }
    @objc func sessionInterruptionEnd(notification: Notification) {
        print("Capture Session Interruption end Notification!")
        guard let reasonNumber = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? NSNumber else {
            return
        }
        let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
        switch reason {
        case .audioDeviceInUseByAnotherClient:
            // add audio again because we removed it when we received the interruption
            configureAudioSession()
        default:
            // don't do anything, iOS will automatically resume the session
            break
        }
    }
    @objc func audioSessionInterrupted(notification: Notification) {
        print("Audio Session Interruption Notification!")
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            return
        }
        switch type {
        case .began:
            print("The Audio Session was interrupted!")
            removeAudioInput()
        case .ended:
            print("The Audio Session interruption has ended.")
            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                print("Resuming interrupted Audio Session...")
                // restart audio session because the interruption is over
                configureAudioSession()
            } else {
                print("Cannot resume interrupted Audio Session!")
            }
        @unknown default: ()
        }
    }
    func configureAudioSession() {
        let start = DispatchTime.now()
        do {
            try self.addAudioInput()
            let audioSession = AVAudioSession.sharedInstance()
            if audioSession.category != .playAndRecord {
                // allow background music playback
                try audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
            }
            // activate current audio session because camera is active
            try audioSession.setActive(true)
        } catch let error as NSError {
            switch error.code {
            case 561_017_449:
                print(error.description)
            default:
                print(error.description)
            }
            self.removeAudioInput()
        }
        let end = DispatchTime.now()
        let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
        print("Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
    }
}

MPRemoteCommandCenter - Remote controls on lock screen do not show up

I've implemented two functions in my view controller (setupRemoteTransportControls() and setupNowPlaying()) and added one function to AppDelegate, but I'm still unable to see my app's background audio controls on the lock screen, and the audio interruption function isn't working either. This is a live stream from a URL, as you can see in the code. In the general settings I have enabled background playing.
What I would like to do is show the artist, title, and album art (the labels and UIImage are taken from my station's API) on the Remote Command Center, but I am stuck just displaying the command center. Here is my code:
import UIKit
import AVKit
import MediaPlayer

class ViewController: UIViewController, AVAudioPlayerDelegate {
    @IBAction func buttonPressed(_ sender: UIButton) {
        if isPlaying {
            player.pause()
            sender.setImage(playImage, for: .normal)
        } else {
            let url = "https://myradio.com/radio.mp3"
            do {
                try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, options: [])
                print("Playback OK")
                try AVAudioSession.sharedInstance().setActive(true)
                print("Session is Active")
            } catch {
                print(error)
            }
            player = AVPlayer(url: URL(string: url)!)
            player.volume = 1.0
            player.rate = 1.0
            player.play()
            sender.setImage(pauseImage, for: .normal)
        }
        isPlaying.toggle()
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        overrideUserInterfaceStyle = .light
        setupRemoteTransportControls()
        requestNowPlaying()
        setupNowPlaying()
    }
    // Here is the API data downloading part, so I skipped it

    // Command Center audio controls
    func setupRemoteTransportControls() {
        // Get the shared MPRemoteCommandCenter
        let commandCenter = MPRemoteCommandCenter.shared()
        // Add handler for Play Command (play should fire when the player is paused, i.e. rate == 0.0)
        commandCenter.playCommand.addTarget { [unowned self] event in
            if self.player.rate == 0.0 {
                self.player.play()
                return .success
            }
            return .commandFailed
        }
        // Add handler for Pause Command
        commandCenter.pauseCommand.addTarget { [unowned self] event in
            if self.player.rate == 1.0 {
                self.player.pause()
                return .success
            }
            return .commandFailed
        }
    }
    func setupNowPlaying() {
        // Define Now Playing Info
        var nowPlayingInfo = [String: Any]()
        nowPlayingInfo[MPMediaItemPropertyTitle] = "Test"
        if let image = UIImage(named: "Default_albumart") {
            nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { size in
                return image
            }
        }
        nowPlayingInfo[MPNowPlayingInfoPropertyIsLiveStream] = true
        // Set the metadata
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
    }

    func updateNowPlaying(isPause: Bool) {
        // Define Now Playing Info
        let nowPlayingInfo = MPNowPlayingInfoCenter.default().nowPlayingInfo!
        // nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player.currentTime
        // nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPause ? 0 : 1
        // Set the metadata
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
    }
    // audio interruption
    @objc func handleInterruption(notification: Notification) {
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            return
        }
        // Switch over the interruption type.
        switch type {
        case .began:
            print("Interruption began")
        case .ended:
            // An interruption ended. Resume playback, if appropriate.
            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                player.play()
            } else {
                // An interruption ended. Don't resume playback.
            }
        default: ()
        }
    }
}
Here's what I've added to my AppDelegate.swift:
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    application.beginReceivingRemoteControlEvents()
    // Override point for customization after application launch.
    return true
}
Remove .mixWithOthers from your category options.
I think the reasoning is that only the primary iOS audio app can control the lock screen; .mixWithOthers is for secondary audio apps.
Identify yourself as a .longForm audio content provider with this code:
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, routeSharingPolicy: .longForm, options: [])
For the whole implementation of AirPlay 2, check this Apple WWDC presentation: https://developer.apple.com/videos/play/wwdc2017/509/
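Putting both points together, here is a minimal sketch of a session setup for a player that should get lock-screen controls; the helper name configurePlaybackSession is illustrative, not part of the question's code:

import AVFoundation

func configurePlaybackSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        // No .mixWithOthers here: a mixed (secondary) audio app does not get
        // lock-screen controls or Now Playing info.
        try session.setCategory(.playback,
                                mode: .default,
                                routeSharingPolicy: .longForm,
                                options: [])
        try session.setActive(true)
    } catch {
        print("Failed to configure audio session: \(error)")
    }
}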

MPRemoteCommandCenter Not displaying in lock screen

Maybe it's a duplicate question, but none of the solutions are working for me. I have tried almost everything.
The Now Playing info is not getting updated on the lock screen.
Swift version: 5, iOS version: 13.
Here is my code:
func setupRemoteCommandCenter() {
    UIApplication.shared.beginReceivingRemoteControlEvents()
    let commandCenter = MPRemoteCommandCenter.shared()
    commandCenter.playCommand.addTarget { event in
        return .success
    }
    commandCenter.pauseCommand.addTarget { event in
        return .success
    }
    commandCenter.nextTrackCommand.addTarget { event in
        return .success
    }
    commandCenter.previousTrackCommand.addTarget { event in
        return .success
    }
}

func updateLockScreen() {
    var nowPlayingInfo = [String: Any]()
    nowPlayingInfo[MPMediaItemPropertyArtist] = "Artist"
    nowPlayingInfo[MPMediaItemPropertyTitle] = "title"
    MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
One method is called from viewDidLoad, i.e.
override func viewDidLoad() {
    super.viewDidLoad()
    AudioManager.shared.audioManageDelegate = self
    // Do any additional setup after loading the view.
    setupRemoteCommandCenter()
}
And the other one is called from the play button's action method, i.e.
@IBAction func togglePlayPauseButton(sender: UIButton) {
    // play/pause button
    sender.isSelected = !sender.isSelected
    // updateLockScreen() // I tried calling it from here as well
    if sender.isSelected {
        AudioManager.shared.playMusic()
    } else {
        AudioManager.shared.pauseMusic()
    }
    updateLockScreen()
}
My AppDelegate is here:
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    // Override point for customization after application launch.
    setUPforbackground()
    return true
}

func setUPforbackground() {
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
        print("Playback OK")
        try AVAudioSession.sharedInstance().setActive(true)
        print("Session is Active")
    } catch {
        print(error)
    }
}
Calling updateLockScreen() before calling playMusic() also gives no result. Am I missing anything here?
After analysing the code carefully, I found that this line was throwing an error:
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
So I changed it to the following:
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
Hope it will help someone. Thank you.
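For reference, here is a sketch of the same setup with the error surfaced instead of swallowed. The likely culprit is .allowAirPlay, which the documentation only permits with the .playAndRecord category (an inference, not something this answer states); AirPlay output still works with plain .playback:

func setUpForBackground() {
    let session = AVAudioSession.sharedInstance()
    do {
        // .allowAirPlay is only valid with .playAndRecord, so it is omitted here.
        try session.setCategory(.playback, mode: .default)
        try session.setActive(true)
        print("Session is active")
    } catch {
        // Print the real error (e.g. OSStatus -50 for invalid options).
        print("Audio session setup failed: \(error)")
    }
}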

iOS media playback controls notification

I am new to iOS and am developing a cross-platform app with Flutter. I am trying to play audio from a network URL, which I found can be done using AVPlayer. The audio plays when the app is in the foreground and in the background, but I cannot get the media playback controls to display.
I used let mediaController = MPMusicPlayerController.applicationMusicPlayer, then called self.mediaController.beginGeneratingPlaybackNotifications(), provided the playing info via MPNowPlayingInfoCenter.default().nowPlayingInfo = mediaInfo, and set the targets for the remote command center in my self.registerCommands() method.
I did a lot of research but had no luck finding the problem; as I said before, I am new to iOS.
AppDelegate
import UIKit
import Flutter
import AVFoundation
import MediaPlayer

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
    static let CHANNEL = "APP_CHANNEL"
    let mPlayer = AudioPlayer()
    let mediaController = MPMusicPlayerController.applicationMusicPlayer

    override func application(
        _ application: UIApplication,
        didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
    ) -> Bool {
        self.requestNotificationPermission(application: application)
        let controller: FlutterViewController = window?.rootViewController as! FlutterViewController
        let mainChannel = FlutterMethodChannel(name: AppDelegate.CHANNEL,
                                               binaryMessenger: controller.binaryMessenger)
        mainChannel.setMethodCallHandler({
            (call: FlutterMethodCall, result: @escaping FlutterResult) -> Void in
            switch call.method {
            case "getSharedContainerPath":
                let path = Utils.getSharedContainerPath()
                result(path)
            case "saveSelectedCity":
                let city = call.arguments as! String
                Utils.saveCityToUserDefaults(city: city)
                result(true)
            case "playSurah":
                let number = call.arguments as! Int
                self.initAudioPlayer()
                self.mPlayer.toggle(num: number)
                result(true)
            default:
                result(FlutterMethodNotImplemented)
            }
        })
        GeneratedPluginRegistrant.register(with: self)
        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }
    func initAudioPlayer() {
        self.mediaController.beginGeneratingPlaybackNotifications()
        self.mPlayer.initPlayer(object: self)
        self.registerCommands()
        let nc = NotificationCenter.default
        nc.addObserver(self,
                       selector: #selector(handleInterruption),
                       name: AVAudioSession.interruptionNotification,
                       object: nil)
        nc.addObserver(self, selector: #selector(playerDidFinishPlaying), name: .AVPlayerItemDidPlayToEndTime, object: nil)
    }
    func requestNotificationPermission(application: UIApplication) {
        if #available(iOS 10, *) {
            // iOS 10 support
            // create the notification center
            let center = UNUserNotificationCenter.current()
            center.delegate = self as UNUserNotificationCenterDelegate
            // request sound, alert, and badge permissions
            center.requestAuthorization(options: [.sound, .alert, .badge]) { (granted, error) in
                if granted {
                    print("Notifications enabled successfully")
                } else {
                    print("Some error occurred")
                }
            }
            application.registerForRemoteNotifications()
        } else if #available(iOS 9, *) {
            // iOS 9 support
            UIApplication.shared.registerUserNotificationSettings(UIUserNotificationSettings(types: [.badge, .sound, .alert], categories: nil))
            UIApplication.shared.registerForRemoteNotifications()
        } else if #available(iOS 8, *) {
            // iOS 8 support
            UIApplication.shared.registerUserNotificationSettings(UIUserNotificationSettings(types: [.badge, .sound, .alert], categories: nil))
            UIApplication.shared.registerForRemoteNotifications()
        } else {
            // iOS 7 support
            application.registerForRemoteNotifications(matching: [.badge, .sound, .alert])
        }
    }
    func registerCommands() {
        let command = MPRemoteCommandCenter.shared()
        command.playCommand.isEnabled = true
        command.playCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.play()
            return .success
        }
        command.pauseCommand.isEnabled = true
        command.pauseCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.pause()
            return .success
        }
        command.togglePlayPauseCommand.isEnabled = true
        command.togglePlayPauseCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.toggle(num: self.mPlayer.index)
            return .success
        }
        command.nextTrackCommand.isEnabled = true
        command.nextTrackCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.playNext()
            return .success
        }
        command.previousTrackCommand.isEnabled = true
        command.previousTrackCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.playPrev()
            return .success
        }
        command.stopCommand.isEnabled = true
        command.stopCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.stop()
            return .success
        }
    }
    // [notificationCenter addObserver: self
    //                        selector: #selector(handle_NowPlayingItemChanged:)
    //                            name: MPMusicPlayerControllerNowPlayingItemDidChangeNotification
    //                          object: musicPlayer];
    //
    // [notificationCenter addObserver: self
    //                        selector: #selector(handle_PlaybackStateChanged:)
    //                            name: MPMusicPlayerControllerPlaybackStateDidChangeNotification
    //                          object: musicPlayer];
    //
    // [notificationCenter addObserver: self
    //                        selector: #selector(handle_VolumeChanged:)
    //                            name: MPMusicPlayerControllerVolumeDidChangeNotification
    //                          object: musicPlayer];
    func destroyPlayer() {
        self.mPlayer.stop()
        let nc = NotificationCenter.default
        nc.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
        nc.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil)
        self.mediaController.endGeneratingPlaybackNotifications()
        let command = MPRemoteCommandCenter.shared()
        command.playCommand.isEnabled = false
        command.pauseCommand.isEnabled = false
        command.togglePlayPauseCommand.isEnabled = false
        command.nextTrackCommand.isEnabled = false
        command.previousTrackCommand.isEnabled = false
        command.stopCommand.isEnabled = false
    }

    // override func applicationDidReceiveMemoryWarning(_ application: UIApplication) {
    //     self.destroyPlayer()
    // }

    override func applicationWillTerminate(_ application: UIApplication) {
        self.destroyPlayer()
    }
    @objc func playerDidFinishPlaying(note: NSNotification) {
        self.mPlayer.playNext()
    }
    override func observeValue(forKeyPath keyPath: String?,
                               of object: Any?,
                               change: [NSKeyValueChangeKey: Any]?,
                               context: UnsafeMutableRawPointer?) {
        // Only handle observations for the playerItemContext
        guard context == &mPlayer.playerItemContext else {
            super.observeValue(forKeyPath: keyPath,
                               of: object,
                               change: change,
                               context: context)
            return
        }
        if keyPath == #keyPath(AVPlayerItem.status) {
            let status: AVPlayerItem.Status
            if let statusNumber = change?[.newKey] as? NSNumber {
                status = AVPlayerItem.Status(rawValue: statusNumber.intValue)!
            } else {
                status = .unknown
            }
            // Switch over status value
            switch status {
            case .readyToPlay:
                // Player item is ready to play.
                self.mPlayer.updateMediaInfo()
            case .failed:
                // Player item failed. See error.
                break
            case .unknown:
                // Player item is not yet ready.
                break
            @unknown default:
                super.observeValue(forKeyPath: keyPath,
                                   of: object,
                                   change: change,
                                   context: context)
            }
        } else if keyPath == #keyPath(AVPlayer.timeControlStatus) {
            if object is AVPlayer {
                if (object as? AVPlayer) != nil {
                    self.mPlayer.updateMediaInfo()
                }
            }
        } else {
            super.observeValue(forKeyPath: keyPath,
                               of: object,
                               change: change,
                               context: context)
        }
    }
    @objc func handleInterruption(notification: Notification) {
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            return
        }
        // Switch over the interruption type.
        switch type {
        case .began:
            // An interruption began. Update the UI as needed.
            self.mPlayer.pause()
        case .ended:
            // An interruption ended. Resume playback, if appropriate.
            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                // Interruption ended. Playback should resume.
                self.mPlayer.play()
            } else {
                // Interruption ended. Playback should not resume.
            }
        default: ()
        }
    }
}
Audio Player class
//
// AudioPlayer.swift
// Runner
import Foundation
import AVFoundation
import MediaPlayer
class AudioPlayer {
    private var player: AVPlayer?
    var index: Int = 0
    private var object: NSObject!
    // Key-value observing context
    var playerItemContext = 0
    private var mediaInfo = [String: Any]()

    func initPlayer(object: NSObject) {
        self.object = object
        do {
            if #available(iOS 10.0, *) {
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers, .allowAirPlay])
                try AVAudioSession.sharedInstance().setActive(false)
            } else {
                // Fallback on earlier versions
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, options: .mixWithOthers)
            }
        } catch {
            print(error)
        }
    }
    func startPlayer() {
        do {
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
        self.mediaInfo[MPMediaItemPropertyTitle] = ""
        self.mediaInfo[MPMediaItemPropertyArtist] = ""
        updateMediaInfo()
        let url = getUrl()
        let playerItem = AVPlayerItem(url: url!)
        playerItem.addObserver(self.object, forKeyPath: #keyPath(AVPlayerItem.status), options: [.old, .new], context: &playerItemContext)
        if self.player == nil {
            self.player = AVPlayer(playerItem: playerItem)
        } else {
            self.player?.replaceCurrentItem(with: playerItem)
        }
        self.player?.addObserver(self.object, forKeyPath: #keyPath(AVPlayer.timeControlStatus), options: [.new, .old], context: &playerItemContext)
        if let p = self.player {
            p.play()
        }
        getMetadata(for: url!, completionHandler: { (metadata) in
            self.mediaInfo[MPMediaItemPropertyTitle] = metadata?["title"]
            self.mediaInfo[MPMediaItemPropertyArtist] = metadata?["artist"]
            self.mediaInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
            self.updateMediaInfo()
        })
    }
    func toggle(num: Int) {
        if self.index == num {
            if let p = self.player {
                if p.isPlaying {
                    p.pause()
                } else {
                    p.play()
                }
                self.updateMediaInfo()
            }
        } else {
            self.index = num
            startPlayer()
        }
    }

    func pause() {
        if let p = self.player {
            if p.isPlaying {
                p.pause()
                self.updateMediaInfo()
            }
        }
    }

    func play() {
        if let p = self.player {
            if !p.isPlaying {
                p.play()
                self.updateMediaInfo()
            }
        }
    }

    func playNext() {
        if self.index + 1 <= 114 {
            self.index += 1
        } else {
            self.index = 1
        }
        self.startPlayer()
    }

    func playPrev() {
        if self.index - 1 >= 1 {
            self.index -= 1
        } else {
            self.index = 114
        }
        self.startPlayer()
    }

    func stop() {
        if let p = self.player {
            p.pause()
            self.player?.replaceCurrentItem(with: nil)
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
    }

    func getUrl() -> URL? {
        return URL(string: String(format: Utils.QURAN_AUDIO, self.index))
    }
    func updateMediaInfo() {
        mediaInfo[MPNowPlayingInfoPropertyPlaybackRate] = player?.rate
        mediaInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentTime().seconds
        if #available(iOS 10.0, *) {
            mediaInfo[MPNowPlayingInfoPropertyMediaType] = NSNumber(value: MPNowPlayingInfoMediaType.audio.rawValue)
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo = mediaInfo
    }
    func getMetadata(for url: URL, completionHandler: @escaping (_ metadata: [String: String]?) -> ()) {
        var request = URLRequest(url: url)
        request.httpMethod = "HEAD"
        let task = URLSession.shared.dataTask(with: request) { (data, response, error) in
            guard error == nil,
                  let res1 = response as? HTTPURLResponse,
                  let contentLength = res1.allHeaderFields["Content-Length"] as? String else {
                completionHandler(nil)
                return
            }
            do {
                var req = URLRequest(url: url)
                req.setValue("bytes=\(UInt64(contentLength)! - 128)-", forHTTPHeaderField: "Range")
                let data = try NSURLConnection.sendSynchronousRequest(req, returning: nil)
                let titleBytes = data.subdata(in: Range<Int>(NSRange(location: 3, length: 29))!)
                    .filter { (data) -> Bool in
                        data != 0
                    }
                let artistBytes = data.subdata(in: Range<Int>(NSRange(location: 33, length: 29))!)
                    .filter { (data) -> Bool in
                        data != 0
                    }
                let title = String(data: titleBytes, encoding: String.Encoding.utf8)
                let artist = String(data: artistBytes, encoding: String.Encoding.utf8)
                completionHandler(["title": title!, "artist": artist!])
            } catch {
                completionHandler(nil)
            }
        }
        task.resume()
    }
}
extension AVPlayer {
    var isPlaying: Bool {
        if #available(iOS 10.0, *) {
            return timeControlStatus.rawValue == TimeControlStatus.playing.rawValue
        }
        return rate != 0 && error == nil
    }
}
From a comment:
I don't have a real device; I am using the iPhone 11 Pro Max simulator
That's the problem. You cannot test this feature except on a device. The simulator is not a reliable guide for many iOS features and behaviors, and this is one of them. Without a device, you have no evidence of whether your code works as desired.
If I get you right, the Now Playing info doesn't show your media info (title, etc.).
This is because currently iOS ignores Now Playing info from AVAudioSessions with the .mixWithOthers option enabled.
I set up a little test project with your code. With the .mixWithOthers option I could reproduce your problem; after removing this option, MPNowPlayingInfoCenter worked as expected.
One more thing:
When trying to set the AVAudioSession category, I always get the error Error Domain=NSOSStatusErrorDomain Code=-50 "(null)".
This is because setting the .allowAirPlay option isn't allowed for the category .playback. (https://developer.apple.com/documentation/avfoundation/avaudiosession/categoryoptions/1771736-allowairplay)
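Both fixes together amount to a one-line change in the question's initPlayer. A minimal sketch under those assumptions (the category line is the only intended difference from the original code):

func initPlayer(object: NSObject) {
    self.object = object
    do {
        // No .mixWithOthers (it makes iOS ignore your Now Playing info) and
        // no .allowAirPlay (invalid with .playback, causing OSStatus -50).
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    } catch {
        print(error)
    }
}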

Updating the label with current heart rate (Swift and WatchKit)

When I tap the button, the current heart rate should be captured and the label updated. But I do not understand why the label won't get updated with the heart rate value; I do not know what is wrong here.
I am still very much a beginner with Swift and WatchKit. I hope someone can help me out.
Thank you very much.
Below you can see the code. I have also added the authorization part to the AppDelegate.swift file.
import WatchKit
import Foundation
import HealthKit

class InterfaceController: WKInterfaceController {
    @IBOutlet weak var label: WKInterfaceLabel!
    @IBOutlet weak var button: WKInterfaceButton!
    var isRecording = false

    // For the workout session
    let healthStore = HKHealthStore()
    var session: HKWorkoutSession?
    var currentQuery: HKQuery?

    override func awake(withContext context: Any?) {
        super.awake(withContext: context)
        // Configure interface objects here.
    }

    override func willActivate() {
        // This method is called when the watch view controller is about to be visible to the user
        super.willActivate()
        // Check HealthStore
        guard HKHealthStore.isHealthDataAvailable() == true else {
            print("Health Data Not Available")
            return
        }
    }

    override func didDeactivate() {
        // This method is called when the watch view controller is no longer visible
        super.didDeactivate()
    }

    @IBAction func btnPressed() {
        if !isRecording {
            let stopTitle = NSMutableAttributedString(string: "Stop Recording")
            stopTitle.setAttributes([NSAttributedString.Key.foregroundColor: UIColor.red], range: NSMakeRange(0, stopTitle.length))
            button.setAttributedTitle(stopTitle)
            isRecording = true
            startWorkout() // Start workout session/HealthKit streaming
        } else {
            let exitTitle = NSMutableAttributedString(string: "Start Recording")
            exitTitle.setAttributes([NSAttributedString.Key.foregroundColor: UIColor.green], range: NSMakeRange(0, exitTitle.length))
            button.setAttributedTitle(exitTitle)
            isRecording = false
            healthStore.end(session!)
            label.setText("Heart Rate")
        }
    }
}
extension InterfaceController: HKWorkoutSessionDelegate {
    func workoutSession(_ workoutSession: HKWorkoutSession, didChangeTo toState: HKWorkoutSessionState, from fromState: HKWorkoutSessionState, date: Date) {
        switch toState {
        case .running:
            print(date)
            // Execute the streaming query
            if let query = heartRateQuery(date) {
                self.currentQuery = query
                healthStore.execute(query)
            }
        case .ended:
            // Stop the query
            healthStore.stop(self.currentQuery!)
            session = nil
        default:
            print("Unexpected state: \(toState)")
        }
    }

    func workoutSession(_ workoutSession: HKWorkoutSession, didFailWithError error: Error) {
        // Do nothing
    }

    func startWorkout() {
        // If a workout has already been started, do nothing.
        if session != nil {
            return
        }
        // Configure the workout session.
        let workoutConfiguration = HKWorkoutConfiguration()
        workoutConfiguration.activityType = .running
        workoutConfiguration.locationType = .outdoor
        do {
            session = try HKWorkoutSession(configuration: workoutConfiguration)
            session?.delegate = self
        } catch {
            fatalError("Unable to create workout session")
        }
        healthStore.start(self.session!)
        print("Start Workout Session")
    }

    func heartRateQuery(_ startDate: Date) -> HKQuery? {
        let quantityType = HKObjectType.quantityType(forIdentifier: HKQuantityTypeIdentifier.heartRate)
        let datePredicate = HKQuery.predicateForSamples(withStart: startDate, end: nil, options: .strictEndDate)
        let predicate = NSCompoundPredicate(andPredicateWithSubpredicates: [datePredicate])
        let heartRateQuery = HKAnchoredObjectQuery(type: quantityType!, predicate: predicate, anchor: nil, limit: Int(HKObjectQueryNoLimit)) { (query, sampleObjects, deletedObjects, newAnchor, error) -> Void in
            // Do nothing
        }
        heartRateQuery.updateHandler = { (query, samples, deleteObjects, newAnchor, error) -> Void in
            guard let samples = samples as? [HKQuantitySample] else { return }
            DispatchQueue.main.async {
                guard let sample = samples.first else { return }
                let value = sample.quantity.doubleValue(for: HKUnit(from: "count/min"))
                print("This line is executed!")
                self.label.setText(String(UInt16(value))) // Update the label
            }
        }
        return heartRateQuery
    }
}
import UIKit
import HealthKit

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }

    func applicationWillResignActive(_ application: UIApplication) {
        // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
        // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
        // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
        // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
        // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
    }

    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
    }

    func applicationWillTerminate(_ application: UIApplication) {
        // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
    }

    let healthStore = HKHealthStore()

    func applicationShouldRequestHealthAuthorization(_ application: UIApplication) {
        healthStore.handleAuthorizationForExtension { (success, error) in
        }
    }
}
