iPad media controls - iOS

So I am using the following .swift file to get the media player controls to show on the lock screen and in Notification Center. The code works fine on iPhone but not on iPad.
I have uploaded the full code to GitHub:
https://github.com/redimongo/DRN1
The part in question: what do I need to add so the iPad shows the media controls?
//
// MusicPlayer.swift
// DRN1
//
// Created by Russell Harrower on 25/11/19.
// Copyright © 2019 Russell Harrower. All rights reserved.
//
import Foundation
import AVFoundation
import MediaPlayer
import Kingfisher
class MusicPlayer {
static let shared = MusicPlayer()
var player: AVPlayer?
func startBackgroundMusic() {
self.setupRemoteTransportControls()
let urlString = "http://stream.radiomedia.com.au:8003/stream"
guard let url = URL.init(string: urlString)
else {
return
}
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
player?.play()
do {
// try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
let defaults = UserDefaults.standard
defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
let defaults = UserDefaults.standard
defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
// self.nowplaying(artist: "Anna", song: "test")
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
// MPMediaItemPropertyTitle: "Some name",
// MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String) {
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song)
}
}
func stopBackgroundMusic() {
guard let player = player else { return }
player.pause()
}
}
I have added the following, with no success:
override func viewDidLoad() {
UIApplication.shared.beginReceivingRemoteControlEvents()
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print(error)
}
}

Simulators running iOS 11+ (including iPadOS) will not show the remote controls unless they are the default controls created by AVPlayerViewController. You will need a real device to test MPRemoteCommandCenter, or run the simulator with iOS 10 or lower.
override func viewDidLoad() {
super.viewDidLoad()
// This is an alternative
let avPlayerViewController = AVPlayerViewController() // requires import AVKit
avPlayerViewController.player = MusicPlayer.shared.player
avPlayerViewController.player?.play()
#if targetEnvironment(simulator)
/*
On the simulator, skip the custom remote-control setup: the AVPlayer
inside AVPlayerViewController creates the default remote controls,
and with custom controls the simulator will not show any.
*/
#else
MusicPlayer.shared.setupRemoteTransportControls()
// more custom controls set up
#endif
}
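Note that the default controls only appear once the controller is on screen. A minimal sketch of presenting it (my example, assuming avPlayerViewController is stored as a property rather than a local):
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    // presenting the controller is what makes its default controls
    // (and the simulator's remote controls) appear
    present(avPlayerViewController, animated: true) {
        self.avPlayerViewController.player?.play()
    }
}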
Also, there is no need to call UIApplication.shared.beginReceivingRemoteControlEvents() on devices running iOS 7 and later.
EDIT:
In your MusicPlayer class:
import Foundation
import AVFoundation
import MediaPlayer
import AVKit
class MusicPlayer {
static let shared = MusicPlayer()
var player: AVPlayer?
let playerViewController = AVPlayerViewController()
func startBackgroundMusic() {
let urlString = "http://stream.radiomedia.com.au:8003/stream"
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
if playerViewController.player == nil {
playerViewController.player = player
}
playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
// self.nowplaying(artist: "Anna", song: "test")
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
// MPMediaItemPropertyTitle: "Some name",
// MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String) {
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song)
}
}
func stopBackgroundMusic() {
#if targetEnvironment(simulator)
if playerViewController.player != nil {
playerViewController.player?.pause()
}
#else
guard let player = player else { return }
player.pause()
#endif
}
}
Tested using the iPad (7th generation) simulator running iPadOS.
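A hedged usage sketch for the class above, from any view controller (my example, not from the original answer):
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    MusicPlayer.shared.startBackgroundMusic()
    #if targetEnvironment(simulator)
    // the embedded AVPlayerViewController supplies the default controls
    present(MusicPlayer.shared.playerViewController, animated: true)
    #endif
}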

Also, you can use the Media Player framework’s MPRemoteCommandCenter and MPNowPlayingInfoCenter classes with AVPlayer.
You need to invoke beginReceivingRemoteControlEvents():
Swift 3+
UIApplication.shared.beginReceivingRemoteControlEvents()
try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
If you would like to specify custom actions for the MPRemoteCommandCenter:
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.nextTrackCommand.isEnabled = true
commandCenter.nextTrackCommand.addTarget(self, action:#selector(nextTrackCommandSelector))
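The selector target would look something like this (the method name comes from the snippet above; the body is a hedged sketch):
@objc func nextTrackCommandSelector(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
    // advance to the next item in your queue here
    return .success
}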
For more information, refer to Apple's official documentation.

Related

iOS Video recording with AVFoundation

I need to open the camera and record. I also have a timer, and at each timer interval I should save a separate video without stopping the recording process.
I use AVFoundation, and in the timer action I call 2 functions (stopRecording, startRecording).
The timer interval is 4 seconds.
When I call the stopRecording method, the didFinishRecordingToOutputFileAtURL delegate method does not return the recording immediately; it fires about 3 seconds later, so I lose every second recording.
Is there another way to organize this kind of process, or how can I fix this issue?
Thanks
func start(completion: (Error?, Bool) -> ()) {
setupSession { success in
if !success {
print("Error!")
return
}
setupPreview()
startSession()
let timeInterval: TimeInterval = 4
timer = Timer.scheduledTimer(timeInterval: timeInterval, target: self, selector: #selector(timerAction), userInfo: nil, repeats: true)
}
}
func setupSession(completion: (Bool) -> ()) {
captureSession.beginConfiguration()
guard let camera = AVCaptureDevice.default(for: .video) else {
completion(false)
return
}
guard let mic = AVCaptureDevice.default(for: .audio) else {
completion(false)
return
}
do {
let videoInput = try AVCaptureDeviceInput(device: camera)
let audioInput = try AVCaptureDeviceInput(device: mic)
for input in [videoInput, audioInput] {
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
}
activeInput = videoInput
} catch {
print("Error setting device input: \(error)")
completion(false)
return
}
captureSession.addOutput(movieOutput)
captureSession.commitConfiguration()
}
func setupPreview() {
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = containerView.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
containerView.layer.addSublayer(previewLayer)
}
func startSession() {
if !captureSession.isRunning {
DispatchQueue.global(qos: .default).async { [weak self] in
self?.captureSession.startRunning()
}
}
}
func stopSession() {
if captureSession.isRunning {
DispatchQueue.global(qos: .default).async() { [weak self] in
self?.captureSession.stopRunning()
}
}
}
public func captureMovie() {
guard let connection = movieOutput.connection(with: .video) else {
return
}
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
let device = activeInput.device
if device.isSmoothAutoFocusEnabled {
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = true
device.unlockForConfiguration()
} catch {
print("error: \(error)")
}
}
guard let outUrl = tempURL else { return }
movieOutput.startRecording(to: outUrl, recordingDelegate: self)
}
public func stopRecording() {
if movieOutput.isRecording {
movieOutput.stopRecording()
}
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print(Date.now, " ", "file")
if let error = error {
print("error: \(error.localizedDescription)")
} else {
// Save the source
}
}
@objc private func timerAction() {
print(Date.now, " timerAction")
stopRecording()
captureMovie()
}
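One way to avoid depending on the timer for the restart (a hedged sketch, not a verified fix): begin the next segment from the delegate callback, so a new recording starts as soon as the previous file is finalized rather than on a fixed schedule:
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    saveSegment(outputFileURL) // hypothetical helper that stores the finished file
    if shouldKeepRecording {   // hypothetical flag toggled by start/stop
        captureMovie()         // begin the next segment immediately
    }
}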

In Swift 5 how would you add Audio Controls to Control Center via background audio?

I'm playing audio from a JSON source that I built to feed the audio URL and its track information (title, description, and cover art URL) to my app. The audio is a radio station stream. Playback works, play/stop controls work, cover art works, and background audio works. I'm stumped on adding controls to Control Center to play/stop the audio while outside the app. I've taken a look at Apple's documentation on this and it's very straightforward. I've also enabled the build settings for background fetch, background audio, and Bluetooth. But it doesn't seem to work on my iPhone attached to Xcode via USB-C (I'm assuming the iOS simulator doesn't support Control Center). Below is my working code. Any thoughts on how to get this working? Do I need to pass the audio to setupRemoteTransportControls()? I assume the title and cover art will need to be passed from the data source, but overall Control Center does not recognize that the app is playing audio.
ViewController.swift
import UIKit
import MediaPlayer
import AVFoundation
import Foundation
class ViewController: UIViewController {
var player: AVPlayer!
var playerItem: AVPlayerItem!
var audioCheck: Timer?
var timer: Timer?
var passText: String? = "Test"
@IBOutlet weak var trackTitle: UILabel!
@IBOutlet weak var togglePlay: UIButton!
@IBOutlet weak var coverPhoto: UIImageView!
func loadAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
}
let minutes = 60
func playAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
player.play()
}
@IBAction func OpenPlayer(_ sender: Any) {
performSegue(withIdentifier: "PlayerSegue", sender: self)
}
override func viewDidLoad() {
super.viewDidLoad()
loadAudio()
// Audio in the background
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback)
} catch {
print("Error: Audio is not paying in the background.")
}
// Timer checking if other audio sources are running
audioCheck = Timer.scheduledTimer(timeInterval: 0, target: self, selector: #selector(timedAudioCheck), userInfo: nil, repeats: true)
struct currentTrack: Codable {
let title: String
let artwork_url: String
}
struct getTrack: Codable {
// let name: String
// let status: String
let current_track: currentTrack
}
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
_ = Timer.scheduledTimer(withTimeInterval: 10.0, repeats: true) { timer in
// print("Data updated!")
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
}
// Apple's documentation on playing audio in Control Center
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
// Along with timer, checks if other audio sources are playing and resets the audio in the app
@objc func timedAudioCheck() {
if (AVAudioSession.sharedInstance().secondaryAudioShouldBeSilencedHint) {
togglePlay.setTitle("Play", for: .normal)
} else if (player.rate == 0) {
togglePlay.setTitle("Play", for: .normal)
} else {
togglePlay.setTitle("Puase", for: .normal)
}
}
@IBAction func togglePlay(_ sender: UIButton) {
if player.rate == 0 {
// Plays the audio stream
sender.setTitle("Pause", for: .normal)
playAudio()
} else {
sender.setTitle("Play", for: .normal)
player.pause()
}
}
}
Apple's suggested code to add Control Center support:
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
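One observation (mine, not from the post): the view controller above defines setupRemoteTransportControls() and setupNowPlaying() but never calls them, so Control Center never learns about the session. A hedged sketch of the wiring:
override func viewDidLoad() {
    super.viewDidLoad()
    loadAudio()
    setupRemoteTransportControls()
    // setupNowPlaying() reads playerItem, so call it only after playerItem is set
}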

Media Player not moving seek bar when fast forwarding

I am having a small issue where the seek bar does not move forward when the user taps the fast-forward or rewind buttons on the native iOS controls.
See video
https://youtu.be/CJP131GpSYI
The relevant section of code is below.
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
Full Media Player code:
import SwiftUI
//import Foundation
import AVFoundation
import MediaPlayer
import AVKit
struct NowPlayingData: Codable , Identifiable {
var id, artist, song, cover: String
private enum CodingKeys: String, CodingKey {
case id = "_id", artist, song, cover
}
}
class MusicPlayer {
static let shared = MusicPlayer()
static var mediatype = ""
static var artist = ""
static var song = ""
static var cover = ""
static var urls = ""
static var dur = 0
static var uuid = UIDevice.current.identifierForVendor?.uuidString
var player: AVPlayer?
let playerViewController = AVPlayerViewController()
func gettype(completion: @escaping (String) -> Void){
completion(MusicPlayer.mediatype)
}
func getPodCastPlayerNP(completion: @escaping (NowPlayingData) -> ()) {
// Timer.scheduledTimer(withTimeInterval: 15, repeats: true) { (timer) in
let songdata = "{\"_id\": \"1\",\"song\": \"\(MusicPlayer.song)\",\"artist\": \"\(MusicPlayer.artist)\", \"cover\": \"\(MusicPlayer.cover)\"}"
let data: Foundation.Data = songdata.data(using: .utf8)!
let podcast = try! JSONDecoder().decode(NowPlayingData.self, from: data)
//print(data!)
// let episode = podcast.programs
DispatchQueue.main.async{
// The array is stored under programs now
//print(podcast)
completion(podcast)
}
// }
}
func startBackgroundMusic(url: String, type:String) {
MusicPlayer.mediatype = String(type)
//let urlString = "http://stream.radiomedia.com.au:8003/stream"
let urlString = url+"?uuid="+MusicPlayer.uuid!
print(urlString)
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func startBackgroundMusicTwo() {
let urlString = "http://stream.radiomedia.com.au:8003/stream"
//let urlString = url
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String, duration: Int){
if(duration == 0){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
}
else{
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: false,
MPMediaItemPropertyPlaybackDuration: duration,
MPNowPlayingInfoPropertyPlaybackRate: 1.0,
MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds((self.player?.currentTime())!)
]
}
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle: "Some name",
MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String, urls: String, duration: Int) {
// MusicPlayer.JN = "[{'artist': \(artist), 'song':\(song), 'cover': \(cover)}]"
MusicPlayer.artist = artist
MusicPlayer.song = song
MusicPlayer.cover = cover
MusicPlayer.urls = urls
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song, duration: duration)
}
}
func stopBackgroundMusic() {
guard let player = player else { return }
player.pause()
}
}
Full GitHub code:
https://github.com/redimongo/iOS-Radio-App
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
//self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds - 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds + 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
Replace these two functions with the code above and let me know what happens.
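One more hedged addition (an assumption, not part of the original answer): after a successful seek, refreshing the now-playing info keeps the system seek bar in sync:
func updateElapsedTime() {
    guard let player = player else { return }
    var info = MPNowPlayingInfoCenter.default().nowPlayingInfo ?? [:]
    info[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player.currentTime().seconds
    info[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
    MPNowPlayingInfoCenter.default().nowPlayingInfo = info
}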

WebRTC iOS: Record Remote Audio stream using WebRTC

I am working on an audio streaming application with recording functionality for a receiver.
I am stuck at the point where the user wants to record the audio stream on the receiver side.
Below is my code
Initialisation
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false
Initialise AudioEngine
func initializeAudioEngine() {
let input = self.engine.inputNode
let format = input.inputFormat(forBus: 0)
self.engine.attach(self.player)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(input, to:mainMixerNode, format: format)
self.engine.prepare()
do {
try self.engine.start()
self.startRecording()
} catch (let error) {
print("START FAILED", error)
}
}
Start Recording
func startRecording() {
self.createRecordingFile()
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
self.isRecording = true
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
return
}
}
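createRecordingFile() is referenced above but not shown in the post; a plausible implementation (an assumption on my part), writing a .caf file into Documents:
private func createRecordingFile() {
    let format = engine.mainMixerNode.outputFormat(forBus: 0)
    let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        .appendingPathComponent("recording.caf")
    // AVAudioFile(forWriting:settings:) creates the file at the given URL
    recordingFile = try? AVAudioFile(forWriting: url, settings: format.settings)
}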
Create Buffer
private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
Stop Recording
func stopRecording() {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
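A hedged note (an assumption, not from the post): AVAudioFile finalizes its header when the writer reference is released, so a fuller stop might also clear state:
func stopRecording() {
    engine.mainMixerNode.removeTap(onBus: 0)
    isRecording = false
    recordingFile = nil // releasing the file finalizes it for playback
}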
I am trying to record using earphones, but it's not working.
It should work: once you set up the audio session with the play-and-record category and call setActive(true), recording captures whatever audio is routed to the device.
WebRTC does not have any Internal API to start or stop recording.
We can try using AVAudioSession instead.
First, set up the audio session:
func setUpAudioSession() -> Bool {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, mode: .default, options: [])
    } catch {
        print("--> \(error.localizedDescription)")
    }
    do {
        try session.setActive(true)
    } catch {
        print("--> \(error.localizedDescription)")
    }
    return session.isInputAvailable
}
After setting up the audio session, start recording as below:
func startRecording() -> Bool {
    var settings: [String: Any] = [:]
    settings[AVFormatIDKey] = kAudioFormatLinearPCM
    settings[AVSampleRateKey] = 8000.0
    settings[AVNumberOfChannelsKey] = 1
    settings[AVLinearPCMBitDepthKey] = 16
    settings[AVLinearPCMIsBigEndianKey] = false
    settings[AVLinearPCMIsFloatKey] = false
    settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue
    // The recorded file is saved in the app's Documents directory;
    // dateString (a unique file name) is assumed to be defined elsewhere
    let searchPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
    guard let documentPath = searchPaths.first else { return false }
    let url = URL(fileURLWithPath: "\(documentPath)/\(dateString)")
    recorder = try? AVAudioRecorder(url: url, settings: settings)
    // Initialize delegate, metering, etc.
    recorder?.delegate = self
    recorder?.isMeteringEnabled = true
    recorder?.prepareToRecord()
    if let recordIs = recorder {
        return recordIs.record()
    }
    return false
}
Play recorded file
func playRecordingFile() {
    // Get the path of the file recorded by the previous method
    let searchPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
    guard let documentPath = searchPaths.first else { return }
    let fileManager = FileManager.default
    guard let recordedSounds = try? fileManager.contentsOfDirectory(atPath: documentPath),
          let selectedSound = recordedSounds.first else { return }
    let url = URL(fileURLWithPath: "\(documentPath)/\(selectedSound)")
    // player is assumed to be an AVAudioPlayer? property, so the player
    // is not deallocated as soon as this method returns
    player = try? AVAudioPlayer(contentsOf: url)
    player?.delegate = self
    try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
    player?.prepareToPlay()
    player?.play()
}
Stop recording
func stopRecording() {
recorder?.stop()
}
Pause recording
func pauseRecording() {
recorder?.pause()
}
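Hypothetical usage of the helpers above, in order:
if setUpAudioSession() {
    _ = startRecording()
    // ... later
    stopRecording()
    playRecordingFile()
}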

Camera app freezes during phone call

I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I've tried using AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to handle the audio input management during a phone call, but have had no luck fixing the issue. When I comment out the audio input setup, the app no longer freezes during a phone call, so I'm pretty confident the issue lies somewhere with the audio management.
Why is the app freezing during phone calls and how can I fix it?
Thanks in advance!
Relevant code:
class CameraManager: NSObject {
static let shared = CameraManager()
private let notificationQueue = OperationQueue.main
var delegate: CameraManagerDelegate? = nil
let session = AVCaptureSession()
var captureDeviceInput: AVCaptureDeviceInput? = nil
var audioInput: AVCaptureDeviceInput? = nil
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureMovieFileOutput()
var isRecording: Bool {
return videoOutput.isRecording
}
func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
return device
}
func getZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().videoZoomFactor
}
func getMaxZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
}
override init() {
super.init()
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.session.startRunning()
try? self.setupCamera()
try? self.setZoomLevel(zoomLevel: 1.0)
if Settings.shared.autoRecord {
try? self.startRecording()
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.stopRecording()
self.session.stopRunning()
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
try? self.setupAudio()
}
try? self.setupSession()
}
func setupSession() throws {
session.sessionPreset = .high
if !session.isRunning {
session.startRunning()
}
if Utils.checkPermissions() {
try setupInputs()
setupOutputs()
}
}
func setupInputs() throws {
try setupCamera()
try setupAudio()
}
func setupCamera() throws {
do {
try setCamera(position: Settings.shared.defaultCamera)
} catch CameraManagerError.unableToFindCaptureDevice(let position) {
//some devices don't have a front camera, so try the back for setup
if position == .front {
try setCamera(position: .back)
}
}
}
func setupAudio() throws {
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraManagerError.unableToGetAudioDevice
}
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioInput) {
session.addInput(audioInput)
self.audioInput = audioInput
} else {
self.delegate?.unableToAddAudioInput()
}
}
func setupOutputs() {
self.photoOutput.isHighResolutionCaptureEnabled = true
guard session.canAddOutput(self.photoOutput) else {
//error
return
}
session.addOutput(self.photoOutput)
guard session.canAddOutput(self.videoOutput) else {
//error
return
}
session.addOutput(self.videoOutput)
}
func startRecording() throws {
if !self.videoOutput.isRecording {
let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")
self.videoOutput.startRecording(to: url, recordingDelegate: self)
}
}
func stopRecording() {
if self.videoOutput.isRecording {
self.videoOutput.stopRecording()
}
}
func setZoomLevel(zoomLevel: CGFloat) throws {
guard let captureDevice = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDevice
}
try captureDevice.lockForConfiguration()
captureDevice.videoZoomFactor = zoomLevel
captureDevice.unlockForConfiguration()
}
func capturePhoto() {
let photoOutputSettings = AVCapturePhotoSettings()
photoOutputSettings.flashMode = Settings.shared.flash
photoOutputSettings.isAutoStillImageStabilizationEnabled = true
photoOutputSettings.isHighResolutionPhotoEnabled = true
self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
}
func toggleCamera() throws {
if let captureDeviceInput = self.captureDeviceInput,
captureDeviceInput.device.position == .back {
try setCamera(position: .front)
} else {
try setCamera(position: .back)
}
}
func setCamera(position: AVCaptureDevice.Position) throws {
if let captureDeviceInput = self.captureDeviceInput {
if captureDeviceInput.device.position == position {
return
} else {
session.removeInput(captureDeviceInput)
}
}
var device: AVCaptureDevice? = nil
switch position {
case .front:
device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
default:
device = AVCaptureDevice.default(for: .video)
}
guard let nonNilDevice = device else {
throw CameraManagerError.unableToFindCaptureDevice(position)
}
try nonNilDevice.lockForConfiguration()
if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
nonNilDevice.focusMode = .continuousAutoFocus
}
if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
nonNilDevice.exposureMode = .continuousAutoExposure
}
nonNilDevice.unlockForConfiguration()
let input = try AVCaptureDeviceInput(device: nonNilDevice)
guard session.canAddInput(input) else {
throw CameraManagerError.unableToAddCaptureDeviceInput
}
session.addInput(input)
self.captureDeviceInput = input
}
func setFocus(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
}
func setExposure(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .autoExpose
device.unlockForConfiguration()
}
}
extension CameraManager: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.delegate?.cameraManagerWillCapturePhoto()
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else {
//error
return
}
let capturedImage = UIImage.init(data: imageData , scale: 1.0)
if let image = capturedImage {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
self.delegate?.cameraManagerDidFinishProcessingPhoto()
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
self.delegate?.cameraManagerDidStartRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
self.delegate?.cameraManagerDidFinishRecording()
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}) { saved, error in
if saved {
do {
try FileManager.default.removeItem(at: outputFileURL)
} catch _ as NSError {
//error
}
}
}
}
}
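One hedged suggestion (an assumption, not a verified fix): startRunning() and configuration calls are blocking, and while a phone call holds the audio hardware they can stall for a long time; doing them on the main queue (as the notification handlers and setupSession() above do) then freezes the UI. Moving session work to a dedicated serial queue avoids blocking the main thread:
private let sessionQueue = DispatchQueue(label: "camera.session.queue")

func startSessionInBackground() {
    // hypothetical replacement for the direct session.startRunning() calls
    sessionQueue.async { [weak self] in
        guard let self = self, !self.session.isRunning else { return }
        self.session.startRunning()
    }
}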
