Record audio from an audio stream - iOS

I'm building an app that plays audio streams. Below is the code I have.
class ViewController: UIViewController {
private var player: AVPlayer!
private var kvoContext: UInt8 = 1
override func viewDidLoad() {
super.viewDidLoad()
// Streaming
let streamURL = NSURL(string: "http://adsl.ranfm.lk/")!
player = AVPlayer(URL: streamURL)
}
@IBAction func didTapPlayButton(sender: UIButton) {
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(playerDidReachEnd(_:)), name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)
player.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &kvoContext)
player.play()
}
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if context == &kvoContext {
print("Change at \(keyPath) for \(object)")
}
if player.status == .Failed {
print("AVPlayer Failed")
}
if player.status == .ReadyToPlay {
print("Ready to play")
player.play()
}
if player.status == .Unknown {
print("AVPlayer Unknown")
}
}
func playerDidReachEnd(notification: NSNotification) {
print("Reached the end of file")
}
}
This plays the stream successfully. Now I want to add the ability to record the audio from this stream.
Based on what I could find, the AVAudioRecorder class is only for recording audio through the microphone. Is there a way to record audio from a stream?
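For what it's worth, one direction that does not involve the microphone at all: since this is a plain HTTP audio stream, you could download the raw stream bytes with a URLSession data task (alongside, or instead of, the AVPlayer) and append them to a file. The output file is whatever the station broadcasts (typically MP3 or AAC frames), so it can be played back later. This is only a minimal sketch in current Swift; StreamRecorder and its method names are made up for illustration:
import Foundation
// Illustrative helper: captures the raw bytes of an HTTP audio stream to a local file.
final class StreamRecorder: NSObject, URLSessionDataDelegate {
private lazy var session = URLSession(configuration: .default, delegate: self, delegateQueue: nil)
private var task: URLSessionDataTask?
private var fileHandle: FileHandle?
func startRecording(from streamURL: URL, to fileURL: URL) throws {
// Create an empty file and open it for writing.
guard FileManager.default.createFile(atPath: fileURL.path, contents: nil) else {
throw CocoaError(.fileWriteUnknown)
}
fileHandle = try FileHandle(forWritingTo: fileURL)
task = session.dataTask(with: streamURL)
task?.resume()
}
func stopRecording() {
task?.cancel()
fileHandle?.closeFile()
fileHandle = nil
}
// Append each chunk of the stream to the file as it arrives.
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
fileHandle?.write(data)
}
}
Usage would be something like try recorder.startRecording(from: streamURL, to: documentsURL.appendingPathComponent("capture.mp3")), with the file name and extension chosen to match whatever format the stream actually delivers.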

Related

I want to add an activity indicator to my AVPlayer when it's buffering

private func startVideo() {
if let url = URL(string: "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/VolkswagenGTIReview.mp4") {
player = AVPlayer(url: url)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
playerViewController.view.frame = avPlayerView.bounds
addChild(playerViewController)
avPlayerView.addSubview(playerViewController.view)
playerViewController.didMove(toParent: self)
player?.play()
}
}
I need to add an activity loader whenever the video is buffering.
You can get this working with the following code, by observing the timeControlStatus key path on the player:
var activityIndicator: UIActivityIndicatorView!
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
if player != nil {
player.play()
player.addObserver(self, forKeyPath: "timeControlStatus", options: [.old, .new], context: nil)
}
}
override public func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if keyPath == "timeControlStatus", let change = change, let newValue = change[NSKeyValueChangeKey.newKey] as? Int, let oldValue = change[NSKeyValueChangeKey.oldKey] as? Int {
if #available(iOS 10.0, *) {
let oldStatus = AVPlayer.TimeControlStatus(rawValue: oldValue)
let newStatus = AVPlayer.TimeControlStatus(rawValue: newValue)
if newStatus != oldStatus {
DispatchQueue.main.async {[weak self] in
if newStatus == .playing || newStatus == .paused {
self?.activityIndicator.stopAnimating()
} else {
self?.activityIndicator.startAnimating()
}
}
}
} else {
// Fallback on earlier versions
self.activityIndicator.stopAnimating()
}
}
}
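As an alternative sketch (iOS 10+, not part of the answer above), the same idea can be expressed with block-based KVO, which avoids string key paths. timeControlObservation is just an illustrative property name; the returned token has to be stored or the observation ends immediately:
private var timeControlObservation: NSKeyValueObservation?
func observeBuffering() {
timeControlObservation = player?.observe(\.timeControlStatus, options: [.old, .new]) { [weak self] player, _ in
DispatchQueue.main.async {
switch player.timeControlStatus {
case .playing, .paused:
self?.activityIndicator.stopAnimating()
default: // .waitingToPlayAtSpecifiedRate, i.e. buffering
self?.activityIndicator.startAnimating()
}
}
}
}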

Swift AVAudioPlayer's playing status is always true?

Here's my code:
@objc func playSmusic1() {
guard let url = Bundle.main.url(forResource: "Snote6", withExtension: "wav") else { return }
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
try AVAudioSession.sharedInstance().setActive(true)
player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.wav.rawValue)
guard let player = player else { return }
while (true) {
player.play()
player.enableRate = true;
player.rate = playrate
if !player.isPlaying {
break
}
}
} catch let error {
print(error.localizedDescription)
}
}
I found that player.isPlaying is always true, so sometimes a tone plays 2 or 3 times. How do I fix this bug? Thanks a lot!
First: Don't use while(true) for checking because it blocks the main thread!
You should add a KVO observer to check this property asynchronously, e.g.:
class YourController : UIViewController {
var player: AVAudioPlayer?
//...
deinit {
player?.removeObserver(self, forKeyPath: #keyPath(AVAudioPlayer.isPlaying))
}
func play() {
//...
player?.addObserver(self,
forKeyPath: #keyPath(AVAudioPlayer.isPlaying),
options: .new,
context: nil)
player?.play()
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if keyPath == #keyPath(AVAudioPlayer.isPlaying) {
if let isPlaying = player?.isPlaying {
print(isPlaying)
}
}
else {
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
}
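Another way to react asynchronously, sketched here as an alternative rather than what the answer above uses: set an AVAudioPlayerDelegate and let audioPlayerDidFinishPlaying(_:successfully:) tell you when the tone is done, instead of checking isPlaying at all. TonePlayer is an illustrative class name, and the sketch reuses the question's Snote6.wav resource:
import AVFoundation
class TonePlayer: NSObject, AVAudioPlayerDelegate {
var player: AVAudioPlayer?
func play(rate: Float) {
guard let url = Bundle.main.url(forResource: "Snote6", withExtension: "wav") else { return }
player = try? AVAudioPlayer(contentsOf: url)
player?.delegate = self // called back when playback ends, no polling needed
player?.enableRate = true
player?.rate = rate
player?.play()
}
// Fires exactly once per playback, so the tone is never retriggered.
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
print("Finished playing (success: \(flag))")
}
}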

AVPlayer doesn't play video until a video from UIImagePickerController is played

I'm having a problem that I haven't seen a post about on here. I have an AVPlayerViewController which plays a video based on the path from my Firebase Database (not Storage). The video plays perfectly, as I want it to, but only once I watch a video that is clicked on in the UIImagePickerController elsewhere in the app.
So for example, the AVPlayer will show a black background (this occurs with all of the AVPlayers in the app), except when I watch a video from the UIImagePickerController, which has nothing to do with any of the other views. I have no clue where to start with this. I appreciate all your help and suggestions!
Here is the example code of my AVPlayerViewController:
import UIKit
import AVFoundation
import AVKit
class VideoView: UIViewController {
private var videoURL: URL
var player: AVPlayer?
var playerController : AVPlayerViewController?
init(videoURL: URL) {
self.videoURL = videoURL
super.init(nibName: nil, bundle: nil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.gray
player = AVPlayer(url: videoURL)
playerController = AVPlayerViewController()
guard player != nil && playerController != nil else {
return
}
playerController!.showsPlaybackControls = false
playerController!.player = player!
self.addChild(playerController!)
self.view.addSubview(playerController!.view)
playerController!.view.frame = view.frame
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidReachEnd), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player!.currentItem)
let cancelButton = UIButton(frame: CGRect(x: 10.0, y: 10.0, width: 30.0, height: 30.0))
cancelButton.setImage(#imageLiteral(resourceName: "cancel"), for: UIControl.State())
cancelButton.addTarget(self, action: #selector(cancel), for: .touchUpInside)
view.addSubview(cancelButton)
// Allow background audio to continue to play
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient)
} catch let error as NSError {
print(error)
}
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch let error as NSError {
print(error)
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
player?.play()
}
@objc func cancel() {
dismiss(animated: true, completion: nil)
}
@objc fileprivate func playerItemDidReachEnd(_ notification: Notification) {
if self.player != nil {
self.player!.seek(to: CMTime.zero)
self.player!.play()
}
}
}
Did you check the size of the child controller's view?
playerController!.view.frame = view.frame
Put it into
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
playerController!.view.frame = view.frame
}
or use constraints.
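For completeness, a sketch of the constraints alternative (assuming playerController was added as a child view controller in viewDidLoad, as in the question):
playerController!.view.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
playerController!.view.topAnchor.constraint(equalTo: view.topAnchor),
playerController!.view.bottomAnchor.constraint(equalTo: view.bottomAnchor),
playerController!.view.leadingAnchor.constraint(equalTo: view.leadingAnchor),
playerController!.view.trailingAnchor.constraint(equalTo: view.trailingAnchor)
])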
The problem is that when your view appears, the player item might not be ready for playback yet. To handle this properly, use KVO on the player item to make sure it is ready to play.
I have answered a similar question before here:
Video playback issues only on iOS 13 with AVPlayerViewController and AVPlayer when using HLS video
playerItem.addObserver(self,
forKeyPath: #keyPath(AVPlayerItem.status),
options: [.old, .new],
context: &playerItemContext)
override func observeValue(forKeyPath keyPath: String?,
of object: Any?,
change: [NSKeyValueChangeKey : Any]?,
context: UnsafeMutableRawPointer?) {
// Only handle observations for the playerItemContext
guard context == &playerItemContext else {
super.observeValue(forKeyPath: keyPath,
of: object,
change: change,
context: context)
return
}
if keyPath == #keyPath(AVPlayerItem.status) {
let status: AVPlayerItemStatus
if let statusNumber = change?[.newKey] as? NSNumber {
status = AVPlayerItemStatus(rawValue: statusNumber.intValue)!
} else {
status = .unknown
}
// Switch over status value
switch status {
case .readyToPlay:
break // Player item is ready to play.
case .failed:
break // Player item failed. See error.
case .unknown:
break // Player item is not yet ready.
}
}
}
You can find documentation about it from Apple here: https://developer.apple.com/documentation/avfoundation/media_assets_playback_and_editing/responding_to_playback_state_changes
Another example of usage is here:
statusObservation = playerItem?.observe(\AVPlayerItem.status, options: [.new, .initial]) { [weak self] item, _ in
guard let self = self else { return }
switch item.status {
case .readyToPlay:
break // Player item is ready to play.
case .failed:
break // Player item failed. See error.
case .unknown:
break // Player item is not yet ready.
}
}
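Note that observe(_:options:changeHandler:) returns an NSKeyValueObservation token that has to be kept alive for as long as you want the observation (statusObservation above is just an illustrative stored property); if the token is discarded, the observation stops immediately.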

Observing AVPlayer: An -observeValueForKeyPath:ofObject:change:context: message was received but not handled

Hi, I'm trying to observe an AVPlayerItem and an AVPlayer in my UIViewController, but I'm having some trouble doing it. I've googled a lot and found lots of examples, but none of them worked for me.
Here is my ViewController:
class VideoAudioViewController: UIViewController {
var audioPlayer: AVPlayer!
var playerItem: AVPlayerItem!
private var audio: Audio
private let ITEM_STATUS_PATH = #keyPath(AVPlayerItem.status)
private let PLAYER_STATUS_PATH = #keyPath(AVPlayer.status)
private let PLAYER_RATE_PATH = #keyPath(AVPlayer.rate)
init(for audio: Audio) {
self.audio = audio
super.init(nibName: nil, bundle: nil)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
// Play audio
playerItem = AVPlayerItem(url: audio.url)
playerItem.addObserver(self, forKeyPath: ITEM_STATUS_PATH, options: [.old, .new], context: nil)
audioPlayer = AVPlayer(playerItem: playerItem)
audioPlayer!.play()
// audioPlayer!.
var audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSessionCategoryPlayback)
} catch {
print("error")
}
NotificationCenter.default.addObserver(
self, selector: #selector(handleInterruption), name: .AVAudioSessionInterruption, object: audioSession)
audioPlayer.addObserver(self, forKeyPath: PLAYER_STATUS_PATH, options: [.old, .new], context: nil)
audioPlayer.addObserver(self, forKeyPath: PLAYER_RATE_PATH, options: [.old, .new], context: nil)
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?,
change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if keyPath == "status" {
switch (audioPlayer.status) {
case AVPlayerStatus.readyToPlay:
print("player status ready to play")
audioPlayer.play()
break
case AVPlayerStatus.failed:
print("player status failed")
break
default:
break
}
return
} else if keyPath == "rate" {
if audioPlayer.rate != 0 {
playPauseButton.setImage(#imageLiteral(resourceName: "ic_pause_circle_outline_white"), for: .normal)
} else {
playPauseButton.setImage(#imageLiteral(resourceName: "ic_play_circle_outline_white"), for: .normal)
}
} else if keyPath == "status" {
switch (playerItem.status) {
case AVPlayerItemStatus.readyToPlay:
print("player item ready to play")
break
case AVPlayerItemStatus.failed:
print("player item failed")
break
default:
break
}
return
}
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
// MARK: - AVAudioPlayer delegate
func handleInterruption(_ notification: Notification) {
guard let info = notification.userInfo,
let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSessionInterruptionType(rawValue: typeValue) else {
return
}
if type == .began {
playPauseButton.setImage(#imageLiteral(resourceName: "ic_play_circle_outline_white"), for: .normal)
}
else if type == .ended {
// guard let optionsValue =
// userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else {
// return
// }
// let options = AVAudioSessionInterruptionOptions(rawValue: optionsValue)
// if options.contains(.shouldResume) {
// Interruption Ended - playback should resume
audioPlayer?.play()
// }
}
}
}
My problem is this:
Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '<TheSimpleClub_Nachhilfe.VideoAudioViewController: 0x7fa6ee2b0c10>: An -observeValueForKeyPath:ofObject:change:context: message was received but not handled.
Key path: rate
Observed object: <AVPlayer: 0x60000021c8c0>
Change: {
kind = 1;
new = 1;
old = 1;
}
Context: 0x0'
The error occurs on this line:
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
What can I do to resolve this?
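Judging from the code above (offered as a guess, since no accepted answer is quoted here), one likely cause is that the "rate" branch never returns: after the rate change is handled, execution still falls through to super.observeValue(forKeyPath:of:change:context:), and NSObject's default implementation throws exactly this "received but not handled" exception for the rate key path. A minimal sketch of that part of the fix, reusing the question's PLAYER_RATE_PATH constant:
override func observeValue(forKeyPath keyPath: String?, of object: Any?,
change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if keyPath == PLAYER_RATE_PATH {
// ...update the play/pause button...
return // without this, the already-handled change still reaches super below
}
// forward only the key paths this class does not handle
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}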

MPNowPlayingInfoCenter not showing details

Today I spent hours trying to figure out why MPNowPlayingInfoCenter is not working, without success. I want it to show info in Control Center and on the lock screen; the media is a video.
Here is the problem:
I have a singleton class called GlobalAVPlayer that holds an AVPlayerViewController. It is a singleton because there must be only one, and I need to access it globally.
class GlobalAVPlayer: NSObject {
static let sharedInstance = GlobalAVPlayer()
private var _currentVideo: Video?
var playerViewController = AVPlayerViewController()
var isPlaying: Bool = false
var almostPlaying: Bool = false
var hasItemToPlay: Bool = false
var currentVideo: Video?
{
set {
_currentVideo = newValue
notify_VideoChanged()
}
get {
return _currentVideo
}
}
private var player: AVPlayer!
override init()
{
super.init()
player = AVPlayer()
playerViewController.player = player
player.addObserver(self, forKeyPath: "rate", options: NSKeyValueObservingOptions.New, context: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(didPlayToEnd), name: "AVPlayerItemDidPlayToEndTimeNotification", object: nil)
}
func itemToPlay(item: AVPlayerItem)
{
if let player = player {
almostPlaying = true
hasItemToPlay = true
player.replaceCurrentItemWithPlayerItem(item)
}
}
func didPlayToEnd()
{
print("[GlobalAVPlayer] End video notification")
let time = CMTimeMakeWithSeconds(0, 1)
player.seekToTime(time)
}
func play()
{
if player.rate == 0
{
player.play()
if player.rate != 0 && player.error == nil
{
isPlaying = true
print("[GlobalAVPlayer] Playing video without errors")
}
}
}
func pause()
{
if player.rate == 1
{
player.pause()
if player.rate == 0 && player.error == nil
{
isPlaying = false
print("[GlobalAVPlayer] Pausing video without errors")
}
}
}
func notify_PlaybackChanged()
{
NSNotificationCenter.defaultCenter().postNotificationName("globalAVPlayerPlaybackChanged", object: self)
}
func notify_VideoChanged()
{
NSNotificationCenter.defaultCenter().postNotificationName("globalAVPlayerVideoChanged", object: self)
}
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if keyPath == "rate" {
let newRate = change!["new"] as! Int
// rate = 0 (the player is stopped), rate = 1 (the player is playing)
self.isPlaying = newRate == 1 ? true : false
notify_PlaybackChanged()
}
}
deinit
{
player.removeObserver(self, forKeyPath: "rate", context: nil)
}
}
The init is called once when the app starts; after that I use the "itemToPlay" method to change the video.
I have also correctly (I think) configured the audio session in AppDelegate:
do
{
let session = AVAudioSession.sharedInstance()
try session.setCategory(AVAudioSessionCategoryPlayback)
try session.setActive(true)
}
catch
{
print("[AppDelegate] Something went wrong")
}
I tried to put MPNowPlayingInfoCenter.defaultCenter().nowPlayingInfo = something everywhere, but at most I could see the video title in Control Center for 1 second when I put it in the play() method. After that it disappeared and the playback controls were grayed out. The information is stored correctly, because when I print the nowPlayingInfo content to the console everything is as expected.
I tried using becomeFirstResponder and UIApplication.beginReceivingRemoteControlEvents in different places without success.
Thanks
Starting with iOS 10 there is a BOOL property on AVPlayerViewController called updatesNowPlayingInfoCenter, whose default value is YES. Just change it to NO:
//playerController is an instance of AVPlayerViewController
if ([self.playerController respondsToSelector:@selector(setUpdatesNowPlayingInfoCenter:)])
{
self.playerController.updatesNowPlayingInfoCenter = NO;
}
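The same thing in Swift (assuming playerController is your AVPlayerViewController instance):
if #available(iOS 10.0, *) {
playerController.updatesNowPlayingInfoCenter = false
}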
