swift error: Value of type 'AVAudioRecorder' has no member 'Delegate' - ios

As a beginner, I am unable to figure out why I get this error. The code I am using comes directly from the Udacity course I am taking. Here is the code:
import UIKit
import AVFoundation
class RecordSoundsViewController: UIViewController, <AVAudioRecorderDelegate> {
var audioRecorder: AVAudioRecorder!
#IBOutlet weak var recordingLabel: UILabel!
#IBOutlet weak var recordbutton: UIButton!
#IBOutlet weak var stopRecordingButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
stopRecordingButton.isEnabled = false
}
#IBAction func recordAudio(_ sender: Any) {
recordingLabel.text = "Recording in progress..."
recordbutton.isEnabled = false
stopRecordingButton.isEnabled = true
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.default, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record() }
#IBAction func stopRecording(_ sender: Any) {
recordbutton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to Record"
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: BOOL) {
print("finished recording")
}
}
I appreciate any help you could give me. Xcode 11.5, Swift 5.2
Thanks,
Mike

You should get a bunch more errors.
Anyway, this is not Objective-C. Adopting a protocol is not done with angle brackets:
class RecordSoundsViewController: UIViewController, AVAudioRecorderDelegate {
and the boolean type in Swift is Bool (not BOOL)
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
And don't use `try!` — catch errors instead.

Related

Save and show recorded voice notes in tableview Swift 3

I am new to Swift, so any help would be gladly appreciated. I am creating this project to record, save recorded data and show data in table view. Right now I can record and play only the current recording. If I press record again the previous recorded audio will be deleted. But I want to save the recorded audio permanently and show in table view. This is what I got so far. Thanks in advance!
import UIKit
import AVFoundation
class ViewController: UIViewController, AVAudioPlayerDelegate,
AVAudioRecorderDelegate {
var audioPlayer: AVAudioPlayer?
var audioRecorder: AVAudioRecorder?
#IBOutlet weak var recordAudio: UIButton!
#IBOutlet weak var stopAudio: UIButton!
#IBOutlet weak var playAudio: UIButton!
var arrayData: [FileManager] = []
override func viewDidLoad() {
super.viewDidLoad()
playAudio.isEnabled = false
stopAudio.isEnabled = false
// Creating File Manager
let fileMgr = FileManager.default
let dirPath = fileMgr.urls(for: .documentDirectory, in: .userDomainMask)
let soundFileUrl = dirPath[0].appendingPathComponent("sound.caf")
let recordSettings = [AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue,
AVEncoderBitRateKey: 16,
AVNumberOfChannelsKey: 2,
AVSampleRateKey: 44100.0 ] as [String:Any]
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch {
print(error.localizedDescription)
}
do {
try audioRecorder = AVAudioRecorder(url: soundFileUrl, settings: recordSettings as [String : Any])
audioRecorder?.prepareToRecord()
} catch {
print(error.localizedDescription)
}
}
#IBAction func recordButton(_ sender: Any) {
if audioRecorder?.isRecording == false {
playAudio.isEnabled = false
stopAudio.isEnabled = true
audioRecorder?.record()
}
}
#IBAction func stopButton(_ sender: Any) {
stopAudio.isEnabled = false
playAudio.isEnabled = true
recordAudio.isEnabled = true
if audioRecorder?.isRecording == true{
audioRecorder?.stop()
} else {
audioPlayer?.stop()
}
}
#IBAction func playButton(_ sender: Any) {
if audioRecorder?.isRecording == false{
stopAudio.isEnabled = true
recordAudio.isEnabled = true
do {
try audioPlayer = AVAudioPlayer(contentsOf: (audioRecorder?.url)!)
audioPlayer!.delegate = self
audioPlayer!.prepareToPlay()
audioPlayer!.play()
} catch {
print(error.localizedDescription)
}
}
}
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
recordAudio.isEnabled = true
stopAudio.isEnabled = false
}
func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
print("Audio Player Error")
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
print("Audio Recorder Error")
}
The reason you can only save one recording is that you only specified one file name. If you want to keep multiple recordings, you have to use different file names. You can ask the user to enter a file name and then store the recordings under those names. You can check whether a file with the same name already exists in this way.
You have to store all your audio files in the Documents folder so that when the user closes and reopens the app, the files are still there. An array of recordings held only in memory is lost when the app is closed.
After you have stored all your audio files in the Documents folder, on start-up of your recordings list view controller you can use this method to get the list of file names from the Documents folder and then display those names in the table view.

Recording Video with AVFoundation in Swift for iOS

I am having trouble recording video using the code provided. I am using example code created for recording video.
Specifically I am unable to compile this line without this error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
This line is located in a IBAction function:
#IBAction func RecordButtonPressed(_ sender: Any) {
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
RecordButton.setTitle("Stop", for: .normal);
}
Rest of code is here:
import UIKit
import AVFoundation
import Darwin
class ViewController: UIViewController {
#IBOutlet weak var CameraView: UIImageView!
#IBOutlet weak var RecordButton: UIButton!
#IBOutlet weak var SelectFrButton: UIButton!
#IBOutlet weak var ISOslider: UISlider!
#IBOutlet weak var SSslider: UISlider!
#IBOutlet weak var ISOtextfield: UITextField!
#IBOutlet weak var SStextfield: UITextField!
#IBOutlet weak var TorchSlider: UISlider!
#IBOutlet weak var Torchtextfield: UITextField!
var captureSession = AVCaptureSession();
var DisplaySessionOutput = AVCaptureVideoDataOutput();
var SaveSessionOutput = AVCaptureMovieFileOutput();
var previewLayer = AVCaptureVideoPreviewLayer();
var CaptureDevice:AVCaptureDevice? = nil;
var CurrentTorchLevel:Float = 0.5;
override func viewDidLoad() {
super.viewDidLoad()
captureSession.sessionPreset = AVCaptureSessionPresetHigh
// Loop through all the capture devices on this phone
let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
for device in (deviceDiscoverySession?.devices)! {
if(device.position == AVCaptureDevicePosition.back){
do{
try device.lockForConfiguration()
device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in
// Set text and sliders to correct levels
self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;
self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);
self.ISOtextfield.text = device.iso.description;
self.ISOslider.setValue(device.iso, animated: false)
self.SStextfield.text = device.exposureDuration.seconds.description;
self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);
self.TorchSlider.minimumValue = 0.01;
self.TorchSlider.maximumValue = 1;
self.TorchSlider.value = 0.5;
self.Torchtextfield.text = "0.5";
})
//Turn torch on
if (device.torchMode == AVCaptureTorchMode.on) {
device.torchMode = AVCaptureTorchMode.off
} else {
try device.setTorchModeOnWithLevel(1.0)
}
device.unlockForConfiguration();
CaptureDevice = device;
let input = try AVCaptureDeviceInput(device: CaptureDevice)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);
if(captureSession.canAddOutput(DisplaySessionOutput)){
captureSession.addOutput(DisplaySessionOutput);
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
CameraView.layer.addSublayer(previewLayer);
}
}
}
catch{
print("exception!");
}
}
}
CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);
captureSession.startRunning()
}
// Do any additional setup after loading the view, typically from a nib.
override func viewDidLayoutSubviews() {
previewLayer.frame = CameraView.bounds
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
#IBAction func RecordButtonPressed(_ sender: Any) {
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
RecordButton.setTitle("Stop", for: .normal);
}
#IBAction func ISOvaluechanged(_ sender: Any) {
SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
}
#IBAction func SSvaluechanged(_ sender: Any) {
let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000);
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
}
#IBAction func ISOtextchanged(_ sender: Any) {
}
#IBAction func SStextchanged(_ sender: Any) {
//let time = CMTimeMake(Int64(exposurelevel * 100000),100000);
}
#IBAction func ChooseButtonPressed(_ sender: Any) {
}
func ShowAlert(AlertMessage: String) {
let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)
self.present(alertController, animated: true, completion:nil)
let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in
}
alertController.addAction(OKAction)
}
#IBAction func TorchSliderChanged(_ sender: Any) {
CurrentTorchLevel = self.TorchSlider.value;
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
}
func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {
var newISOval = isolevel;
var newSSval = exposurelevel;
let newTorchVal = TorchLevel;
if(newISOval == FLT_MAX){
// Pass through 0,0 for maintaining current SS.
}
else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {
newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
}
else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {
newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
}
if(newSSval.timescale == 0){
// Pass through 0,0 for maintaining current SS.
}
else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) {
newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
}
else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {
newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
}
do {
try self.CaptureDevice?.lockForConfiguration();
try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);
CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in
// Set text and sliders to correct levels
self.ISOtextfield.text = self.CaptureDevice?.iso.description;
self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)
self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);
self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
self.Torchtextfield.text = self.CurrentTorchLevel.description;
})
self.CaptureDevice?.unlockForConfiguration();
}
catch {
ShowAlert(AlertMessage: "Unable to set camera settings");
self.CaptureDevice?.unlockForConfiguration();
}
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
return
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
return
}
}
Thank you for any help you can provide!
Make an extension for your UIViewController that makes it conform to AVCaptureFileOutputRecordingDelegate. Remove the final two methods from your ViewController class and add them to the extension. Those methods are what make you conform to AVCaptureFileOutputRecordingDelegate.
class ViewController:UIViewController {
//your methods as usual but remove the final two methods and add them to the extension that follows. Those methods are what will make you conform to AVCaptureFileOutputRecordingDelegate
}
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
}
}
You can do the same thing by extending your UIViewController as below but I thought I'd give you a clean solution as above. You can choose.
class ViewController:UIViewController, AVCaptureFileOutputRecordingDelegate {
//your methods as usual but you keep your final two methods this time
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
}
}

Value of type 'PlaySoundsViewController' has no member 'recordedAudio'

I've been following Udacity's Intro to iOS App Development with Swift tutorial, but got this error.
Value of type 'PlaySoundsViewController' has no member 'recordedAudio'
Line 84 has the error.
playSoundsVC.recordedAudio = recordedAudioURL
Here is the entire code:
import UIKit
import AVFoundation
class RecordSoundsViewController: UIViewController , AVAudioRecorderDelegate {
#IBOutlet weak var recordingInProgress: UILabel!
#IBOutlet weak var stopButton: UIButton!
#IBOutlet weak var recordButton: UIButton!
var audioRecorder:AVAudioRecorder!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func viewWillAppear(animated: Bool) {
//TODO: Hide stop button
stopButton.hidden = true
//TODO: Enable recordButton
recordButton.enabled = true
}
#IBAction func recordAudio(sender: AnyObject) {
//TODO: Show text "recording in progress"
recordingInProgress.hidden = false
//TODO: Show stopButton
stopButton.hidden = false
//TODO: Record the user's voice
print("in recordAudio")
//TODO: Disable recording button
recordButton.enabled = false
let dirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory,.UserDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = NSURL.fileURLWithPathComponents(pathArray)
print(filePath)
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord)
try! audioRecorder = AVAudioRecorder(URL: filePath!, settings: [:])
audioRecorder.meteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
#IBAction func stopRecording(sender: AnyObject) {
//TODO: hide recordingInProgress label
recordingInProgress.hidden = true
recordButton.enabled = true
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder, successfully flag: Bool) {
print("AVAudioRecorder finished saving recording")
if (flag) {
self.performSegueWithIdentifier("stopRecording", sender: audioRecorder.url)
} else {
print("Saving of recording failed")
}
}
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
if (segue.identifier == "stopRecording") {
let playSoundsVC = segue.destinationViewController as!
PlaySoundsViewController
let recordedAudioURL = sender as! NSURL
playSoundsVC.recordedAudio = recordedAudioURL
}
}
}
class PlaySoundsViewController: UIViewController {
var recordedURL: URL?
override func viewDidLoad() {
super.viewDidLoad()
}
}
Don't forget to add an identifier for the segue.
Add `var recordedAudioURL: URL!` to your PlaySoundsViewController file.
It is a variable in the destination class that holds the URL sent from the current class.
happy coding :)
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
if (segue.identifier == "stopRecording") {
let playSoundsVC = segue.destinationViewController as! PlaySoundViewController
let recordedAudioURL = sender as! NSURL
playSoundsVC.recordedAudioURL = recordedAudioURL
}
}

Make a playlist (start next song) in swift

I have created a sound player in swift with AVFoundation. I am trying to start the next song in array when the playing song is finished. I was trying to implement this code
if (audioPlayer.currentTime >= audioPlayer.duration){
var recentSong = songPlaylist[selectedSongNumber + 1]
audioPlayer = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath:
NSBundle.mainBundle().pathForResource(recentSong, ofType: "mp3")!), error: nil)
audioPlayer.play()
}
but I have not been able to implement this code (I do not know where to put it). Here is my complete code:
import UIKit
import AVFoundation
import AVKit
public var audioPlayer = AVPlayer()
public var selectedSongNumber = Int()
public var songPlaylist:[String] = ["song1", "song2"]
public var recentSong = "song1"
let playImage = UIImage(named: "Play.png") as UIImage!
let pauseImage = UIImage(named: "Pause.png") as UIImage!
class FirstViewController: UIViewController {
#IBOutlet weak var musicSlider: UISlider!
#IBOutlet weak var PlayPause: UIButton!
var audioPlayer = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath:
NSBundle.mainBundle().pathForResource(recentSong, ofType: "mp3")!), error: nil)
override func viewDidLoad() {
super.viewDidLoad()
musicSlider.maximumValue = Float(audioPlayer.duration)
var timer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("updateMusicSlider"), userInfo: nil, repeats: true)
if (audioPlayer.currentTime >= audioPlayer.duration){
var recentSong = songPlaylist[selectedSongNumber + 1]
audioPlayer = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath:
NSBundle.mainBundle().pathForResource(recentSong, ofType: "mp3")!), error: nil)
audioPlayer.play()
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
#IBAction func PlayPauseButton(sender: AnyObject) {
if (audioPlayer.playing == false){
audioPlayer.play()
PlayPause.setImage(pauseImage, forState: .Normal)
}else{
audioPlayer.pause()
PlayPause.setImage(playImage, forState: .Normal)
}
}
#IBAction func StopButton(sender: AnyObject) {
audioPlayer.stop()
audioPlayer.currentTime = 0
PlayPause.setImage(playImage, forState: .Normal)
}
#IBAction func musicSliderAction(sender: UISlider) {
audioPlayer.stop()
audioPlayer.currentTime = NSTimeInterval(musicSlider.value)
audioPlayer.play()
}
func updateMusicSlider(){
musicSlider.value = Float(audioPlayer.currentTime)
}
}
I am updating my code with something different:
import UIKit
import AVFoundation
class ViewController: UIViewController, AVAudioPlayerDelegate {
var counter = 0
var song = ["1","2","3"]
var player = AVAudioPlayer()
#IBOutlet weak var musicSlider: UISlider!
override func viewDidLoad() {
super.viewDidLoad()
musicSlider.value = 0.0
}
func updateMusicSlider(){
musicSlider.value = Float(player.currentTime)
}
#IBAction func playSong(sender: AnyObject) {
music()
}
#IBAction func sliderAction(sender: AnyObject) {
player.stop()
player.currentTime = NSTimeInterval(musicSlider.value)
player.play()
}
func music(){
var audioPath = NSBundle.mainBundle().pathForResource("\(song[counter])", ofType: "mp3")!
var error : NSError? = nil
player = AVAudioPlayer(contentsOfURL: NSURL(string: audioPath), error: &error)
musicSlider.maximumValue = Float(player.duration)
var timer = NSTimer.scheduledTimerWithTimeInterval(0.05, target: self, selector: Selector("updateMusicSlider"), userInfo: nil, repeats: true)
player.delegate = self
if error == nil {
player.delegate = self
player.prepareToPlay()
player.play()
}
}
func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool)
{
println("Called")
if flag {
counter++
}
if ((counter + 1) == song.count) {
counter = 0
}
music()
}
}
You can do it this way.
Hope it helps — HERE is a sample project for more info.
You need to implement AVAudioPlayerDelegate Protocol's method:
optional func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer!, successfully flag: Bool)
Documentation link
Play your next music item here.
But I would not recommend this, since AVAudioPlayer can only play one item at a time — you need to instantiate it again with another music item after each completion. I suggest you use AVQueuePlayer instead. A detailed answer has been given here. Hope it helps!
I created a sound player in Swift with AVFoundation. I used a progress view to time the song; when it hits 0.98743 it advances to the next song automatically. Here is the GitHub link: https://github.com/ryan-wlr/MusicPlayerIOS
// Keeps the progress bar in sync with playback, and advances to the next
// track once progress passes the ~0.98743 threshold.
func updateProgressView() {
// NOTE(review): `a` is not defined anywhere in this snippet, and
// `audioPlayerDidFinishPlayeing` (misspelled) presumably refers to a helper
// in the linked GitHub project — confirm both against the full source.
if (progressView.progress > a.advanced(by: 0.98743)) {
audioPlayerDidFinishPlayeing()
}
if audioPlayer.isPlaying {
// Fraction of the track played so far, in 0.0 ... 1.0.
let progress = Float(audioPlayer.currentTime/audioPlayer.duration)
progressView.setProgress(progress, animated: true)
}
}

How to play audio in background with Swift?

As you see I'm streaming an audio broadcast. But when I press the home button and exit the app streaming stops or I cannot hear. How can I continue streaming in background and listen it from lock screen?
ViewController.Swift
import UIKit
import AVFoundation
import MediaPlayer
import GoogleMobileAds
class ViewController: UIViewController, GADInterstitialDelegate {
#IBOutlet weak var exitMapButton: UIButton!
#IBOutlet weak var radarMap: UIWebView!
var interstitial: GADInterstitial!
func createAndLoadInterstitial() -> GADInterstitial {
var interstitial = GADInterstitial(adUnitID: "adUnitID-XXXX")
interstitial.delegate = self
interstitial.loadRequest(GADRequest())
return interstitial
}
func getAd(){
if (self.interstitial.isReady)
{
self.interstitial.presentFromRootViewController(self)
self.interstitial = self.createAndLoadInterstitial()
}
}
#IBOutlet weak var ataturkButton: UIButton!
#IBOutlet weak var sabihaButton: UIButton!
#IBOutlet weak var esenbogaButton: UIButton!
#IBOutlet weak var weatherButton: UIButton!
#IBOutlet weak var statusLabel: UILabel!
#IBOutlet weak var playButton: UIButton!
#IBOutlet weak var webViewButton: UIButton!
var googleBannerView: GADBannerView!
override func viewDidLoad() {
super.viewDidLoad()
}
class PlayerAv {
var audioLink: String?
var player: AVPlayer
init(link: String) {
self.audioLink = link
self.player = AVPlayer(URL: NSURL(string: link))
}
}
var myPlayer = PlayerAv(link: "http://somewebsite.com/abc.pls")
var setTowerState = ""
#IBAction func sliderValueChanged(sender: UISlider) {
var currentValue = Float(sender.value)
println(currentValue)
myPlayer.player.volume = currentValue
}
#IBAction func getWeatherWindow(sender: AnyObject) {
UIApplication.sharedApplication().openURL(NSURL(string: "http://somewebpage.com")!)
println("Directed to weather page")
}
#IBAction func changeToAtaturk() {
myPlayer.player.pause()
myPlayer = PlayerAv(link: "http://somewebsite.com/abc.pls")
myPlayer.audioLink == ""
println("\(myPlayer.audioLink!)--a")
playButton.setTitle("Pause", forState: UIControlState.Normal)
myPlayer.player.play()
setTowerState = "ataturk"
statusLabel.text = "Status: Playing, LTBA"
}
#IBAction func changeToEsenboga() {
myPlayer.player.pause()
myPlayer = PlayerAv(link: "http://somewebsite.com/def.pls")
println("\(myPlayer.audioLink!)--a")
playButton.setTitle("Pause", forState: UIControlState.Normal)
myPlayer.player.play()
setTowerState = "esenboga"
statusLabel.text = "Status: Playing, LTAC"
}
#IBAction func changeToSabiha() {
myPlayer.player.pause()
myPlayer = PlayerAv(link: "http://somewebsite.com/efg.pls")
println("\(myPlayer.audioLink!)--a")
playButton.setTitle("Pause", forState: UIControlState.Normal)
myPlayer.player.play()
setTowerState = "sabiha"
statusLabel.text = "Status: Playing, LTFJ"
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
#IBAction func playButtonPressed(sender: AnyObject) {
toggle()
}
func toggle() {
if playButton.titleLabel?.text == "Play" {
playRadio()
println("Playing")
statusLabel.text = "Status: Playing"
} else {
pauseRadio()
println("Paused")
statusLabel.text = "Status: Paused"
}
}
func playRadio() {
myPlayer.player.play()
playButton.setTitle("Pause", forState: UIControlState.Normal)
}
func pauseRadio() {
myPlayer.player.pause()
playButton.setTitle("Play", forState: UIControlState.Normal)
}
}
You need to enable the Background Modes capability (Audio and AirPlay) for your app, set your AVAudioSession category to AVAudioSessionCategoryPlayback, and set the session active.
From Xcode 11.4 • Swift 5.2
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
Xcode 10.2.1 Swift 4
Please add the following code in your AppDelegate
func application(_ application: UIApplication, willFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey : Any]? = nil) -> Bool {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, mode: AVAudioSessionModeDefault, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
return true
}
Note: - Please configure options as required. E.g to stop a background audio while a video file being played add
options: [.allowAirPlay, .defaultToSpeaker]
And don't forget to enable audio and airplay in Background mode
Just paste the following in viewDidLoad:
// Load the bundled MP3, configure the audio session for playback, and play.
let path = Bundle.main.path(forResource: "Bismallah", ofType: "mp3")
do {
    // Bug fix: the original assigned the loaded player to `playerr` but then
    // called `player.play()`, so the freshly loaded file was never played.
    // Use one consistently named player throughout.
    try player = AVAudioPlayer(contentsOf: URL(fileURLWithPath: path!))
} catch {
    print("File is not Loaded")
}
let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(AVAudioSessionCategoryPlayback)
} catch {
    // Don't silently swallow session errors — at least log them.
    print(error)
}
player.play()
Swift 5 Xcode 11.2.1
Add this code where you have initialized the AudioPlayer.
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
let audioSession = AVAudioSession.sharedInstance()
do{
try audioSession.setCategory(AVAudioSession.Category.playback)
}
catch{
fatalError("playback failed")
}

Resources