How to buffer audio using AVPlayer in iOS?

I want to play streaming audio from the Internet. I wrote code that plays the stream, but it doesn't do any buffering, so if the signal is weak the application stops playing. This is my code:
import UIKit
import AVFoundation
import MediaPlayer
import AudioToolbox
class ViewController: UIViewController {
var playerItem:AVPlayerItem?
var player:AVPlayer?
@IBOutlet weak var PlayButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
var buffer = AVAudioBuffer ()
let url = NSURL (string: "http://radio.afera.com.pl/afera64.aac")
playerItem = AVPlayerItem(URL: url!)
player = AVPlayer(playerItem: playerItem!)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func PlayButtonTapped(sender: AnyObject)
{
if ((player!.rate != 0) && (player!.error == nil))
{
player!.pause()
PlayButton.setImage(UIImage(named:"Play"), forState: UIControlState.Normal)
}
else
{
player!.play()
PlayButton.setImage(UIImage(named:"Stop"), forState:UIControlState.Normal)
}
}
}
I have no idea how to buffer this stream. I searched Apple's documentation but couldn't find anything in Swift.
I found something like AVAudioBuffer, but I don't know how to use it, or whether it's the right way to solve this problem.
P.S. C# has documentation on MSDN; is there something similar from Apple for Swift?

The answer is to create an error delegate that fires a selector every time the player stops (the error changes when the network connection is interrupted or the stream didn't load properly):
Here are the delegates, declared just outside and above my RadioPlayer class:
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
class:
import Foundation
import AVFoundation
import UIKit
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(URL: NSURL(string: Globals.radioURL)!)
private var playerItem = AVPlayerItem?()
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NSNotificationCenter.defaultCenter().addObserverForName(
AVPlayerItemFailedToPlayToEndTimeNotification,
object: nil,
queue: nil,
usingBlock: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(URL: NSURL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronouslyForKeys([statusKey], completionHandler: {
var error: NSError? = nil
dispatch_async(dispatch_get_main_queue(), {
let status: AVKeyValueStatus = asset.statusOfValueForKey(statusKey, error: &error)
if status == AVKeyValueStatus.Loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> NSTimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.ReadyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = timeRangeArray.objectAtIndex(0).CMTimeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (NSTimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
RadioViewController:
import UIKit
import AVFoundation
class RadioViewController: UIViewController, errorMessageDelegate, sharedInstanceDelegate {
// MARK: Properties
var firstErrorSkip = true
var firstInstanceSkip = true
@IBOutlet weak var listenLabel: UILabel!
@IBOutlet weak var radioSwitch: UIImageView!
@IBAction func back(sender: AnyObject) {
print("Dismissing radio view")
if let navigationController = self.navigationController
{
navigationController.popViewControllerAnimated(true)
}
}
@IBAction func switched(sender: AnyObject) {
toggle()
}
override func viewDidLoad() {
super.viewDidLoad()
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
print("AVAudioSession Category Playback OK")
do {
try AVAudioSession.sharedInstance().setActive(true)
print("AVAudioSession is Active")
} catch let error as NSError {
print(error.localizedDescription)
}
} catch let error as NSError {
print(error.localizedDescription)
}
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.currentlyPlaying() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func toggle() {
if RadioPlayer.sharedInstance.currentlyPlaying() {
pauseRadio()
} else {
playRadio()
}
}
func playRadio() {
firstErrorSkip = false
firstInstanceSkip = false
if RadioPlayer.sharedInstance.errorMessage != "" || RadioPlayer.sharedInstance.bufferFull() {
resetStream()
} else {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
}
}
func pauseRadio() {
RadioPlayer.sharedInstance.pause()
radioSwitch.image = UIImage(named: "Radio_Switch_Inactive")
listenLabel.text = "Click to Play Radio Stream:"
}
func resetStream() {
print("Reloading interrupted stream");
RadioPlayer.sharedInstance.resetPlayer()
//RadioPlayer.sharedInstance = RadioPlayer();
RadioPlayer.sharedInstance.errorDelegate = self
RadioPlayer.sharedInstance.instanceDelegate = self
if RadioPlayer.sharedInstance.bufferFull() {
radioSwitch.image = UIImage(named: "Radio_Switch_Active")
listenLabel.text = "Click to Pause Radio Stream:"
RadioPlayer.sharedInstance.play()
} else {
playRadio()
}
}
func errorMessageChanged(newVal: String) {
if !firstErrorSkip {
print("Error changed to '\(newVal)'")
if RadioPlayer.sharedInstance.errorMessage != "" {
print("Showing Error Message")
let alertController = UIAlertController(title: "Stream Failure", message: RadioPlayer.sharedInstance.errorMessage, preferredStyle: UIAlertControllerStyle.Alert)
alertController.addAction(UIAlertAction(title: "Dismiss", style: UIAlertActionStyle.Default, handler: nil))
self.presentViewController(alertController, animated: true, completion: nil)
pauseRadio()
}
} else {
print("Skipping first init")
firstErrorSkip = false
}
}
func sharedInstanceChanged(newVal: Bool) {
if !firstInstanceSkip {
print("Detected New Instance")
if newVal {
RadioPlayer.sharedInstance.play()
}
} else {
firstInstanceSkip = false
}
}
}
Hope this will help :)

Change
playerItem = AVPlayerItem?()
to
playerItem: AVPlayerItem?
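In context, a minimal sketch of the corrected declaration (matching the Swift 2-era syntax of the RadioPlayer answer above; everything else in the class stays the same):
class RadioPlayer : NSObject {
private var player = AVPlayer(URL: NSURL(string: Globals.radioURL)!)
// Declared as a plain optional; it is assigned later, once the
// asset's "tracks" key has finished loading in init()/resetPlayer().
private var playerItem: AVPlayerItem?
// ...
}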

Related

Problems resuming recording in the background using AVAudioRecorder in Swift

To put it in context, I'm developing an app that records for long periods of time, both in the background and while in the app. I'm facing two problems using AVAudioRecorder to record in my app:
My recording resumes fine when, for example, I play an audio message in WhatsApp or a video in Instagram. But when I play a song in Apple Music or play a voice note, it doesn't resume the recording again.
If I'm in a phone call and I start the recording, my app crashes and outputs the following:
(OSStatus error 561017449.)
I think I also need to handle when the input/output device changes so my app doesn't crash.
The code I implemented for the class where I'm recording (which also plays back the sound with the recorder so I can try it and see if it works) is this:
class RecordViewController: UIViewController, AVAudioPlayerDelegate , AVAudioRecorderDelegate {
@IBOutlet weak var principalLabel: UILabel!
@IBOutlet weak var loginLabel: UILabel!
@IBOutlet weak var recordBTN: UIButton!
@IBOutlet weak var playBTN: UIButton!
var audioRecorder : AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
check_record_permission()
checkActivateMicrophone()
NotificationCenter.default.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: audioRecorder)
let tapRegister: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.goToLogin))
loginLabel.addGestureRecognizer(tapRegister)
loginLabel.isUserInteractionEnabled = true
}
@objc func goToLogin() {
self.performSegue(withIdentifier: "goToLogin", sender: self)
}
@objc func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeInt = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeInt) else {
return
}
switch type {
case .began:
if isRecording {
print("OOOOOOO")
audioRecorder.pause()
}
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
audioRecorder.record()
print("AAAAAAA")
} else {
print("III")
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
self.audioRecorder.record()
}
}
default: ()
}
}
func check_record_permission()
{
switch AVAudioSession.sharedInstance().recordPermission {
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
default:
break
}
}
func checkActivateMicrophone() {
if !isAudioRecordingGranted {
playBTN.isEnabled = false
playBTN.alpha = 0.5
recordBTN.isEnabled = false
recordBTN.alpha = 0.5
principalLabel.text = "Activa el micrófono desde ajustes"
} else {
playBTN.isEnabled = true
playBTN.alpha = 1
recordBTN.isEnabled = true
recordBTN.alpha = 1
principalLabel.text = "¡Prueba las grabaciones!"
}
}
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
func getFileUrl() -> URL
{
let filename = "myRecording.m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
func setup_recorder()
{
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
print(error.localizedDescription)
}
}
else
{
print("AAAAA")
}
}
@IBAction func recordAct(_ sender: Any) {
if(isRecording) {
finishAudioRecording(success: true)
recordBTN.setTitle("Record", for: .normal)
playBTN.isEnabled = true
isRecording = false
}
else
{
setup_recorder()
audioRecorder.record()
recordBTN.setTitle("Stop", for: .normal)
playBTN.isEnabled = false
isRecording = true
}
}
func finishAudioRecording(success: Bool)
{
if success
{
audioRecorder.stop()
audioRecorder = nil
print("recorded successfully.")
}
else
{
print("Recording failed.")
}
}
func prepare_play()
{
do
{
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
}
catch{
print("Error")
}
}
@IBAction func playAct(_ sender: Any) {
if(isPlaying)
{
audioPlayer.stop()
recordBTN.isEnabled = true
playBTN.setTitle("Play", for: .normal)
isPlaying = false
}
else
{
if FileManager.default.fileExists(atPath: getFileUrl().path)
{
recordBTN.isEnabled = false
playBTN.setTitle("pause", for: .normal)
prepare_play()
audioPlayer.play()
isPlaying = true
}
else
{
print("Audio file is missing.")
}
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
print("UUU")
finishAudioRecording(success: false)
}
playBTN.isEnabled = true
}
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
recordBTN.isEnabled = true
}
}
I have enabled the "Audio, AirPlay, and Picture in Picture" background mode.
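For the input/output device change mentioned above, a minimal sketch of one possible approach (an assumption, not something from the original post): observe AVAudioSession.routeChangeNotification, e.g. in viewDidLoad(), and pause/resume the recorder when the hardware route changes.
NotificationCenter.default.addObserver(self, selector: #selector(handleRouteChange(notification:)), name: AVAudioSession.routeChangeNotification, object: nil)
@objc func handleRouteChange(notification: Notification) {
guard let userInfo = notification.userInfo,
let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
return
}
switch reason {
case .newDeviceAvailable, .oldDeviceUnavailable:
// The hardware route changed (headphones, Bluetooth, speaker, ...).
// Pause and restart the recorder so it keeps writing on the new route.
if isRecording {
audioRecorder.pause()
audioRecorder.record()
}
default:
break
}
}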

Pausing and Resuming Timer in Swift

I am playing audio that can be paused, resumed, and stopped. I also have a slider that can be used to change the audio's current playback position, and a label that shows the duration of the audio. I want to pause the timer when the user pauses the audio. I read that I can invalidate and nil the timer and then start it again, but the issue is that this replays the audio from the start. Is there a way to start the timer at the last point where it was paused?
func startTimer() {
if replayTimer == nil {
replayTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateSlider), userInfo: nil, repeats: true)
}
}
@objc func updateSlider() {
progressBar.value = Float(audio.audio!.currentTime)
}
@IBAction func playReceiverVoicenote(_ sender: Any) {
if replayTimer == nil {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.playAudio(filePath: filePath!)
startTimer()
receiverProgressBar.maximumValue = audio.getAudioDuration()
} else if audio.isAudioPlaying() {
audioBtn.setImage(#imageLiteral(resourceName: "playAudio"), for: .normal)
audio.pauseAudio()
replayTimer?.invalidate()
replayTimer = nil
} else {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.replayAudio()
startTimer()
}
}
func playAudio(filePath:URL){
do {
audio = try AVAudioPlayer(contentsOf: filePath)
audio!.delegate = self
audio!.prepareToPlay()
audio!.volume = 1.0
audio!.play()
} catch {
print(error.localizedDescription)
}
}
func pauseAudio() {
audio!.pause()
}
func replayAudio() {
audio!.play()
}
func stopAudio() {
audio!.stop()
}
When the audio is about to pause, save audio.currentTime in a variable and invalidate the timer.
When the audio is about to resume, restore the saved currentTime, call play(), and restart the timer.
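A minimal sketch of that idea, reusing the names from the question (audio.audio is the underlying AVAudioPlayer, as in updateSlider(); pausedTime is an assumed new property):
var pausedTime: TimeInterval = 0
func pausePlayback() {
pausedTime = audio.audio!.currentTime // remember where we paused
audio.pauseAudio()
replayTimer?.invalidate() // stop the slider updates
replayTimer = nil
}
func resumePlayback() {
audio.audio!.currentTime = pausedTime // continue from the saved position
audio.replayAudio()
startTimer() // restart the slider updates
}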
You can also use this Audio Manager in this way:
let player = AudioPlayer()
player.loadAudio(url: URL(string: "myAudioUrl.mp3")!, name: "", img: "")
And the action of the button to play and pause:
if player.isPlaying {
player.pauseAudio()
} else {
player.playAudio { isFinish, player, currentTimeInSec, remainingTimeInSec in
if isFinish {
// Audio finish to play
}
}
}
The completion closure returns:
isFinish: Bool // If the audio has finished playing
player: AVAudioPlayer // The player
currentTimeInSec: Int // The current time in seconds of the audio playing
remainingTimeInsec: Int // The remaining time
AudioPlayer:
import Foundation
import AVFoundation
import MediaPlayer
class AudioPlayer {
var audioPlayer: AVAudioPlayer?
var hasBeenPaused = false
var songName = ""
var songImage = ""
var timer: Timer?
var isPlaying: Bool {
return audioPlayer?.isPlaying ?? false
}
public func loadAudio(url: URL, name: String, img: String) {
songName = name
songImage = img
setupRemoteTransportControls()
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
audioPlayer?.prepareToPlay()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback, mode: .default)
} catch let sessionError {
print(sessionError)
}
} catch let songPlayerError {
print(songPlayerError)
}
}
public func playAudio(completion: ((_ isFinish: Bool, _ player: AVAudioPlayer, _ currentTimeInSec: Int, _ restTimeInSec: Int) -> ())? = nil) {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.play()
setupNowPlaying()
if timer == nil {
timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
let currentTime = Int(audioPlayer.currentTime)
let remainingTime = Int(audioPlayer.duration) - Int(currentTime)
if remainingTime == 0 {
completion?(true, audioPlayer, currentTime, remainingTime)
if self.timer != nil {
self.timer!.invalidate()
self.timer = nil
}
} else {
completion?(false, audioPlayer, currentTime, remainingTime)
}
}
}
}
public func pauseAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying {
audioPlayer.pause()
hasBeenPaused = true
} else {
hasBeenPaused = false
}
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
public func replayAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying || hasBeenPaused {
audioPlayer.stop()
audioPlayer.currentTime = 0
audioPlayer.play()
} else {
audioPlayer.play()
}
setupNowPlaying()
}
public func stopAudio() {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.stop()
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.previousTrackCommand.isEnabled = false
commandCenter.nextTrackCommand.isEnabled = false
commandCenter.skipBackwardCommand.isEnabled = false
commandCenter.skipForwardCommand.isEnabled = false
commandCenter.playCommand.isEnabled = true
commandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the play command
self.playAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
commandCenter.pauseCommand.isEnabled = true
commandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the pause command
self.pauseAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
}
func setupNowPlaying() {
guard let audioPlayer = audioPlayer else { return }
var nowPlayingInfo = [String: Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = songName
if let image = UIImage(named: songImage) {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = audioPlayer.currentTime
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = audioPlayer.duration
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = audioPlayer.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}

iOS Swift: Sound recorder Interrupted when switching to or from headphones

I am using the code below to record a 5-second sound sample in an iOS Swift app. The code works fine for recording and also recognises headphones correctly if they are plugged in. However, if the headphones are plugged in or removed during the recording, the recording is interrupted. I wonder why that is, as otherwise it runs smoothly.
import AVFoundation
class RecorderViewController: UIViewController {
var recorder: AVAudioRecorder!
var uploadObjectSuccess = false
@IBOutlet weak var recordButton: UIButton!
@IBOutlet weak var statusLabel: UILabel!
@IBOutlet weak var progressView: UIProgressView!
var meterTimer: Timer!
var soundFileURL: URL!
override func viewDidLoad() {
super.viewDidLoad()
setSessionPlayback()
askForNotifications()
checkHeadphones()
}
func updateAudioMeter(_ timer:Timer) {
self.progressView.setProgress(0, animated: false)
if recorder != nil && recorder.isRecording {
let sec = Int(recorder.currentTime.truncatingRemainder(dividingBy: 60))
let s = String(format: "%02d", sec)
statusLabel.text = s
recorder.updateMeters()
self.progressView.setProgress(Float(sec*4), animated: true)
if (sec==6) && recorder != nil {
func aux()->Bool {
recorder.stop()
return true
}
if aux()==true {
if self.soundFileURL != nil {
self.uploadObjectSuccess = false
let path = Auth.auth().currentUser?.uid
let FIRStoragePath = Storage.storage().reference().child(path!).child(FileName!)
let uploadObject = FIRStoragePath.putFile(from: self.soundFileURL!, metadata: nil)
uploadObject.observe(.success) { snapshot in
// Upload completed successfully
self.uploadObjectSuccess = true
}
uploadObject.observe(.failure) { snapshot in
// Upload failed
self.uploadObjectSuccess = true
}
}
}
}
} else if (uploadObjectSuccess==true) {
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
isPlaying = false
self.uploadObjectSuccess = false
} else {
self.statusLabel.text = "00"
}
}
@IBAction func record(_ sender: Any) {
if recorder != nil {
recorder.stop()
}
recordButton.isEnabled = false
recordWithPermission(true)
}
func setupRecorder() {
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
self.soundFileURL = documentsDirectory.appendingPathComponent(self.currentFileName) //appendingPathComponent(currentFileName)
if FileManager.default.fileExists(atPath: soundFileURL.absoluteString) {
do {
try FileManager.default.removeItem(atPath: self.soundFileURL.absoluteString)
} catch let error as NSError {
print("Ooops! Something went wrong: \(error)")
}
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(value:(kAudioFormatMPEG4AAC)),
AVEncoderAudioQualityKey : NSNumber(value:AVAudioQuality.min.rawValue),
//AVEncoderBitRateKey : NSNumber(value:16),
AVNumberOfChannelsKey: NSNumber(value:1),
AVSampleRateKey : NSNumber(value:16000.0)
]
do {
recorder = try AVAudioRecorder(url: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.isMeteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(_ setup:Bool) {
checkHeadphones()
setSessionPlayback()
askForNotifications()
let session:AVAudioSession = AVAudioSession.sharedInstance()
// ios 8 and later
if (session.responds(to: #selector(AVAudioSession.requestRecordPermission(_:)))) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = Timer.scheduledTimer(timeInterval: 0.1,
target:self,
selector:#selector(RecorderViewController.updateAudioMeter(_:)),
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
self.uploadObjectSuccess = false
self.recorder.stop()
}
})
} else {
print("requestRecordPermission unrecognized")
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
self.uploadObjectSuccess = false
self.recorder.stop()
}
}
func setSessionPlayback() {
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func setSessionPlayAndRecord() {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func askForNotifications() {
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.background(_:)),
name:NSNotification.Name.UIApplicationWillResignActive,
object:nil)
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.foreground(_:)),
name:NSNotification.Name.UIApplicationWillEnterForeground,
object:nil)
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.routeChange(_:)),
name:NSNotification.Name.AVAudioSessionRouteChange,
object:nil)
}
@objc func background(_ notification:Notification) {
print("background")
if recorder != nil {
recorder.stop()
}
}
@objc func foreground(_ notification:Notification) {
print("foreground")
}
@objc func routeChange(_ notification:Notification) {
print("routeChange \(String(describing: (notification as NSNotification).userInfo))")
if let userInfo = (notification as NSNotification).userInfo {
//print("userInfo \(userInfo)")
if let reason = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt {
//print("reason \(reason)")
switch AVAudioSessionRouteChangeReason(rawValue: reason)! {
case AVAudioSessionRouteChangeReason.newDeviceAvailable:
print("NewDeviceAvailable")
print("did you plug in headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.oldDeviceUnavailable:
print("OldDeviceUnavailable")
print("did you unplug headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.categoryChange:
print("CategoryChange")
case AVAudioSessionRouteChangeReason.override:
print("Override")
case AVAudioSessionRouteChangeReason.wakeFromSleep:
print("WakeFromSleep")
case AVAudioSessionRouteChangeReason.unknown:
print("Unknown")
case AVAudioSessionRouteChangeReason.noSuitableRouteForCategory:
print("NoSuitableRouteForCategory")
case AVAudioSessionRouteChangeReason.routeConfigurationChange:
print("RouteConfigurationChange")
}
}
}
}
func checkHeadphones() {
// check NewDeviceAvailable and OldDeviceUnavailable for them being plugged in/unplugged
let currentRoute = AVAudioSession.sharedInstance().currentRoute
if currentRoute.outputs.count > 0 {
for description in currentRoute.outputs {
if description.portType == AVAudioSessionPortHeadphones {
print("headphones are plugged in")
break
} else {
print("headphones are unplugged")
}
}
} else {
print("checking headphones requires a connection to a device")
}
}
}
// MARK: AVAudioRecorderDelegate
extension RecorderViewController : AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder,
successfully flag: Bool) {
print("finished recording \(flag)")
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder,
error: Error?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
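One possible direction, offered as an assumption rather than a confirmed fix: handle the plug/unplug inside the routeChange(_:) handler above and restart the recorder on the new route, roughly like this:
@objc func routeChange(_ notification: Notification) {
guard let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt,
let reason = AVAudioSessionRouteChangeReason(rawValue: reasonValue) else { return }
switch reason {
case .newDeviceAvailable, .oldDeviceUnavailable:
checkHeadphones()
// The route change stops the audio hardware; calling record() again
// resumes writing to the same file on the new route.
if recorder != nil && !recorder.isRecording {
recorder.record()
}
default:
break
}
}
Whether a plain record() is enough can depend on the audio session configuration, so this is only a starting point.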

iOS Swift ImagePickerController: Improving Edit Mode

I have an ImagePickerController with allowsEditing = true. But I want to have more options in the edit screen, not just the resize mode. How can I improve it?
There is nothing you can do to improve the native image/video editing screen. If you need more options, you will need to create your own controller that accepts the raw camera output and does its editing on that.
I agree with @Lope, you can make your own by gathering the camera input:
import UIKit
import AVFoundation
import AssetsLibrary
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
var SessionRunningAndDeviceAuthorizedContext = "SessionRunningAndDeviceAuthorizedContext"
var CapturingStillImageContext = "CapturingStillImageContext"
@available(iOS 4.0, *)
public func fileOutput(_ captureOutput: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
}
// MARK: property
var sessionQueue: DispatchQueue!
var session: AVCaptureSession?
var videoDeviceInput: AVCaptureDeviceInput?
@objc var stillImageOutput: AVCaptureStillImageOutput?
var deviceAuthorized: Bool = false
var backgroundRecordId: UIBackgroundTaskIdentifier = UIBackgroundTaskInvalid
var sessionRunningAndDeviceAuthorized: Bool {
get {
return (self.session?.isRunning != nil && self.deviceAuthorized )
}
}
var runtimeErrorHandlingObserver: AnyObject?
var lockInterfaceRotation: Bool = false
@IBOutlet weak var previewView: AVCamPreviewView!
@IBOutlet weak var snapButton: UIButton!
@IBOutlet weak var cameraButton: UIButton!
var snappedimage = UIImage()
override func viewDidLoad() {
super.viewDidLoad()
let session: AVCaptureSession = AVCaptureSession()
session.sessionPreset = AVCaptureSession.Preset.photo
self.session = session
self.previewView.session = session
self.checkDeviceAuthorizationStatus()
let sessionQueue: DispatchQueue = DispatchQueue(label: "KehoeGames.whatever",attributes: [])
self.sessionQueue = sessionQueue
sessionQueue.async {
self.backgroundRecordId = UIBackgroundTaskInvalid
let videoDevice: AVCaptureDevice! = ViewController.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: AVCaptureDevice.Position.back)
var error: NSError? = nil
var videoDeviceInput: AVCaptureDeviceInput?
do {
videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
} catch let error1 as NSError {
error = error1
videoDeviceInput = nil
} catch {
fatalError()
}
if (error != nil) {
print(error)
let alert = UIAlertController(title: "Error", message: error!.localizedDescription
, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.present(alert, animated: true, completion: nil)
}
if session.canAddInput(videoDeviceInput!){
session.addInput(videoDeviceInput!)
self.videoDeviceInput = videoDeviceInput
DispatchQueue.main.async {
let orientation: AVCaptureVideoOrientation = AVCaptureVideoOrientation(rawValue: UIDevice.current.orientation.rawValue)!
(self.previewView.layer as! AVCaptureVideoPreviewLayer).connection?.videoOrientation = .portrait
(self.previewView.layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravity.resizeAspectFill
}
}
//Audio
let stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
if session.canAddOutput(stillImageOutput) {
stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
session.addOutput(stillImageOutput)
self.stillImageOutput = stillImageOutput
}
}
}
override func viewWillAppear(_ animated: Bool) {
self.sessionQueue.async {
self.addObserver(self, forKeyPath: "sessionRunningAndDeviceAuthorized", options: [.old , .new] , context: &SessionRunningAndDeviceAuthorizedContext)
self.addObserver(self, forKeyPath: "stillImageOutput.capturingStillImage", options:[.old , .new], context: &CapturingStillImageContext)
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.subjectAreaDidChange(_:)), name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDeviceInput?.device)
self.runtimeErrorHandlingObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureSessionRuntimeError, object: self.session, queue: nil) {
(note: Notification?) in
self.sessionQueue.async { [unowned self] in
if let sess = self.session {
sess.startRunning()
}
}
}
self.session?.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
self.sessionQueue.async {
if let sess = self.session {
sess.stopRunning()
NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDeviceInput?.device)
NotificationCenter.default.removeObserver(self.runtimeErrorHandlingObserver!)
self.removeObserver(self, forKeyPath: "sessionRunningAndDeviceAuthorized", context: &SessionRunningAndDeviceAuthorizedContext)
self.removeObserver(self, forKeyPath: "stillImageOutput.capturingStillImage", context: &CapturingStillImageContext)
}
}
}
override var shouldAutorotate : Bool {
return !self.lockInterfaceRotation
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == &CapturingStillImageContext{
let isCapturingStillImage: Bool = (change![NSKeyValueChangeKey.newKey]! as AnyObject).boolValue
if isCapturingStillImage {
self.runStillImageCaptureAnimation()
}
}else {
return super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
@objc func subjectAreaDidChange(_ notification: Notification){
let devicePoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
self.focusWithMode(AVCaptureDevice.FocusMode.continuousAutoFocus, exposureMode: AVCaptureDevice.ExposureMode.continuousAutoExposure, point: devicePoint, monitorSubjectAreaChange: false)
}
// MARK: Custom Function
func focusWithMode(_ focusMode:AVCaptureDevice.FocusMode, exposureMode:AVCaptureDevice.ExposureMode, point:CGPoint, monitorSubjectAreaChange:Bool){
self.sessionQueue.async {
let device: AVCaptureDevice! = self.videoDeviceInput!.device
do {
try device.lockForConfiguration()
if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode){
device.focusMode = focusMode
device.focusPointOfInterest = point
}
if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode){
device.exposurePointOfInterest = point
device.exposureMode = exposureMode
}
device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
device.unlockForConfiguration()
} catch {
print(error)
}
}
}
class func setFlashMode(_ flashMode: AVCaptureDevice.FlashMode, device: AVCaptureDevice){
if device.hasFlash && device.isFlashModeSupported(flashMode) {
var error: NSError? = nil
do {
try device.lockForConfiguration()
device.flashMode = flashMode
device.unlockForConfiguration()
} catch let error1 as NSError {
error = error1
print(error)
}
}
}
func runStillImageCaptureAnimation(){
DispatchQueue.main.async {
self.previewView.layer.opacity = 0.0
UIView.animate(withDuration: 0.25, animations: {
self.previewView.layer.opacity = 1.0
})
}
}
class func deviceWithMediaType(_ mediaType: String, preferringPosition:AVCaptureDevice.Position) -> AVCaptureDevice? {
var devices = AVCaptureDevice.devices(for: AVMediaType(rawValue: mediaType));
if (devices.isEmpty) {
print("This device has no camera. Probably the simulator.")
return nil
} else {
var captureDevice: AVCaptureDevice = devices[0]
for device in devices {
if (device as AnyObject).position == preferringPosition {
captureDevice = device
break
}
}
return captureDevice
}
}
func checkDeviceAuthorizationStatus(){
let mediaType:String = AVMediaType.video.rawValue;
AVCaptureDevice.requestAccess(for: AVMediaType(rawValue: mediaType)) { (granted: Bool) in
if granted {
self.deviceAuthorized = true;
} else {
DispatchQueue.main.async {
let alert: UIAlertController = UIAlertController(
title: "AVCam",
message: "AVCam does not have permission to access camera",
preferredStyle: UIAlertControllerStyle.alert)
let action = UIAlertAction(title: "OK", style: .default) { _ in }
alert.addAction(action)
self.present(alert, animated: true, completion: nil)
}
self.deviceAuthorized = false;
}
}
}
var snaptrue = false
@IBAction func snapStillImage(_ sender: AnyObject) {
if snaptrue == false {
snaptrue = true
self.sessionQueue.async {
// Update the orientation on the still image output video connection before capturing.
let videoOrientation = (self.previewView.layer as! AVCaptureVideoPreviewLayer).connection?.videoOrientation
self.stillImageOutput!.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation!
// Flash set to Auto for Still Capture
MeChoosePic.setFlashMode(AVCaptureDevice.FlashMode.off, device: self.videoDeviceInput!.device)
self.stillImageOutput!.captureStillImageAsynchronously(from: self.stillImageOutput!.connection(with:AVMediaType.video)!, completionHandler: { (imageDataSampleBuffer: CMSampleBuffer?, error: Error?) in
if error == nil {
let data:Data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)!
//PNG
let imagesnap:UIImage = UIImage(data: data)!
self.snappedimage = imagesnap
Timer.scheduledTimer(timeInterval: 1.3, target: self, selector: #selector(self.move), userInfo: nil, repeats: false)
} else {
print(error)
}
})
}
}
}
@IBAction func changeCamera(_ sender: AnyObject) {
self.cameraButton.isEnabled = false
self.snapButton.isEnabled = false
self.sessionQueue.async {
let currentVideoDevice:AVCaptureDevice = self.videoDeviceInput!.device
let currentPosition: AVCaptureDevice.Position = currentVideoDevice.position
var preferredPosition: AVCaptureDevice.Position = AVCaptureDevice.Position.unspecified
switch currentPosition {
case AVCaptureDevice.Position.front:
preferredPosition = AVCaptureDevice.Position.back
case AVCaptureDevice.Position.back:
preferredPosition = AVCaptureDevice.Position.front
case AVCaptureDevice.Position.unspecified:
preferredPosition = AVCaptureDevice.Position.back
}
guard let device:AVCaptureDevice? = MeChoosePic.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: preferredPosition) else {
print("there is no AVCapture Device")
return
}
var videoDeviceInput: AVCaptureDeviceInput?
do {
videoDeviceInput = try AVCaptureDeviceInput(device: device!)
} catch _ as NSError {
videoDeviceInput = nil
} catch {
fatalError()
}
self.session!.beginConfiguration()
self.session!.removeInput(self.videoDeviceInput!)
if self.session!.canAddInput(videoDeviceInput!) {
NotificationCenter.default.removeObserver(self, name:NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object:currentVideoDevice)
ViewController.setFlashMode(AVCaptureDevice.FlashMode.auto, device: device!)
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.subjectAreaDidChange(_:)), name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)
self.session!.addInput(videoDeviceInput!)
self.videoDeviceInput = videoDeviceInput
} else {
self.session!.addInput(self.videoDeviceInput!)
}
self.session!.commitConfiguration()
DispatchQueue.main.async {
self.snapButton.isEnabled = true
self.cameraButton.isEnabled = true
}
}
}
@IBAction func focusAndExposeTap(_ gestureRecognizer: UIGestureRecognizer) {
let devicePoint: CGPoint = (self.previewView.layer as! AVCaptureVideoPreviewLayer).captureDevicePointConverted(fromLayerPoint: gestureRecognizer.location(in: gestureRecognizer.view))
self.focusWithMode(AVCaptureDevice.FocusMode.autoFocus, exposureMode: AVCaptureDevice.ExposureMode.autoExpose, point: devicePoint, monitorSubjectAreaChange: true)
}
}
And in AVCamPreviewView:
import Foundation
import UIKit
import AVFoundation
class AVCamPreviewView: UIView {
var session: AVCaptureSession? {
get {
return (self.layer as! AVCaptureVideoPreviewLayer).session
}
set (session) {
(self.layer as! AVCaptureVideoPreviewLayer).session = session
}
}
override class var layerClass : AnyClass {
return AVCaptureVideoPreviewLayer.self
}
}
Do you also need the option of selecting pictures already taken from the camera roll? You just need to create a collection view and fetch all the images into it; there is a small sketch below. Just let me know!
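A minimal sketch of that idea using the Photos framework (the "PhotoCell" reuse identifier and the tag-1 image view are assumptions, and the app needs photo-library permission in Info.plist):
import UIKit
import Photos
class CameraRollViewController: UICollectionViewController {
private var assets: PHFetchResult<PHAsset>!
private let imageManager = PHCachingImageManager()
override func viewDidLoad() {
super.viewDidLoad()
// Fetch every image in the library, newest first.
let options = PHFetchOptions()
options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
assets = PHAsset.fetchAssets(with: .image, options: options)
}
override func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return assets.count
}
override func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell", for: indexPath)
let asset = assets.object(at: indexPath.item)
// Ask for a thumbnail and drop it into the cell's image view (tag 1).
imageManager.requestImage(for: asset, targetSize: CGSize(width: 200, height: 200), contentMode: .aspectFill, options: nil) { image, _ in
(cell.viewWithTag(1) as? UIImageView)?.image = image
}
return cell
}
}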
Niall

Error when trying to use UISlider to change rate in Swift audio app

I am getting an error that reads Thread 1: EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0). Specifically, this line, player.rate = sliderValue.value, is being flagged.
// RecorderViewController.swift
import UIKit
import AVFoundation
/**
Uses AVAudioRecorder to record a sound file and an AVAudioPlayer to play it back.
*/
class RecorderViewController: UIViewController {
var recorder: AVAudioRecorder!
var player:AVAudioPlayer!
@IBOutlet var recordButton: UIButton!
@IBOutlet var stopButton: UIButton!
@IBOutlet var playButton: UIButton!
@IBOutlet var statusLabel: UILabel!
@IBOutlet weak var sliderValue: UISlider!
var meterTimer:NSTimer!
var soundFileURL:NSURL!
override func viewDidLoad() {
super.viewDidLoad()
stopButton.enabled = false
playButton.enabled = false
setSessionPlayback()
askForNotifications()
checkHeadphones()
}
func updateAudioMeter(timer:NSTimer) {
if recorder.recording {
let min = Int(recorder.currentTime / 60)
let sec = Int(recorder.currentTime % 60)
let s = String(format: "%02d:%02d", min, sec)
statusLabel.text = s
recorder.updateMeters()
// if you want to draw some graphics...
//var apc0 = recorder.averagePowerForChannel(0)
//var peak0 = recorder.peakPowerForChannel(0)
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
recorder = nil
player = nil
}
@IBAction func removeAll(sender: AnyObject) {
deleteAllRecordings()
}
@IBAction func record(sender: UIButton) {
if player != nil && player.playing {
player.stop()
}
if recorder == nil {
print("recording. recorder nil")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(true)
return
}
if recorder != nil && recorder.recording {
print("pausing")
recorder.pause()
recordButton.setTitle("Continue", forState:.Normal)
} else {
print("recording")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
// recorder.record()
recordWithPermission(false)
}
}
@IBAction func stop(sender: UIButton) {
print("stop")
recorder?.stop()
player?.stop()
meterTimer.invalidate()
recordButton.setTitle("Record", forState:.Normal)
let session = AVAudioSession.sharedInstance()
do {
try session.setActive(false)
playButton.enabled = true
stopButton.enabled = false
recordButton.enabled = true
} catch let error as NSError {
print("could not make session inactive")
print(error.localizedDescription)
}
//recorder = nil
}
@IBAction func play(sender: UIButton) {
setSessionPlayback()
play()
}
func play() {
var url:NSURL?
if self.recorder != nil {
url = self.recorder.url
} else {
url = self.soundFileURL!
}
print("playing \(url)")
do {
self.player = try AVAudioPlayer(contentsOfURL: url!)
stopButton.enabled = true
player.enableRate = true
player.delegate = self
player.prepareToPlay()
player.volume = 1.0
player.play()
} catch let error as NSError {
self.player = nil
print(error.localizedDescription)
}
}
@IBAction func slideChange(sender: AnyObject) {
player.rate = sliderValue.value
}
func setupRecorder() {
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).m4a"
print(currentFileName)
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(currentFileName)
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.absoluteString) {
// probably won't happen. want to do something about it?
print("soundfile \(soundFileURL.absoluteString) exists")
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Max.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
]
do {
recorder = try AVAudioRecorder(URL: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.meteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(setup:Bool) {
let session:AVAudioSession = AVAudioSession.sharedInstance()
// ios 8 and later
if (session.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1,
target:self,
selector:"updateAudioMeter:",
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
}
})
} else {
print("requestRecordPermission unrecognized")
}
}
func setSessionPlayback() {
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func setSessionPlayAndRecord() {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func deleteAllRecordings() {
let docsDir =
NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let fileManager = NSFileManager.defaultManager()
do {
let files = try fileManager.contentsOfDirectoryAtPath(docsDir)
var recordings = files.filter( { (name: String) -> Bool in
return name.hasSuffix("m4a")
})
for var i = 0; i < recordings.count; i++ {
let path = docsDir + "/" + recordings[i]
print("removing \(path)")
do {
try fileManager.removeItemAtPath(path)
} catch let error as NSError {
NSLog("could not remove \(path)")
print(error.localizedDescription)
}
}
} catch let error as NSError {
print("could not get contents of directory at \(docsDir)")
print(error.localizedDescription)
}
}
func askForNotifications() {
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"background:",
name:UIApplicationWillResignActiveNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"foreground:",
name:UIApplicationWillEnterForegroundNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"routeChange:",
name:AVAudioSessionRouteChangeNotification,
object:nil)
}
func background(notification:NSNotification) {
print("background")
}
func foreground(notification:NSNotification) {
print("foreground")
}
func routeChange(notification:NSNotification) {
print("routeChange \(notification.userInfo)")
if let userInfo = notification.userInfo {
//print("userInfo \(userInfo)")
if let reason = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt {
//print("reason \(reason)")
switch AVAudioSessionRouteChangeReason(rawValue: reason)! {
case AVAudioSessionRouteChangeReason.NewDeviceAvailable:
print("NewDeviceAvailable")
print("did you plug in headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.OldDeviceUnavailable:
print("OldDeviceUnavailable")
print("did you unplug headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.CategoryChange:
print("CategoryChange")
case AVAudioSessionRouteChangeReason.Override:
print("Override")
case AVAudioSessionRouteChangeReason.WakeFromSleep:
print("WakeFromSleep")
case AVAudioSessionRouteChangeReason.Unknown:
print("Unknown")
case AVAudioSessionRouteChangeReason.NoSuitableRouteForCategory:
print("NoSuitableRouteForCategory")
case AVAudioSessionRouteChangeReason.RouteConfigurationChange:
print("RouteConfigurationChange")
}
}
}
}
func checkHeadphones() {
// check NewDeviceAvailable and OldDeviceUnavailable for them being plugged in/unplugged
let currentRoute = AVAudioSession.sharedInstance().currentRoute
if currentRoute.outputs.count > 0 {
for description in currentRoute.outputs {
if description.portType == AVAudioSessionPortHeadphones {
print("headphones are plugged in")
break
} else {
print("headphones are unplugged")
}
}
} else {
print("checking headphones requires a connection to a device")
}
}
@IBAction
func trim() {
if self.soundFileURL == nil {
print("no sound file")
return
}
print("trimming \(soundFileURL!.absoluteString)")
print("trimming path \(soundFileURL!.lastPathComponent)")
let asset = AVAsset(URL:self.soundFileURL!)
exportAsset(asset, fileName: "trimmed.m4a")
}
func exportAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
print("saving to \(trimmedSoundFileURL.absoluteString)")
if NSFileManager.defaultManager().fileExistsAtPath(trimmedSoundFileURL.absoluteString) {
print("sound exists, removing \(trimmedSoundFileURL.absoluteString)")
do {
var error:NSError?
if trimmedSoundFileURL.checkResourceIsReachableAndReturnError(&error) {
print("is reachable")
}
if let e = error {
print(e.localizedDescription)
}
try NSFileManager.defaultManager().removeItemAtPath(trimmedSoundFileURL.absoluteString)
} catch let error as NSError {
NSLog("could not remove \(trimmedSoundFileURL)")
print(error.localizedDescription)
}
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// e.g. the first 5 seconds
let startTime = CMTimeMake(0, 1)
let stopTime = CMTimeMake(5, 1)
exporter.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
if let e = exporter.error {
print("export failed \(e)")
switch e.code {
case AVError.FileAlreadyExists.rawValue:
print("File Exists")
break
default: break
}
} else {
print("export failed")
}
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
@IBAction
func speed() {
let asset = AVAsset(URL:self.soundFileURL!)
exportSpeedAsset(asset, fileName: "trimmed.m4a")
}
func exportSpeedAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
let filemanager = NSFileManager.defaultManager()
if filemanager.fileExistsAtPath(trimmedSoundFileURL.absoluteString) {
print("sound exists")
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
exporter.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
print("export failed \(exporter.error)")
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
}
// MARK: AVAudioRecorderDelegate
extension RecorderViewController : AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder,
successfully flag: Bool) {
print("finished recording \(flag)")
stopButton.enabled = false
playButton.enabled = true
recordButton.setTitle("Record", forState:.Normal)
// iOS8 and later
let alert = UIAlertController(title: "Recorder",
message: "Finished Recording",
preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "Keep", style: .Default, handler: {action in
print("keep was tapped")
}))
alert.addAction(UIAlertAction(title: "Delete", style: .Default, handler: {action in
print("delete was tapped")
self.recorder.deleteRecording()
}))
self.presentViewController(alert, animated:true, completion:nil)
}
func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder,
error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
// MARK: AVAudioPlayerDelegate
extension RecorderViewController : AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
print("finished playing \(flag)")
recordButton.enabled = true
stopButton.enabled = false
}
func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer, error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
Reviewing your code, I think you should first check that player is not nil, something like this:
@IBAction func slideChange(sender: AnyObject) {
if(player != nil)
{
player.rate = sliderValue.value
}
}
I hope this helps you!
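A slightly fuller sketch of the same guard (names from the question), also clamping the slider to the 0.5 to 2.0 range that AVAudioPlayer's rate supports; note that rate only takes effect because play() sets enableRate = true before playing:
@IBAction func slideChange(sender: AnyObject) {
guard let player = player else { return } // nothing loaded yet
player.rate = max(0.5, min(sliderValue.value, 2.0))
}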
