Swift audio recording and tableview display - iOS

I am having trouble recording audio and displaying it in a table view. I am able to record and immediately play it back, but the audio doesn't seem to be stored on the device permanently, so I am unable to load it from the table view. The documents directory also seems to change each time the app is opened. How can I correct my code so recordings are saved permanently and can be recalled when populating the table view rows?
func record() {
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
if (audioSession.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("granted")
try! audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try! audioSession.setActive(true)
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let fullPath = (documentsDirectory as NSString).stringByAppendingPathComponent("Mobile.PCM")
let url = NSURL.fileURLWithPath(fullPath)
print(fullPath)
let settings: [String : AnyObject] = [
AVFormatIDKey:Int(kAudioFormatAppleIMA4),
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:2,
AVEncoderBitRateKey:12800,
AVLinearPCMBitDepthKey:16,
AVEncoderAudioQualityKey:AVAudioQuality.Max.rawValue
]
try! self.audioRecorder = AVAudioRecorder(URL: url, settings: settings)
self.audioRecorder.meteringEnabled = true
self.audioRecorder.record()
} else{
print("not granted")
}
})
}
}
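As an aside, the settings above mix an IMA4 format ID with a ".PCM" file extension, a linear-PCM bit-depth key and a very low encoder bit rate. AVAudioRecorder infers the container format from the URL's file extension, so a more self-consistent variant of the corresponding lines above would look like the sketch below (one plausible intent, not necessarily what the asker needs):
let fullPath = (documentsDirectory as NSString).stringByAppendingPathComponent("Mobile.caf") // CAF container can hold IMA4
let settings: [String : AnyObject] = [
    AVFormatIDKey: Int(kAudioFormatAppleIMA4),
    AVSampleRateKey: 44100.0,
    AVNumberOfChannelsKey: 2,
    AVEncoderAudioQualityKey: AVAudioQuality.Max.rawValue
]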

I am able to record and save with this:
@IBAction func record(sender: UIButton) {
if player != nil && player.playing {
player.stop()
}
if recorder == nil {
print("recording. recorder nil")
// recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(true)
return
}
if recorder != nil && recorder.recording {
print("pausing")
recorder.pause()
recordButton.setTitle("Continue", forState:.Normal)
} else {
print("recording")
// recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(false)
}
}
func setupRecorder() {
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).caf"
print(currentFileName)
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(currentFileName)
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.absoluteString) {
print("soundfile \(soundFileURL.absoluteString) exists")
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Max.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
]
do {
recorder = try AVAudioRecorder(URL: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.meteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(setup:Bool) {
let session:AVAudioSession = AVAudioSession.sharedInstance()
if (session.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1,
target:self,
selector:"updateAudioMeter:",
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
}
})
} else {
print("requestRecordPermission unrecognized")
}
}
I am able to recall it in a TableView with this:
override func viewDidLoad() {
super.viewDidLoad()
tableView.reloadData()
listRecordings()
}
func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
let cell: UITableViewCell = self.tableView.dequeueReusableCellWithIdentifier("cell")!
cell.textLabel!.text = recordings[indexPath.row].lastPathComponent
return cell
}
func listRecordings() {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
do {
let urls = try NSFileManager.defaultManager().contentsOfDirectoryAtURL(documentsDirectory, includingPropertiesForKeys: nil, options: NSDirectoryEnumerationOptions.SkipsHiddenFiles)
self.recordings = urls.filter( { (name: NSURL) -> Bool in
return name.lastPathComponent!.hasSuffix("caf")
})
} catch let error as NSError {
print(error.localizedDescription)
} catch {
print("something went wrong")
}
}
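One likely cause of the recordings seeming to vanish: on modern iOS the app's sandbox container path (and therefore the absolute Documents path) can change between launches, and fileExistsAtPath expects a plain path (soundFileURL.path), not absoluteString. A minimal sketch under those assumptions is to persist only the file name and rebuild the URL from the current Documents directory whenever a row is selected:
func urlForRecording(named name: String) -> NSURL {
    // Re-resolve the Documents directory at read time instead of storing absolute paths
    let documentsDirectory = NSFileManager.defaultManager()
        .URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    return documentsDirectory.URLByAppendingPathComponent(name)
}
// e.g. in didSelectRowAtIndexPath (illustrative, assumes a `player` property exists):
// let fileURL = urlForRecording(named: recordings[indexPath.row].lastPathComponent!)
// player = try AVAudioPlayer(contentsOfURL: fileURL)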

Related

Uploading recording to Firebase with Swift

I've been trying to upload the audio recording to Firebase right after the user stops recording, but it doesn't do anything apart from creating a new folder named "audio".
Code I'm using for starting and stopping the recording:
@IBAction func recordAudio(_ sender: AnyObject) {
recordingLabel.text = "Recording in progress"
stopRecordingButton.isEnabled = true
recordButton.isEnabled = false
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with:AVAudioSessionCategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
@IBAction func stopRecording(_ sender: AnyObject) {
print("Stop recording button was pressed")
recordButton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to Record"
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
Code I'm using for uploading to Firebase:
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("finished recording")
let storageRef = Storage.storage().reference().child("audio/recordedVoice.wav")
if let uploadData = AVFileType(self.recordedVoice.wav!) {
storageRef.put(uploadData, metadata: nil) {(metadata, error) in
if error != nil {
print(error)
return
}
}
}
}
Please help me!
Try:
let audioName = NSUUID().uuidString //You'll get unique audioFile name
let storageRef = Storage.storage().reference().child("audio").child(audioName)
let metadata = StorageMetadata()
metadata.contentType = "audio/wav"
if let uploadData = try? Data(contentsOf: audioRecorder.url) { // assumption: read the recorded file back from the recorder's URL
storageRef.putData(uploadData, metadata: metadata) { (metadata, err) in
if err != nil {
//print(err)
return
}
if let _ = metadata?.downloadURL()?.absoluteString {
print("uploading done!")
}
}
}
So I have been working on this for hours, and here is my answer:
func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
func getFileURL() -> URL {
let path = getDocumentsDirectory()
let filePath = path.appendingPathComponent(K.fileName)
return filePath
}
let referance = storage.reference()
let mediaFolder = referance.child("media")
let id = UUID().uuidString // use a UUID to give unique names to audio files, preventing overwrites
let mediaRef = mediaFolder.child(id + K.fileName) // create the file reference using uuid + filename
let path = getFileURL() // getting filepath
do {
let data = try Data(contentsOf: path) // getting data from filepath
mediaRef.putData(data) { metadata, error in
if error != nil {
self.showAlert(title: "Error", message: error?.localizedDescription, cancelButtonTitle: "cancel", handler: nil)
} else {
mediaRef.downloadURL { url, error in
let url = url?.absoluteString
print(url)
}
}
}
print("record has come")
} catch {
print("error cant get audio file")
}
I'm relatively new at coding and still learning, so my answer may not be the best or the shortest, but this worked for me.

Automatically Start audio recording when user speaks

I am trying to start recording when the user starts to talk and stop recording when the user is done talking. I also want to limit the maximum recording length. I could not find a suitable callback for this in AVAudioRecorderDelegate. I hope you understand my problem. Thanks in advance.
@IBAction func recordAudio(_ sender: Any) {
recordingLabel.text = "Recording in progress..."
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with:AVAudioSessionCategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
@IBAction func stopRecording(_ sender: Any) {
recordButton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to record..."
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if (flag) {
//Success
} else {
print("Could not save audio recording!")
}
}
To record audio when the user talks, you need a few steps:
1. Permission from the user to let your app use the mic: in your Info.plist, add the Privacy - Microphone Usage Description key with a text description.
2. A location to save the recorded file, using FileManager.
3. To end after a fixed time, use audioRecorder.record(forDuration: 30) // record for 30 sec
Check the complete code:
import UIKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet weak var recordButton: UIButton!
var recordingSession: AVAudioSession!
var audioRecorder: AVAudioRecorder!
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func recordAudio(_ sender: Any) {
self.requestRecordPermission()
}
func requestRecordPermission() {
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission() { [unowned self] allowed in
DispatchQueue.main.async {
if allowed {
// User allow you to record
// Start recording and change UIbutton color
self.recordButton.backgroundColor = .red
self.startRecording()
} else {
// failed to record!
}
}
}
} catch {
// failed to record!
}
}
func startRecording() {
let audioFilename = getDocumentsDirectory().appendingPathComponent("recordedFile.m4a")
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
audioRecorder.delegate = self
audioRecorder.record(forDuration: 30) // record for 30 Sec
recordButton.setTitle("Tap to Stop", for: .normal)
recordButton.backgroundColor = .green
} catch {
finishRecording(success: false)
}
}
func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
@objc func recordTapped() {
if audioRecorder == nil {
startRecording()
} else {
finishRecording(success: true)
}
}
public func finishRecording(success: Bool) {
audioRecorder.stop()
audioRecorder = nil
if success {
// record success
recordButton.backgroundColor = .green
} else {
// record fail
recordButton.backgroundColor = .yellow
}
}
}
extension ViewController :AVAudioRecorderDelegate{
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
}
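Note that AVAudioRecorderDelegate by itself cannot tell you when the user starts or stops talking. One hedged approach is to keep the recorder running with metering enabled and poll averagePower(forChannel:). A rough sketch that would sit in the same ViewController follows; the levelTimer/silenceStartedAt names, the -30 dB threshold and the 1.5 s silence window are illustrative, not part of the answer above:
var levelTimer: Timer?
var silenceStartedAt: Date?
func startVoiceActivatedRecording() {
    startRecording()                          // from the answer above (forDuration still caps the length)
    audioRecorder?.isMeteringEnabled = true   // required for averagePower(forChannel:)
    levelTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
        guard let vc = self, let recorder = vc.audioRecorder else { return }
        recorder.updateMeters()
        let level = recorder.averagePower(forChannel: 0) // dBFS: 0 is full scale, around -160 is silence
        if level > -30 {
            vc.silenceStartedAt = nil                    // still talking
        } else if vc.silenceStartedAt == nil {
            vc.silenceStartedAt = Date()                 // silence just began
        } else if Date().timeIntervalSince(vc.silenceStartedAt!) > 1.5 {
            vc.levelTimer?.invalidate()                  // ~1.5 s of silence: treat the utterance as done
            vc.finishRecording(success: true)
        }
    }
}
Detecting the start of speech the same way means the recorder is already running, so you would either accept the leading silence or trim it afterwards (for example with AVAssetExportSession).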

Error when trying to use UISlider to change rate in Swift audio app

I am getting an error that reads Thread 1: EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0). Specifically, this line, player.rate = sliderValue.value, is being flagged.
// RecorderViewController.swift
import UIKit
import AVFoundation
/**
Uses AVAudioRecorder to record a sound file and an AVAudioPlayer to play it back.
*/
class RecorderViewController: UIViewController {
var recorder: AVAudioRecorder!
var player:AVAudioPlayer!
@IBOutlet var recordButton: UIButton!
@IBOutlet var stopButton: UIButton!
@IBOutlet var playButton: UIButton!
@IBOutlet var statusLabel: UILabel!
@IBOutlet weak var sliderValue: UISlider!
var meterTimer:NSTimer!
var soundFileURL:NSURL!
override func viewDidLoad() {
super.viewDidLoad()
stopButton.enabled = false
playButton.enabled = false
setSessionPlayback()
askForNotifications()
checkHeadphones()
}
func updateAudioMeter(timer:NSTimer) {
if recorder.recording {
let min = Int(recorder.currentTime / 60)
let sec = Int(recorder.currentTime % 60)
let s = String(format: "%02d:%02d", min, sec)
statusLabel.text = s
recorder.updateMeters()
// if you want to draw some graphics...
//var apc0 = recorder.averagePowerForChannel(0)
//var peak0 = recorder.peakPowerForChannel(0)
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
recorder = nil
player = nil
}
@IBAction func removeAll(sender: AnyObject) {
deleteAllRecordings()
}
@IBAction func record(sender: UIButton) {
if player != nil && player.playing {
player.stop()
}
if recorder == nil {
print("recording. recorder nil")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(true)
return
}
if recorder != nil && recorder.recording {
print("pausing")
recorder.pause()
recordButton.setTitle("Continue", forState:.Normal)
} else {
print("recording")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
// recorder.record()
recordWithPermission(false)
}
}
@IBAction func stop(sender: UIButton) {
print("stop")
recorder?.stop()
player?.stop()
meterTimer.invalidate()
recordButton.setTitle("Record", forState:.Normal)
let session = AVAudioSession.sharedInstance()
do {
try session.setActive(false)
playButton.enabled = true
stopButton.enabled = false
recordButton.enabled = true
} catch let error as NSError {
print("could not make session inactive")
print(error.localizedDescription)
}
//recorder = nil
}
@IBAction func play(sender: UIButton) {
setSessionPlayback()
play()
}
func play() {
var url:NSURL?
if self.recorder != nil {
url = self.recorder.url
} else {
url = self.soundFileURL!
}
print("playing \(url)")
do {
self.player = try AVAudioPlayer(contentsOfURL: url!)
stopButton.enabled = true
player.enableRate = true
player.delegate = self
player.prepareToPlay()
player.volume = 1.0
player.play()
} catch let error as NSError {
self.player = nil
print(error.localizedDescription)
}
}
@IBAction func slideChange(sender: AnyObject) {
player.rate = sliderValue.value
}
func setupRecorder() {
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).m4a"
print(currentFileName)
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(currentFileName)
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.absoluteString) {
// probably won't happen. want to do something about it?
print("soundfile \(soundFileURL.absoluteString) exists")
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Max.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
]
do {
recorder = try AVAudioRecorder(URL: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.meteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(setup:Bool) {
let session:AVAudioSession = AVAudioSession.sharedInstance()
// ios 8 and later
if (session.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1,
target:self,
selector:"updateAudioMeter:",
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
}
})
} else {
print("requestRecordPermission unrecognized")
}
}
func setSessionPlayback() {
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func setSessionPlayAndRecord() {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func deleteAllRecordings() {
let docsDir =
NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let fileManager = NSFileManager.defaultManager()
do {
let files = try fileManager.contentsOfDirectoryAtPath(docsDir)
var recordings = files.filter( { (name: String) -> Bool in
return name.hasSuffix("m4a")
})
for var i = 0; i < recordings.count; i++ {
let path = docsDir + "/" + recordings[i]
print("removing \(path)")
do {
try fileManager.removeItemAtPath(path)
} catch let error as NSError {
NSLog("could not remove \(path)")
print(error.localizedDescription)
}
}
} catch let error as NSError {
print("could not get contents of directory at \(docsDir)")
print(error.localizedDescription)
}
}
func askForNotifications() {
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"background:",
name:UIApplicationWillResignActiveNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"foreground:",
name:UIApplicationWillEnterForegroundNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"routeChange:",
name:AVAudioSessionRouteChangeNotification,
object:nil)
}
func background(notification:NSNotification) {
print("background")
}
func foreground(notification:NSNotification) {
print("foreground")
}
func routeChange(notification:NSNotification) {
print("routeChange \(notification.userInfo)")
if let userInfo = notification.userInfo {
//print("userInfo \(userInfo)")
if let reason = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt {
//print("reason \(reason)")
switch AVAudioSessionRouteChangeReason(rawValue: reason)! {
case AVAudioSessionRouteChangeReason.NewDeviceAvailable:
print("NewDeviceAvailable")
print("did you plug in headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.OldDeviceUnavailable:
print("OldDeviceUnavailable")
print("did you unplug headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.CategoryChange:
print("CategoryChange")
case AVAudioSessionRouteChangeReason.Override:
print("Override")
case AVAudioSessionRouteChangeReason.WakeFromSleep:
print("WakeFromSleep")
case AVAudioSessionRouteChangeReason.Unknown:
print("Unknown")
case AVAudioSessionRouteChangeReason.NoSuitableRouteForCategory:
print("NoSuitableRouteForCategory")
case AVAudioSessionRouteChangeReason.RouteConfigurationChange:
print("RouteConfigurationChange")
}
}
}
}
func checkHeadphones() {
// check NewDeviceAvailable and OldDeviceUnavailable for them being plugged in/unplugged
let currentRoute = AVAudioSession.sharedInstance().currentRoute
if currentRoute.outputs.count > 0 {
for description in currentRoute.outputs {
if description.portType == AVAudioSessionPortHeadphones {
print("headphones are plugged in")
break
} else {
print("headphones are unplugged")
}
}
} else {
print("checking headphones requires a connection to a device")
}
}
@IBAction
func trim() {
if self.soundFileURL == nil {
print("no sound file")
return
}
print("trimming \(soundFileURL!.absoluteString)")
print("trimming path \(soundFileURL!.lastPathComponent)")
let asset = AVAsset(URL:self.soundFileURL!)
exportAsset(asset, fileName: "trimmed.m4a")
}
func exportAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
print("saving to \(trimmedSoundFileURL.absoluteString)")
if NSFileManager.defaultManager().fileExistsAtPath(trimmedSoundFileURL.absoluteString) {
print("sound exists, removing \(trimmedSoundFileURL.absoluteString)")
do {
var error:NSError?
if trimmedSoundFileURL.checkResourceIsReachableAndReturnError(&error) {
print("is reachable")
}
if let e = error {
print(e.localizedDescription)
}
try NSFileManager.defaultManager().removeItemAtPath(trimmedSoundFileURL.absoluteString)
} catch let error as NSError {
NSLog("could not remove \(trimmedSoundFileURL)")
print(error.localizedDescription)
}
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// e.g. the first 5 seconds
let startTime = CMTimeMake(0, 1)
let stopTime = CMTimeMake(5, 1)
exporter.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
if let e = exporter.error {
print("export failed \(e)")
switch e.code {
case AVError.FileAlreadyExists.rawValue:
print("File Exists")
break
default: break
}
} else {
print("export failed")
}
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
@IBAction
func speed() {
let asset = AVAsset(URL:self.soundFileURL!)
exportSpeedAsset(asset, fileName: "trimmed.m4a")
}
func exportSpeedAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
let filemanager = NSFileManager.defaultManager()
if filemanager.fileExistsAtPath(trimmedSoundFileURL.absoluteString) {
print("sound exists")
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
exporter.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
print("export failed \(exporter.error)")
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
}
// MARK: AVAudioRecorderDelegate
extension RecorderViewController : AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder,
successfully flag: Bool) {
print("finished recording \(flag)")
stopButton.enabled = false
playButton.enabled = true
recordButton.setTitle("Record", forState:.Normal)
// iOS8 and later
let alert = UIAlertController(title: "Recorder",
message: "Finished Recording",
preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "Keep", style: .Default, handler: {action in
print("keep was tapped")
}))
alert.addAction(UIAlertAction(title: "Delete", style: .Default, handler: {action in
print("delete was tapped")
self.recorder.deleteRecording()
}))
self.presentViewController(alert, animated:true, completion:nil)
}
func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder,
error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
// MARK: AVAudioPlayerDelegate
extension RecorderViewController : AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
print("finished playing \(flag)")
recordButton.enabled = true
stopButton.enabled = false
}
func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer, error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
Reviewing your code, I think you should first check that player is not nil, something like this:
@IBAction func slideChange(sender: AnyObject) {
if(player != nil)
{
player.rate = sliderValue.value
}
}
I hope this helps you!
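A slightly fuller guard, assuming the slider's range was not already constrained in Interface Builder: AVAudioPlayer only honours rate when enableRate was set before prepareToPlay() (which the play() method above already does), and it documents a supported range of 0.5-2.0, so clamping avoids silently ignored values. A sketch:
@IBAction func slideChange(sender: AnyObject) {
    guard let player = player else { return }            // player is only created in play()
    player.rate = max(0.5, min(sliderValue.value, 2.0))  // keep within AVAudioPlayer's 0.5-2.0 range
}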

nil while unwrapping an optional value in Swift [duplicate]

I have downloaded a project from https://github.com/doberman/speaker-gender-detect--ios. I followed the instructions, but when I run the app I get a message that says: fatal error: unexpectedly found nil while unwrapping an Optional value. How can I fix the crash so that the app works?
The app crashes here:
let genderEqualityRatios = self.calcGenderEquality(String(response.result.value!))
This is my code:
import AVFoundation
import Alamofire
import SwiftyJSON
protocol AudioRecorderDelegate {
func audioRecorder(audioRecorder: AudioRecorder?, updatedLevel: Float)
func audioRecorder(audioRecorder: AudioRecorder?, updatedGenderEqualityRatio: (male: Float, female: Float))
}
class AudioRecorder: NSObject {
static let sharedInstance: AudioRecorder = AudioRecorder()
private let kRemoteURL: NSURL = NSURL(string: "xxx.xxx.xxx.xxx")! // change to your API endpoint URL
private let kPostAudioInterval: NSTimeInterval = 10.0 // change to post to API more/less frequently
var delegate: AudioRecorderDelegate?
private let recorderSettings = [
AVSampleRateKey: NSNumber(float: Float(16000.0)),
AVFormatIDKey: NSNumber(int: Int32(kAudioFormatMPEG4AAC)),
AVNumberOfChannelsKey: NSNumber(int: 1),
AVEncoderAudioQualityKey: NSNumber(int: Int32(AVAudioQuality.High.rawValue))
]
private var recorder: AVAudioRecorder?
private var checkLevelsTimer: NSTimer?
private var postTimer: NSTimer?
private var maleDuration: Float = 0.0
private var femaleDuration: Float = 0.0
override init() {
super.init()
do {
let audioSession: AVAudioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryRecord)
} catch let err as NSError {
print("Failed to initialize AudioRecorder: \(err)")
}
}
func startRecording() {
// print("startRecording")
if self.recorder != nil && self.recorder!.recording {
self.stopRecording()
}
let audioURL: NSURL = self.getAudioURL()
// print("got audioURL: '\(audioURL)'")
do {
self.recorder = try AVAudioRecorder(URL: audioURL, settings: self.recorderSettings)
self.recorder?.meteringEnabled = true
self.recorder?.prepareToRecord()
} catch let err as NSError {
print("Failed to set up AVAudioRecorder instance: \(err)")
}
guard self.recorder != nil else { return }
self.recorder?.record()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(true)
self.checkLevelsTimer = NSTimer.scheduledTimerWithTimeInterval(0.05, target: self, selector: "checkLevels", userInfo: nil, repeats: true)
self.postTimer = NSTimer.scheduledTimerWithTimeInterval(kPostAudioInterval, target: self, selector: "onPostTimerTrigger", userInfo: nil, repeats: true)
} catch let err as NSError {
print("Failed to activate audio session (or failed to set up checkLevels timer): \(err)")
}
}
func stopRecording(shouldSubmitAudioAfterStop: Bool = false) {
// print("stopRecording")
guard self.recorder != nil else {
print("`self.recorder` is `nil` - no recording to stop")
return
}
self.recorder?.stop()
if let t = self.checkLevelsTimer {
t.invalidate()
self.checkLevelsTimer = nil
}
if let t = self.postTimer {
t.invalidate()
self.postTimer = nil
}
let audioURL: NSURL = self.recorder!.url
self.recorder = nil
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(false)
if shouldSubmitAudioAfterStop {
self.postAudio(audioURL)
} else {
// print("`shouldSubmitAudioAfterStop` is `false` - I won't post audio")
}
} catch let err as NSError {
print("Failed to deactivate audio session (or failed to post audio): \(err)")
}
}
// MARK: -
func checkLevels() {
guard self.recorder != nil else {
print("`self.recorder` is `nil` - can't check levels")
return
}
self.recorder?.updateMeters()
let averagePower: Float = self.recorder!.averagePowerForChannel(0)
if let d = self.delegate {
d.audioRecorder(self, updatedLevel: averagePower)
} else {
print("AudioRecorder - averagePower: \(averagePower)")
}
}
func onPostTimerTrigger() {
// print("onPostTimerTrigger")
guard let r = self.recorder else {
print("`self.recorder` is `nil` - no audio to post")
return
}
if !r.recording {
print("not recording - no audio to post")
}
self.stopRecording(true)
self.startRecording()
}
// MARK: -
private func getAudioURL(filename: String = "recording") -> NSURL {
let fileManager: NSFileManager = NSFileManager.defaultManager()
let urls: [NSURL] = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let documentDirectory: NSURL = urls[0] as NSURL
let uniqueFilename = "\(filename)_\(NSDate().timeIntervalSince1970)"
let audioURL: NSURL = documentDirectory.URLByAppendingPathComponent("\(uniqueFilename).m4a")
return audioURL
}
private func postAudio(audioURL: NSURL) {
// print("AudioRecorder.postAudio - audioURL: \(audioURL.absoluteString)")
Alamofire.upload(Method.POST, kRemoteURL, multipartFormData: { multipartFormData in
multipartFormData.appendBodyPart(fileURL: audioURL, name: "file")
}, encodingCompletion: { encodingResult in
switch encodingResult {
case .Success (let upload, _, _):
upload.responseString { response in
//print("response: \(response)")
let genderEqualityRatios = self.calcGenderEquality(String(response.result.value!))
if let eq = genderEqualityRatios, let d = self.delegate {
d.audioRecorder(self, updatedGenderEqualityRatio: eq)
}
}
case .Failure(let encodingError):
print("encodingError: \(encodingError)")
}
})
}
private func calcGenderEquality(response: String) -> (male: Float, female: Float)? {
guard let dataFromString = response.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: false) else {
return nil
}
let json = JSON(data: dataFromString)
for selection in json["selections"].arrayValue {
if selection["gender"] == "M" {
self.maleDuration = self.maleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
} else if selection["gender"] == "F" {
self.femaleDuration = self.femaleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
}
}
let spokenDuration = self.maleDuration + self.femaleDuration
let maleFactor = self.maleDuration / spokenDuration
let femaleFactor = self.femaleDuration / spokenDuration
guard !maleFactor.isNaN else {
print("Failed to calculate gender equality (`maleFactor` is `NaN`)")
return nil
}
guard !femaleFactor.isNaN else {
print("Failed to calculate gender equality (`femaleFactor` is `NaN`)")
return nil
}
return (male: maleFactor, female: femaleFactor)
}
}
Try it like this:
if let genderEqualityRatios = response.result.value as? String {
self.calcGenderEquality(genderEqualityRatios)
} else {
print("a problem occurred and we couldn't call calcGenderEquality")
}
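An equivalent that also surfaces the failure case, written against the same .Success/.Failure Result style already used in postAudio above (a sketch, not a verified drop-in):
upload.responseString { response in
    switch response.result {
    case .Success(let value):
        if let eq = self.calcGenderEquality(value), let d = self.delegate {
            d.audioRecorder(self, updatedGenderEqualityRatio: eq)
        }
    case .Failure(let error):
        print("request failed, skipping calcGenderEquality: \(error)")
    }
}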

AVAudioRecorder record stream audio

I have an app that plays streaming audio. How can I record the streaming audio from AVPlayer using AVAudioRecorder (or something else)? Thanks.
Swift 3+
Here is a simple VC which will save the recording to the documents dir (audio.aac):
import UIKit
import AVFoundation
class ViewController: UIViewController {
var audioRecorder: AVAudioRecorder?
override func viewDidLoad() {
super.viewDidLoad()
verifyRecordPermission()
}
@IBAction func recordButonAction(_ sender: UIButton) {
if audioRecorder?.isRecording == true {
audioRecorder?.stop()
sender.setTitle("Record", for: .normal)
}
else {
startRecording()
sender.setTitle("Stop", for: .normal)
}
}
func verifyRecordPermission() {
let permission = AVAudioSession.sharedInstance().recordPermission()
switch permission {
case .denied:
print("recording not allowed")
case .granted:
print("recording allowed")
default:
AVAudioSession.sharedInstance().requestRecordPermission() { (granted) -> Void in
print("recording granted:\(granted)")
}
}
}
func startRecording() {
guard AVAudioSession.sharedInstance().recordPermission() == .granted else {
return
}
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let audioUrl = URL(fileURLWithPath: "\(documentsPath)/audio.aac")
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryRecord)
try session.setActive(true)
try audioRecorder = AVAudioRecorder(url: audioUrl,
settings: [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 24000.0,
AVNumberOfChannelsKey: 1 as NSNumber,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue])
} catch {
// handle error...
return
}
guard let audioRecorder = audioRecorder else {
return
}
audioRecorder.prepareToRecord()
audioRecorder.record()
}
func stop(session: AVAudioSession = AVAudioSession.sharedInstance()) {
audioRecorder?.stop()
audioRecorder = nil
do {
try session.setActive(false)
} catch {
// handle session errors
}
}
}
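Worth noting: AVAudioRecorder, as used above, records from the microphone, not from what AVPlayer is playing. Capturing a player's own output generally means routing the audio through AVAudioEngine (or an MTAudioProcessingTap for a remote AVPlayer stream) and writing the rendered buffers yourself. A rough sketch for a locally playable file, in the same Swift 3+ style as above (the class and file names are illustrative):
import AVFoundation
final class PlayerCaptureExample {
    private let engine = AVAudioEngine()
    private let playerNode = AVAudioPlayerNode()
    private var outputFile: AVAudioFile?
    // Plays a local file through AVAudioEngine and writes the mixer output to Documents/capture.caf
    func playAndCapture(fileURL: URL) throws {
        let sourceFile = try AVAudioFile(forReading: fileURL)
        engine.attach(playerNode)
        engine.connect(playerNode, to: engine.mainMixerNode, format: sourceFile.processingFormat)
        let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let tapFormat = engine.mainMixerNode.outputFormat(forBus: 0)
        outputFile = try AVAudioFile(forWriting: docs.appendingPathComponent("capture.caf"),
                                     settings: tapFormat.settings)
        // Append every rendered buffer from the mixer to the output file
        engine.mainMixerNode.installTap(onBus: 0, bufferSize: 4096, format: tapFormat) { [weak self] buffer, _ in
            try? self?.outputFile?.write(from: buffer)
        }
        try engine.start()
        playerNode.scheduleFile(sourceFile, at: nil, completionHandler: nil)
        playerNode.play()
    }
    func stopCapture() {
        engine.mainMixerNode.removeTap(onBus: 0)
        playerNode.stop()
        engine.stop()
        outputFile = nil
    }
}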

Resources