Storyboard segue causes AVAudioEngine to crash - ios

I am working with an AVAudioUnitSampler that is attached to an AVAudioEngine within my app. I've gotten everything to work well except when I segue out of the view controller that the sampler is instantiated in. I get a crash with an error that says:
Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: outputNode'
I'm guessing that this is because the engine is getting interrupted somehow when I segue back to my previous VC, not sure!
I've tried to stop the engine and also the sequencer inside of viewWillDisappear, but it still crashes.
If I use a UIButton show segue to the previous VC, it sort of works, but I get a crash with an unwind segue and with the current navigation bar segue.
I'm a newbie, so hopefully I've explained this well enough!
I am getting to this VC from a segue triggered from a table view on the previous VC.
Here is the code for the VC in question:
import UIKit
import AVFoundation
class PlayerViewController: UIViewController {

    // MARK: - Outlets

    @IBOutlet weak var descriptionLabel: UILabel!
    @IBOutlet weak var playPauseButton: UIButton!
    @IBOutlet weak var musicView: UIImageView!

    // MARK: - Model (types defined elsewhere in the project)

    let allSounds = SoundBankOnAndOff()
    var currentSoundFile: OnandOff?

    // MARK: - Audio graph

    var engine = AVAudioEngine()
    var sampler = AVAudioUnitSampler()
    // Rebuilt each time the view appears. The sequencer holds a reference to
    // `engine`, so it must be stopped and released *before* the engine is
    // stopped/deallocated — tearing down in the wrong order is what raises
    // "required condition is false: outputNode" when segueing away.
    var sequencer: AVAudioSequencer!

    // MARK: - View lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        // `exercises`, `myIndex` and `musicNotes` are declared elsewhere in the project.
        descriptionLabel.text = exercises[myIndex]
        musicView.image = musicNotes[myIndex]
        engine = AVAudioEngine()
        sampler = AVAudioUnitSampler()
        engine.attach(sampler)
        engine.connect(sampler, to: engine.mainMixerNode, format: nil)
        loadSF2PresetIntoSampler(preset: 0)
        // Activate the audio session before starting the engine, not after.
        setSessionPlayback()
        startEngine()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)  // forward the real flag, not a hard-coded `true`
        self.navigationController?.isNavigationBarHidden = false
        setupSequencer()
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Tear down in dependency order: stop the sequencer first, then the
        // engine, and drop the sequencer so it cannot outlive the engine.
        // This prevents the 'required condition is false: outputNode' crash
        // on unwind/back segues.
        sequencer?.stop()
        engine.stop()
        sequencer = nil
    }

    // MARK: - Audio session / engine

    /// Configures the shared audio session for mixable playback and activates it.
    func setSessionPlayback() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayback,
                                         with: AVAudioSessionCategoryOptions.mixWithOthers)
        } catch {
            print("couldn't set category \(error)")
            return
        }
        do {
            try audioSession.setActive(true)
            print("category is active")
        } catch {
            print("couldn't set category to active \(error)")
            return
        }
    }

    /// Starts the engine unless it is already running.
    func startEngine() {
        if engine.isRunning {
            print("audio engine has already started")
            return
        }
        do {
            try engine.start()
            print("audio engine started")
        } catch {
            print("oops \(error)")
            print("could not start audio engine")
        }
    }

    // MARK: - Sequencer

    /// Creates a fresh sequencer bound to `engine` and loads the MIDI data for
    /// the currently selected sound.
    func setupSequencer() {
        let allSounds = SoundBankOnAndOff()
        let currentSoundFile = allSounds.list[myIndex]
        sequencer = AVAudioSequencer(audioEngine: engine)
        let options = AVMusicSequenceLoadOptions.smfChannelsToTracks
        // NOTE(review): the guard only checks that *some* .mid resources exist
        // in "On & Off MIDI"; the data actually loaded comes from
        // `currentSoundFile.soundFile` — confirm that is intentional.
        if let fileURL = Bundle.main.urls(forResourcesWithExtension: "mid", subdirectory: "On & Off MIDI") {
            do {
                try sequencer.load(from: (currentSoundFile.soundFile), options: options)
                print("loaded \(fileURL)")
            } catch {
                print("something messed up \(error)")
                return
            }
        }
        sequencer.prepareToPlay()
    }

    // MARK: - Transport

    /// Restarts sequencer playback from the beginning.
    func play() {
        if sequencer.isPlaying {
            stop()
        }
        sequencer.currentPositionInBeats = TimeInterval(0)
        do {
            try sequencer.start()
        } catch {
            print("cannot start \(error)")
        }
    }

    func stop() {
        sequencer.stop()
    }

    /// Loads a preset from the bundled "Pad Sounds.sf2" sound font into the sampler.
    func loadSF2PresetIntoSampler(preset: UInt8) {
        guard let bankURL = Bundle.main.url(forResource: "Pad Sounds", withExtension: "sf2") else {
            print("could not load sound font")
            return
        }
        do {
            try sampler.loadSoundBankInstrument(at: bankURL,
                                                program: preset,
                                                bankMSB: UInt8(kAUSampler_DefaultMelodicBankMSB),
                                                bankLSB: UInt8(kAUSampler_DefaultBankLSB))
        } catch {
            print("error loading sound bank instrument")
        }
    }

    // MARK: - Actions

    @IBAction func playButtonPressed(sender: UIButton) {
        if sender.currentTitle == "PLAY" {
            play()
            sender.setTitle("STOP", for: .normal)
        } else if sender.currentTitle == "STOP" {
            sender.setTitle("PLAY", for: .normal)
            stop()
        }
    }

    @IBAction func BackButtonPressed(_ sender: Any) {
        performSegue(withIdentifier: "unwindToDetailVC", sender: self)
    }
}

This code is similar to yours and doesn't crash when segueing or unwinding:
class MP3PlayerVC: UIViewController {

    // Engine graph: player -> distortion -> outputNode, wired once in viewDidLoad.
    let audioEngine = AVAudioEngine()
    let audioPlayer = AVAudioPlayerNode()
    let distortion = AVAudioUnitDistortion()

    override func viewDidLoad() {
        super.viewDidLoad()
        audioEngine.attach(audioPlayer)
        audioEngine.attach(distortion)
        audioEngine.connect(audioPlayer, to: distortion, format: nil)
        audioEngine.connect(distortion, to: audioEngine.outputNode, format: nil)
    }

    /// Toggles playback of the bundled "test.mp3": stops the player and engine
    /// if playing, otherwise schedules the file and starts both.
    @IBAction func buttonClicked(_ sender: UIButton) {
        guard let filePath = Bundle.main.path(forResource: "test", ofType: "mp3") else { return }
        let url = URL(fileURLWithPath: filePath)
        guard let audioFile = try? AVAudioFile(forReading: url) else { return }
        if audioPlayer.isPlaying {
            audioPlayer.stop()
            audioEngine.stop()
        } else {
            audioPlayer.scheduleFile(audioFile, at: nil, completionHandler: nil)
            do {
                try audioEngine.start()
                audioPlayer.play()
            } catch {
                print("something went wrong")
            }
        }
    }
}

Related

Voice recording not working on simulator swift ios

I'm working on a voice recorder. When I run the code in the simulator it doesn't work. I want to upload a one-second voice recording to the backend when the user records it. How can I get the URL of the voice recording and store it on the backend server? Can voice recording be tested in the simulator, or only on a real device? I've watched some resources where it was tested in the simulator. Please see my code and guide me on the best way to record the voice and store it on the backend server.
/// Returns the app's Documents directory (user domain).
func getDocumentsDirectory() -> URL {
    let documentDirectories = FileManager.default.urls(for: .documentDirectory,
                                                       in: .userDomainMask)
    return documentDirectories[0]
}
class ViewController: UIViewController {

    @IBOutlet var recordButton: UIButton!
    @IBOutlet var playButton: UIButton!

    var recordingSession: AVAudioSession!
    var audioRecorder: AVAudioRecorder!   // non-nil while a recording is in progress
    var audioPlayer: AVAudioPlayer!       // non-nil while playback is in progress

    override func viewDidLoad() {
        super.viewDidLoad()
        recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession.setCategory(.playAndRecord, mode: .default)
            try recordingSession.setActive(true)
            // [weak self] instead of [unowned self]: the permission callback
            // can arrive after this view controller has been dismissed.
            recordingSession.requestRecordPermission { [weak self] allowed in
                DispatchQueue.main.async {
                    if allowed {
                        self?.loadRecordingUI()
                    } else {
                        // failed to record
                    }
                }
            }
        } catch {
            // failed to record!
        }
    }

    /// Reveals the record button once microphone permission is granted.
    func loadRecordingUI() {
        recordButton.isHidden = false
        recordButton.setTitle("Tap to Record", for: .normal)
    }

    /// Toggles between starting a recording and finishing the current one.
    @IBAction func recordButtonPressed(_ sender: UIButton) {
        if audioRecorder == nil {
            startRecording()
        } else {
            finishRecording(success: true)
        }
    }

    /// Toggles between starting playback and stopping the current playback.
    @IBAction func playButtonPressed(_ sender: UIButton) {
        if audioPlayer == nil {
            startPlayback()
        } else {
            finishPlayback()
        }
    }

    /// Starts recording mono AAC audio to Documents/recording.m4a.
    func startRecording() {
        let audioFilename = getDocumentsDirectory().appendingPathComponent("recording.m4a")
        let settings = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue
        ]
        do {
            audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
            audioRecorder.delegate = self
            audioRecorder.record()
            recordButton.setTitle("Tap to Stop", for: .normal)
        } catch {
            finishRecording(success: false)
        }
    }

    /// Stops the recorder, clears it, and updates the UI for success/failure.
    func finishRecording(success: Bool) {
        audioRecorder.stop()
        audioRecorder = nil
        if success {
            recordButton.setTitle("Tap to Re-record", for: .normal)
            playButton.setTitle("Play Your Recording", for: .normal)
            playButton.isHidden = false
        } else {
            recordButton.setTitle("Tap to Record", for: .normal)
            playButton.isHidden = true
            // recording failed :(
        }
    }

    /// Plays back Documents/recording.m4a; hides the play button on failure.
    func startPlayback() {
        let audioFilename = getDocumentsDirectory().appendingPathComponent("recording.m4a")
        do {
            audioPlayer = try AVAudioPlayer(contentsOf: audioFilename)
            audioPlayer.delegate = self
            audioPlayer.play()
            playButton.setTitle("Stop Playback", for: .normal)
        } catch {
            playButton.isHidden = true
            // unable to play recording!
        }
    }

    /// Drops the player and resets the play button title.
    func finishPlayback() {
        audioPlayer = nil
        playButton.setTitle("Play Your Recording", for: .normal)
    }
}
// MARK: - AVAudioRecorderDelegate
extension ViewController: AVAudioRecorderDelegate {
    /// Treats a recording the system ended unsuccessfully as a failed recording.
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        guard !flag else { return }
        finishRecording(success: false)
    }
}
// MARK: - AVAudioPlayerDelegate
extension ViewController: AVAudioPlayerDelegate {
// Resets the playback UI once the recording has played to the end (or failed).
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
finishPlayback()
}
}

Swift -How to get movie file outputFileURL before or while app goes to/in background

The problem I have is if the camera is recording, when the app has either entered the bg or is about to enter the bg, I stop recording but the outputFileURL is never saved. I always get an error of "Recording Stopped". I individually tried to stop the recording using all 3 of the Notification methods below but nada.
// Movie-file output shared by the notification handlers below.
let movieFileOutput = AVCaptureMovieFileOutput()
// Stop recording as the app resigns active so the file gets finalised.
#objc func appWillEnterBackground() { // UIApplication.willResignActiveNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
// Same teardown, driven by the did-enter-background notification instead.
#objc func didEnterBackground() { // UIApplication.didEnterBackgroundNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
// NOTE(review): this handler is quoted with elisions ("// ...") and is not
// syntactically complete as shown — the `let reason = ...` line is presumably
// the tail of an `if let` in the full project source; confirm against it.
#objc func sessionWasInterrupted(notification: NSNotification) { // .AVCaptureSessionWasInterrupted triggers this
// ...
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
switch reason {
case .videoDeviceNotAvailableInBackground:
DispatchQueue.main.async { [weak self] in
if self!.movieFileOutput.isRecording {
self!.movieFileOutput.stopRecording()
}
}
}
}
// Delegate callback: on success the clip is appended to `arrayVideos`;
// when recording is cut short, only the error is logged.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
self.arrayVideos.append(asset)
} else {
print(error!.localizedDescription) // prints "Recording Stopped"
}
}
Just to be clear, I do not want to record while in the background. I want to get the outputFileURL after movieFileOutput.stopRecording() is triggered while the app is either on its way to the bg or has entered the bg.
This is too long for a comment, so I'll post it here.
I tested this on iOS 13.5.1, it seems to stop recording automatically when App goes to background and the video is saved.
The following is the code I used to test:
import UIKit
import AVKit
class ViewController: UIViewController {

    @IBOutlet weak var contentView: UIView!

    let captureSession = AVCaptureSession()
    var movieFileOutput: AVCaptureMovieFileOutput?   // non-nil only while recording
    var captureDevice: AVCaptureDevice?

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        startCapturing()
    }

    /// Configures the session with the back wide-angle camera, attaches a
    /// preview layer to `contentView`, and starts the session running.
    func startCapturing() {
        captureSession.sessionPreset = .hd1280x720
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                mediaType: .video,
                                                                position: .back)
        guard let captureDevice = discoverySession.devices.first else {
            print("Failed to discover session")   // fixed log typo ("discovert")
            return
        }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Failed to create capture device input")
            return
        }
        captureSession.addInput(input)
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = contentView.bounds
        contentView.layer.addSublayer(previewLayer)
        captureSession.startRunning()
    }

    /// Starts an H.264 recording to Documents/out.mp4. No-op if already recording.
    func startRecording() {
        guard self.movieFileOutput == nil else { return }
        let movieFileOutput = AVCaptureMovieFileOutput()
        if let connection = movieFileOutput.connection(with: .video) {
            movieFileOutput.setOutputSettings([AVVideoCodecKey: AVVideoCodecType.h264], for: connection)
        }
        captureSession.addOutput(movieFileOutput)
        if let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first {
            let outputUrl = directory.appendingPathComponent("out.mp4")
            movieFileOutput.startRecording(to: outputUrl, recordingDelegate: self)
        }
        self.movieFileOutput = movieFileOutput
    }

    /// Ends the current recording, if any, and detaches the output from the session.
    func stopRecording() {
        guard let movieFileOutput = self.movieFileOutput else { return }
        movieFileOutput.stopRecording()
        captureSession.removeOutput(movieFileOutput)
        self.movieFileOutput = nil
    }

    @IBAction func onStartClick(_ sender: Any) {
        startRecording()
    }

    @IBAction func onStopClick(_ sender: Any) {
        stopRecording()
    }
}
// MARK: - AVCaptureFileOutputRecordingDelegate
extension ViewController: AVCaptureFileOutputRecordingDelegate {
    /// Logs where the finished movie landed, or the error that ended the recording.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        print("File saved to: \(outputFileURL), error: \(error)")
    }
}

i cant seem to find the size of the file when im recording audio

This is my entire view controller with some basic recording and playing code. It records and plays fine, but I want to get the file size printed in the console, and somehow it gives me this error:
Failed to get file attributes for local path: file:///Users/ashutoshmane/Library/Developer/CoreSimulator/Devices/DF46178B-BD38-45A9-87D8-4D4F378EEE8B/data/Containers/Data/Application/B949B5FF-9ACB-485F-BAE5-39F2D32664A0/Documents/sound.caf with error: Error Domain=NSCocoaErrorDomain Code=260 "The file “sound.caf” couldn’t be opened because there is no such file."
import UIKit
import AudioKit
import AVFoundation
class MyViewController: UIViewController, AVAudioPlayerDelegate, AVAudioRecorderDelegate
{
    // MARK: - Audio player and recorder state
    var audioPlayer: AVAudioPlayer?
    var audioRecorder: AVAudioRecorder?
    var fileSize: UInt64 = 0

    // Recording settings: mono, 22.05 kHz, 128 kbps, minimum encoder quality.
    var recordSettings =
        [AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue,
         AVEncoderBitRateKey: 128,
         AVNumberOfChannelsKey: 1,
         AVSampleRateKey: 22050] as [String: Any]

    /// Returns the URL Documents/sound.caf that recordings are written to.
    func createFile() -> URL {
        let dirPaths = FileManager.default.urls(for: .documentDirectory,
                                                in: .userDomainMask)
        let soundFileURL = dirPaths[0].appendingPathComponent("sound.caf")
        print(soundFileURL)
        return soundFileURL
    }

    /// Prints the on-disk size of the file at `filesUrl`.
    ///
    /// Bug fix: `FileManager.attributesOfItem(atPath:)` expects a plain
    /// filesystem path, not a "file://..." URL string. The original passed
    /// `String(describing: filesUrl)`, which is exactly what produced the
    /// NSCocoaErrorDomain Code=260 "no such file" error; use `filesUrl.path`.
    func KnowFileSize(filesUrl: URL) {
        let path = filesUrl.path
        do {
            let fileAttribute = try FileManager.default.attributesOfItem(atPath: path)
            fileSize = fileAttribute[FileAttributeKey.size] as! UInt64
            print("file size is:", fileSize)
        } catch {
            print("Failed to get file attributes for local path: \(path) with error: \(error)")
        }
    }

    @IBOutlet weak var recordButton: UIButton!
    @IBOutlet weak var playButton: UIButton!
    @IBOutlet weak var stopButton: UIButton!

    // MARK: - Actions

    /// Record button: start recording if the recorder is idle.
    @IBAction func recordAudio(_ sender: Any) {
        if audioRecorder?.isRecording == false {
            playButton.isEnabled = false
            stopButton.isEnabled = true
            audioRecorder?.record()
        }
    }

    /// Stop button: stop whichever of recording/playback is currently active.
    @IBAction func stopAudio(_ sender: Any) {
        stopButton.isEnabled = false
        playButton.isEnabled = true
        recordButton.isEnabled = true
        if audioRecorder?.isRecording == true {
            audioRecorder?.stop()
        } else {
            audioPlayer?.stop()
        }
    }

    /// Play button: play back the recorder's file when not recording.
    @IBAction func playAudio(_ sender: Any) {
        if audioRecorder?.isRecording == false {
            stopButton.isEnabled = true
            recordButton.isEnabled = false
            do {
                try audioPlayer = AVAudioPlayer(contentsOf: (audioRecorder?.url)!)
                audioPlayer!.delegate = self
                audioPlayer!.prepareToPlay()
                audioPlayer!.play()
            } catch let error as NSError {
                print("audioPlayer error: \(error.localizedDescription)")
            }
        }
    }

    // MARK: - View lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        print("in view didload")
        playButton.isEnabled = false
        stopButton.isEnabled = false
        let soundFile = createFile()
        print(AVSampleRateKey)
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(
                AVAudioSessionCategoryPlayAndRecord)
        } catch let error as NSError {
            print("audioSession error: \(error.localizedDescription)")
        }
        do {
            try audioRecorder = AVAudioRecorder(url: soundFile,
                                                settings: recordSettings as [String: AnyObject])
            audioRecorder?.prepareToRecord()
            print(recordSettings)
        } catch let error as NSError {
            print("audioSession error: \(error.localizedDescription)")
        }
        // NOTE(review): the delegate callback is invoked manually here, and the
        // size is checked before anything has actually been recorded —
        // sound.caf will not exist on disk until record()/stop() have run.
        audioRecorderDidFinishRecording(audioRecorder!, successfully: true)
        print(soundFile)
        KnowFileSize(filesUrl: soundFile)
    }

    // MARK: - Delegate callbacks

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        recordButton.isEnabled = true
        stopButton.isEnabled = false
    }

    func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        print("Audio Play Decode Error")
    }

    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
    }

    func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
        print("Audio Record Encode Error")
    }
}

Do not trigger remoteControlReceived in UIView after load the view

I implement an audio player using AVAudioPlayer as the following.
class ViewController: UIViewController {

    // NOTE(review): the `audioPlayer` and `btnPlay` property declarations are
    // not shown in this excerpt — they exist elsewhere in the project source.

    override func viewDidLoad() {
        super.viewDidLoad()
        try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategorySoloAmbient, with: AVAudioSessionCategoryOptions.allowBluetooth)
        try? AVAudioSession.sharedInstance().setActive(true)
        if let path = Bundle.main.path(forResource: "LadyGaga-MillionReasons", ofType: "mp3") {
            // Bug fix: a local file must be wrapped with URL(fileURLWithPath:);
            // URL(string:) yields a scheme-less URL that AVAudioPlayer rejects.
            let url = URL(fileURLWithPath: path)
            self.audioPlayer = try? AVAudioPlayer(contentsOf: url)
            self.audioPlayer?.prepareToPlay()
        }
        // NOTE(review): becoming first responder here can be too early for
        // remote-control (headset) events — moving this to viewDidAppear(_:)
        // is the usual fix for the behaviour described in the question.
        self.becomeFirstResponder()
        UIApplication.shared.beginReceivingRemoteControlEvents()
    }

    override var canBecomeFirstResponder: Bool {
        return true
    }

    override var canResignFirstResponder: Bool {
        return true
    }

    /// Toggles playback and the button title. (Force-unwraps on the optional
    /// player removed: `== true` treats a nil player as "not playing".)
    @IBAction func btnPlay_TouchUpInsde(_ sender: Any) {
        if self.audioPlayer?.isPlaying == true {
            self.audioPlayer?.pause()
            self.btnPlay.setTitle("Play", for: .normal)
        } else {
            self.audioPlayer?.play()
            self.btnPlay.setTitle("Pause", for: .normal)
        }
    }

    /// Routes headset / remote transport events to the player.
    override func remoteControlReceived(with event: UIEvent?) {
        guard let e = event, e.type == .remoteControl else { return }
        if e.subtype == UIEventSubtype.remoteControlPause {
            self.audioPlayer?.pause()
        } else if e.subtype == .remoteControlPlay {
            self.audioPlayer?.play()
        } else if e.subtype == .remoteControlTogglePlayPause {
            if self.audioPlayer?.isPlaying == true {
                self.audioPlayer?.pause()
            } else {
                self.audioPlayer?.play()
            }
        }
    }
}
The app is launched, then I click the play button, and the audio plays OK.
Then I pause the audio with a headset, and everything works OK.
Another situation:
The app is launched, then I try to start the audio with a headset, but it does not work.
It seems that the view is not the first responder before I click the button, even though I add self.becomeFirstResponder in the init function.
Who knows why the app does not get the remoteControlReceived event when the button has not been clicked?
I implement a sample. https://github.com/leogeng/AuidoTest.git

Swift - Stop avaudioplayer

I am trying to build a soundboard into an app and have figured out an efficient way of using tags to control playing the sounds. However, I am now trying to integrate a pause button that uses the .stop() method on the AVAudioPlayer, and with my current code I get an error:
EXC_BAD_ACCESS
This is what I am using at the moment, any ideas?
import UIKit
import AVFoundation
// Names of the bundled sound resources and one prepared player per sound.
let soundFilenames = ["sound", "sound2", "sound3"]
var audioPlayers = [AVAudioPlayer]()

class SecondViewController: UIViewController {

    // Tracks the most recently started player so stop() targets the player
    // that is actually playing — stopping a fresh, never-loaded
    // AVAudioPlayer() is what caused the EXC_BAD_ACCESS.
    var audioPlayer = AVAudioPlayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Swift 2-era code (NSURL/NSBundle/contentsOfURL) kept as-is.
        for sound in soundFilenames {
            do {
                let url = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(sound, ofType: "mp3")!)
                let audioPlayer = try AVAudioPlayer(contentsOfURL: url)
                audioPlayers.append(audioPlayer)
            } catch {
                // Catch error thrown
                audioPlayers.append(AVAudioPlayer())
            }
        }
    }

    /// Bug fix: remember the tapped sound's player in the shared property,
    /// instead of shadowing it with a local, so stop() can reach it.
    @IBAction func buttonPressed(sender: UIButton) {
        audioPlayer = audioPlayers[sender.tag]
        audioPlayer.play()
    }

    @IBAction func stop(sender: UIButton) {
        audioPlayer.stop()
    }
}
Your audioPlayer in stop function is not the playing player. You should assign it in buttonPressed function.
/// Stores the tapped sound's player in the shared `audioPlayer` property so
/// that the stop action later stops the player that is actually playing.
@IBAction func buttonPressed(sender: UIButton) {
    audioPlayer = audioPlayers[sender.tag]
    audioPlayer.play()
}
By the way, You can mark audioPlayer as a "?" property, it will be more efficient when init this Controller.
// Sketch of the answer's suggested design: an optional current player plus an
// opt-in multi-player mode. NOTE(review): this listing is abridged ("....")
// and `if sender.tag < audioPlayers.count else` is presumably meant to be a
// `guard` statement — it does not compile as written.
class SecondViewController: UIViewController {
var audioPlayer: AVAudioPlayer?
let enableMuiltPlayers = false
....
#IBAction func buttonPressed(sender: UIButton) {
if sender.tag < audioPlayers.count else {
print("out of range")
return
}
if enableMuiltPlayers {
audioPlayers[sender.tag].play()
} else {
audioPlayer?.stop()
//set the current playing player
audioPlayer = audioPlayers[sender.tag]
audioPlayer?.play()
}
}
#IBAction func stop(sender: UIButton) {
let wantToStopAll = false
if enableMuiltPlayers && wantToStopAll {
stopAll()
} else {
audioPlayer?.stop()
}
audioPlayer = nil
}
}
to stop all:
/// Stops every player in the shared `audioPlayers` array.
/// (Fixed keyword typo: `fun` -> `func`.)
func stopAll() {
    for player in audioPlayers {
        player.stop()
    }
}
Your code may have other faults, but there's one thing sure:
You should not instantiate AVAudioPlayer using default initializer AVAudioPlayer().
Change this line:
var audioPlayer = AVAudioPlayer()
to:
var playingAudioPlayer: AVAudioPlayer?
And change this part:
} catch {
//Catch error thrown
audioPlayers.append(AVAudioPlayer())
}
to something like this:
} catch {
//Catch error thrown
fatalError("Sound resource: \(sound) could not be found")
}
(The latter part is very important to solve the issue. But I found it had become just a duplicate of some part of Hao's answer after I edited it...)
And start method:
/// Plays the sound for the tapped button and remembers it as the active player.
/// (AVAudioPlayer has no `start()` method — the call must be `play()`.)
@IBAction func start(sender: UIButton) {
    let audioPlayer = audioPlayers[sender.tag]
    audioPlayer.play()
    playingAudioPlayer = audioPlayer
}
And stop should be:
/// Stops whichever player `start(sender:)` most recently began.
/// (Renamed from the duplicated `start` in the original listing — the
/// surrounding text says "And stop should be:".)
@IBAction func stop(sender: UIButton) {
    playingAudioPlayer?.stop()
}
// Stray fragment (Swift 2-era: `playing` is the old name of `isPlaying`):
// defensively stop the player only if it exists and is currently playing.
if audioPlayer != nil {
if audioPlayer.playing {
audioPlayer.stop()
}
}

Resources