import UIKit
import Photos
import AudioKit
import AudioKitUI
class VoicesController: CaptureContentController {
@IBOutlet weak var nodeOutputPlot: AKNodeOutputPlot!
@IBOutlet weak var recordButton: UIButton!
@IBAction func recordButtonAction(_ sender: UIButton, forEvent event: UIEvent) {
guard let phase = event.allTouches?.first?.phase else {
return
}
if phase == .began {
try? nodeRecorder.record()
}
if phase == .ended {
nodeRecorder.stop()
try? audioPlayer.reloadFile()
let fileName = NSUUID().uuidString + ".wav"
audioPlayer.audioFile.exportAsynchronously(name: fileName, baseDir: .documents, exportFormat: .wav, callback: { [weak self] (audioFile, error) in
if let error = error {
print(#line, error)
return
}
// // FIXME:
// if let url = audioFile?.avAsset.url {
// _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)!
// // creationRequest.location
// }
if let fileName = audioFile?.avAsset.url.lastPathComponent {
let asset = VPFAsset()
asset.mediaType = .audio
asset.urlString = fileName
asset.thumbUrlString = ""
self?.captureDelegate?.append(asset)
}
})
}
}
var audioPlayer: AKAudioPlayer!
var nodeRecorder: AKNodeRecorder!
var micMixer: AKMixer!
var mainMixer: AKMixer!
var booster: AKBooster!
var moogLadder: AKMoogLadder!
let microphone = AKMicrophone()
@objc func didBecomeActive(_ notification: Notification) {
AudioKit.start()
}
@objc func willResignActive(_ notification: Notification) {
AudioKit.stop()
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
AKAudioFile.cleanTempDirectory()
AKSettings.bufferLength = .medium
do {
try AKSettings.setSession(category: .playAndRecord)
} catch {
print(error)
}
AKSettings.defaultToSpeaker = true
nodeOutputPlot.node = microphone
micMixer = AKMixer(microphone)
booster = AKBooster(micMixer)
booster.gain = 0
nodeRecorder = try? AKNodeRecorder(node: micMixer)
if let audioFile = nodeRecorder.audioFile {
audioPlayer = try? AKAudioPlayer(file: audioFile)
audioPlayer.completionHandler = {
print(#line)
}
}
moogLadder = AKMoogLadder(audioPlayer)
mainMixer = AKMixer(moogLadder, booster)
AudioKit.output = mainMixer
AudioKit.start()
NotificationCenter.default
.addObserver(self, selector: #selector(didBecomeActive), name: .UIApplicationDidBecomeActive, object: nil)
NotificationCenter.default
.addObserver(self, selector: #selector(willResignActive), name: .UIApplicationWillResignActive, object: nil)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
deinit {
print("deinit VoicesController")
AudioKit.stop()
NotificationCenter.default
.removeObserver(self)
}
}
Hello, my English is not good; I am from China. I really like AudioKit because it is convenient, but after a recent system update I encountered a crash. It does not crash every time; it seems to crash only once, and after that there are no more crashes. I took a screenshot. Since the system does not always update, I am hoping to get some help. Thank you very much. I am a novice, please advise.
Here is the screenshot:
Thread 1: Fatal error: AudioKit: Could not start engine. error: Error Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)}.
In the latest develop branch (not yet released), you have to wrap AudioKit.start() in a do/try/catch block:
do {
try AudioKit.start()
} catch {
AKLog("Something went wrong.")
}
All of the examples included with AudioKit show how to implement this change.
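For reference, here is a minimal sketch (an assumption about that develop-branch API, not taken from the AudioKit docs) of the same change applied to the didBecomeActive handler from the question above; AudioKit.stop() may need similar treatment on that branch:
@objc func didBecomeActive(_ notification: Notification) {
    do {
        try AudioKit.start()   // throws on the develop branch, per the note above
    } catch {
        AKLog("Could not restart AudioKit: \(error)")
    }
}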
Related
The problem I have is that if the camera is recording when the app has either entered the background or is about to enter the background, I stop recording but the outputFileURL is never saved. I always get a "Recording Stopped" error. I individually tried to stop the recording using each of the 3 notification methods below, but nothing worked.
let movieFileOutput = AVCaptureMovieFileOutput()
@objc func appWillEnterBackground() { // UIApplication.willResignActiveNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
@objc func didEnterBackground() { // UIApplication.didEnterBackgroundNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
@objc func sessionWasInterrupted(notification: NSNotification) { // .AVCaptureSessionWasInterrupted triggers this
// ...
if let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
switch reason {
case .videoDeviceNotAvailableInBackground:
DispatchQueue.main.async { [weak self] in
if self?.movieFileOutput.isRecording == true {
self?.movieFileOutput.stopRecording()
}
}
default:
break
}
}
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
self.arrayVideos.append(asset)
} else {
print(error!.localizedDescription) // prints "Recording Stopped"
}
}
Just to be clear, I do not want to record while in the background. I want to get the outputFileURL after movieFileOutput.stopRecording() is triggered while the app is either on its way to the background or has already entered it.
This is too long for a comment, so I'll post it here.
I tested this on iOS 13.5.1; it seems to stop recording automatically when the app goes to the background, and the video is saved.
The following is the code I used to test:
import UIKit
import AVKit
class ViewController: UIViewController {
@IBOutlet weak var contentView: UIView!
let captureSession = AVCaptureSession()
var movieFileOutput: AVCaptureMovieFileOutput?
var captureDevice : AVCaptureDevice?
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
startCapturing()
}
func startCapturing() {
captureSession.sessionPreset = .hd1280x720
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back)
guard let captureDevice = discoverySession.devices.first else {
print("Failed to discovert session")
return
}
guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {
print("Failed to create capture device input")
return
}
captureSession.addInput(input)
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = contentView.bounds
contentView.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func startRecording() {
if self.movieFileOutput == nil {
let movieFileOutput = AVCaptureMovieFileOutput()
if let connection = movieFileOutput.connection(with: .video) {
movieFileOutput.setOutputSettings([AVVideoCodecKey:AVVideoCodecType.h264], for: connection)
}
captureSession.addOutput(movieFileOutput)
if let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first {
let outputUrl = directory.appendingPathComponent("out.mp4")
movieFileOutput.startRecording(to: outputUrl, recordingDelegate: self)
}
self.movieFileOutput = movieFileOutput
}
}
func stopRecording() {
if let movieFileOutput = self.movieFileOutput {
movieFileOutput.stopRecording()
captureSession.removeOutput(movieFileOutput)
self.movieFileOutput = nil
}
}
@IBAction func onStartClick(_ sender: Any) {
startRecording()
}
@IBAction func onStopClick(_ sender: Any) {
stopRecording()
}
}
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print("File saved to: \(outputFileURL), error: \(error)")
}
}
I do not know if this is possible, but I have a problem achieving it.
I have a UIView (VideoView) that has an AVPlayerLayer, plays a video from assets, and has its own audio.
Over this view, I have another (smaller) UIView (CameraView) that has:
AVCaptureVideoPreviewLayer (show front camera)
AVCaptureSession
AVCaptureDevice
AVAudioSession
to show a preview of the user's front camera and record audio & video of the user.
Both run at the same time, similar to a FaceTime call.
The AVAudioSession in CameraView is set up something like this:
let audioSession = AVAudioSession.sharedInstance()
do {
if #available(iOS 11.0, *) {
try audioSession.setCategory(.playAndRecord, mode: .spokenAudio, policy: AVAudioSession.RouteSharingPolicy.default, options: [.duckOthers])
} else {
try audioSession.setCategory(.playAndRecord, mode: .spokenAudio)
}
try audioSession.setActive(true)
} catch let error {
print("Audio setup error", #file, #function, #line, error.localizedDescription)
}
But I still get some kind of echo in the video recorded by CameraView, coming from the video playing in VideoView.
I already tried saving the audio separately, but the problem is the same.
This is my first AV* app, and I do not have much experience with audio/video at all.
Could someone point out my errors or point me in the right direction?
VideoView.swift
protocol VideoViewDelegate {
func didEndVideo()
func didEndVideo(of instance: VideoView)
func isVideoReady()
}
class VideoView: UIView {
var playerLayer: AVPlayerLayer?
var player: AVPlayer?
var isLoop: Bool = false
var delegate: VideoViewDelegate!
var observer: NSKeyValueObservation?
override init(frame: CGRect) {
super.init(frame: frame)
backgroundColor = .black
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
func ply(for url: URL) {
player = AVPlayer(url: url)
playerLayer = AVPlayerLayer(player: player)
playerLayer?.frame = bounds
playerLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
if let playerLayer = self.playerLayer {
layer.addSublayer(playerLayer)
}
NotificationCenter.default.addObserver(self, selector: #selector(reachTheEndOfTheVideo), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player?.currentItem)
}
func prepare(url: URL) {
player?.replaceCurrentItem(with: AVPlayerItem(url: url))
NotificationCenter.default.addObserver(self, selector: #selector(reachTheEndOfTheVideo), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player?.currentItem)
}
func destroyMe() {
stop()
NotificationCenter.default.removeObserver(self)
observer?.invalidate()
}
func play() {
if player?.timeControlStatus != AVPlayer.TimeControlStatus.playing {
player?.play()
}
}
func pause() {
player?.pause()
}
func stop() {
player?.pause()
player?.seek(to: CMTime.zero)
}
@objc
func reachTheEndOfTheVideo(_ notification: Notification) {
if isLoop {
player?.pause()
player?.seek(to: CMTime.zero)
player?.play()
} else {
player?.pause()
player?.seek(to: CMTime.zero)
UIView.animate(withDuration: 0.4) { [weak self] in
self?.transform = .identity
}
if delegate != nil { delegate.didEndVideo() }
if delegate != nil { delegate.didEndVideo(of: self) }
}
}
}
and,
CameraView.swift
final class CameraView: UIView {
private lazy var videoDataOutput: AVCaptureVideoDataOutput = {
let v = AVCaptureVideoDataOutput()
v.alwaysDiscardsLateVideoFrames = true
v.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
v.connection(with: .video)?.isEnabled = true
return v
}()
private lazy var session: AVCaptureSession = {
let s = AVCaptureSession()
s.sessionPreset = .hd1280x720 //.cif352x288 //.vga640x480
s.automaticallyConfiguresApplicationAudioSession = false
return s
}()
private lazy var previewLayer: AVCaptureVideoPreviewLayer = {
let l = AVCaptureVideoPreviewLayer(session: session)
l.videoGravity = .resizeAspect
return l
}()
private let videoDataOutputQueue: DispatchQueue = DispatchQueue(label: "\(Bundle.main.bundleIdentifier ?? "").videoDataOutputQueue")
private let captureDevice: AVCaptureDevice? = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
private let audioCaptureDevice: AVCaptureDevice? = AVCaptureDevice.default(for: .audio)
private var audioSession: AVAudioSession!
var movieOutput = AVCaptureMovieFileOutput()
override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
private func commonInit() {
contentMode = .scaleAspectFit
beginSession()
}
private func beginSession() {
do {
guard let captureDevice = captureDevice else { return }
guard let audioCaptureDevice = audioCaptureDevice else { return }
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
if session.canAddInput(deviceInput) {
session.addInput(deviceInput)
}
let aDeviceInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
audioSession = AVAudioSession.sharedInstance()
do {
if #available(iOS 11.0, *) {
try audioSession.setCategory(.playAndRecord, mode: .spokenAudio, policy: AVAudioSession.RouteSharingPolicy.default, options: [.duckOthers])
} else {
try audioSession.setCategory(.playAndRecord, mode: .spokenAudio)
}
try audioSession.setActive(true)
} catch let error {
print("Audio setup error", #file, #function, #line, error.localizedDescription)
}
if session.canAddInput(aDeviceInput) {
session.addInput(aDeviceInput)
}
if session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
}
layer.masksToBounds = true
layer.addSublayer(previewLayer)
previewLayer.frame = bounds
session.addOutput(movieOutput)
session.startRunning()
startRecording()
} catch let error {
debugPrint("\(self.self): \(#function) line: \(#line). \(error.localizedDescription)")
}
}
func startRecording() {
// FileSys is a class that finds the current working directory
let exportURL = FileSys.getCurrentWorkingDir().appendingPathComponent("Feed.mov")
var videoConnection: AVCaptureConnection?
for connection in self.movieOutput.connections {
for port in connection.inputPorts {
if port.mediaType == AVMediaType.video {
videoConnection = connection
if videoConnection!.isVideoMirroringSupported {
videoConnection!.isVideoMirrored = true
}
}
}
}
videoConnection?.videoOrientation = .portrait
movieOutput.startRecording(to: exportURL, recordingDelegate: self)
}
override func layoutSubviews() {
super.layoutSubviews()
previewLayer.frame = bounds
}
}
extension CameraView: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print("FILE OUTPUT:", #function, error.debugDescription)
print(outputFileURL.path)
}
}
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate {}
Thanks
I want to make an ear-training app, so I want to make a sound while the microphone analyzes the frequency. I'm at the beginning, trying to prove the concept, so for now I just took AudioKit's sample app MicrophoneAnalysis and added some code to make a sound.
import AudioKit
import AudioKitUI
import UIKit
class ViewController: UIViewController {
var oscillator1 = AKOscillator()
var oscillator2 = AKOscillator()
var mixer = AKMixer()
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
mixer = AKMixer(oscillator1, oscillator2)
mixer.volume = 0.5
AudioKit.output = mixer
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
}
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!
override func viewDidLoad() {
super.viewDidLoad()
AKSettings.audioInputEnabled = true
mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
setupPlot()
Timer.scheduledTimer(timeInterval: 0.1,
target: self,
selector: #selector(ViewController.updateUI),
userInfo: nil,
repeats: true)
}
@objc func updateUI() {
if tracker.amplitude > 0.1 {
frequencyLabel.text = String(format: "%0.1f", tracker.frequency)
var frequency = Float(tracker.frequency)
while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
frequency /= 2.0
}
while frequency < Float(noteFrequencies[0]) {
frequency *= 2.0
}
var minDistance: Float = 10_000.0
var index = 0
for i in 0..<noteFrequencies.count {
let distance = fabsf(Float(noteFrequencies[i]) - frequency)
if distance < minDistance {
index = i
minDistance = distance
}
}
let octave = Int(log2f(Float(tracker.frequency) / frequency))
noteNameWithSharpsLabel.text = "\(noteNamesWithSharps[index])\(octave)"
noteNameWithFlatsLabel.text = "\(noteNamesWithFlats[index])\(octave)"
}
amplitudeLabel.text = String(format: "%0.2f", tracker.amplitude)
}
@IBAction func didTapASound(_ sender: Any) {
print("didTapASound")
mixer = AKMixer(oscillator1, oscillator2)
// Cut the volume in half since we have two oscillators
mixer.volume = 0.5
AudioKit.output = mixer
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
if oscillator1.isPlaying {
oscillator1.stop()
oscillator2.stop()
} else {
oscillator1.frequency = random(in: 220 ... 880)
oscillator1.start()
oscillator2.frequency = random(in: 220 ... 880)
// oscillator2.start()
}
}
I know I'm definitely doing something wrong by trying to run
AudioKit.output = mixer
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
and
AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
simultaneously.
I get the following errors.
[avae] AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAudioIONodeImpl.mm:910:SetOutputFormat: (IsFormatSampleRateAndChannelCountValid(hwFormat))]
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: IsFormatSampleRateAndChannelCountValid(hwFormat)'
I read this article but couldn't tell whether the person asking the question was able to solve the problem. Can someone tell me whether I can make a sound while the mic is on with AudioKit, and point me in the right direction to learn how to do it? Thanks!
I'm answering my own question. Of course it can make a sound and listen to the microphone simultaneously. All I had to do was put them together with AKMixer.
var oscillator = AKOscillator()
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!
override func viewDidLoad() {
super.viewDidLoad()
AKSettings.audioInputEnabled = true
mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
AudioKit.output = AKMixer(silence,oscillator)
//AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
setupPlot()
Timer.scheduledTimer(timeInterval: 0.1,
target: self,
selector: #selector(ViewController.updateUI),
userInfo: nil,
repeats: true)
}
@IBAction func makeASound(_ sender: Any) {
if oscillator.isPlaying {
print("Stop A Sound")
oscillator.stop()
} else {
print("Make A Sound")
oscillator.amplitude = 1.0
oscillator.frequency = 440.0
oscillator.start()
}
}
Just in case someone has the same question...
I am working with an AVAudioUnitSampler that is attached to an AVAudioEngine within my app. I've gotten everything to work well except when I segue out of the view controller that the sampler is instantiated in. I get a crash with an error that says:
Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: outputNode'
I'm guessing that this is because the engine is getting interrupted somehow when I segue back to my previous VC, but I'm not sure!
I've tried to stop the engine and also the sequencer inside of viewWillDisappear, but it still crashes.
If I use a UIButton show segue to the previous VC, it sort of works, but I get a crash with an unwind segue and with the current navigation bar segue.
I'm a newbie, so hopefully I've explained this well enough!
I am getting to this VC from a segue triggered from a table view on the previous VC.
Here is the code for the VC in question:
import UIKit
import AVFoundation
class PlayerViewController: UIViewController {
@IBOutlet weak var descriptionLabel: UILabel!
@IBOutlet weak var playPauseButton: UIButton!
@IBOutlet weak var musicView: UIImageView!
let allSounds = SoundBankOnAndOff()
var currentSoundFile: OnandOff?
var engine = AVAudioEngine()
var sampler = AVAudioUnitSampler()
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(true)
self.navigationController?.isNavigationBarHidden = false
setupSequencer()
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(true)
engine.stop()
sequencer.stop()
}
override func viewDidLoad() {
super.viewDidLoad()
descriptionLabel.text = exercises[myIndex]
musicView.image = musicNotes[myIndex]
engine = AVAudioEngine()
sampler = AVAudioUnitSampler()
engine.attach(sampler)
engine.connect(sampler, to: engine.mainMixerNode, format: nil)
loadSF2PresetIntoSampler(preset: 0)
startEngine()
setSessionPlayback()
}
func setSessionPlayback() {
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSessionCategoryPlayback, with: AVAudioSessionCategoryOptions.mixWithOthers)
} catch {
print("couldn't set category \(error)")
return
}
do {
try audioSession.setActive(true)
print("category is active")
} catch {
print("couldn't set category to active \(error)")
return
}
}
func startEngine() {
if engine.isRunning {
print("audio engine has already started")
return
}
do {
try engine.start()
print("audio engine started")
} catch {
print("oops \(error)")
print("could not start audio engine")
}
}
var sequencer:AVAudioSequencer!
func setupSequencer() {
let allSounds = SoundBankOnAndOff()
let currentSoundFile = allSounds.list[myIndex]
self.sequencer = AVAudioSequencer(audioEngine: engine)
let options = AVMusicSequenceLoadOptions.smfChannelsToTracks
if let fileURL = Bundle.main.urls(forResourcesWithExtension: "mid", subdirectory: "On & Off MIDI") {
do {
try sequencer.load(from: (currentSoundFile.soundFile), options: options)
print("loaded \(fileURL)")
} catch {
print("something messed up \(error)")
return
}
}
sequencer.prepareToPlay()
}
func play() {
if sequencer.isPlaying {
stop()
}
sequencer.currentPositionInBeats = TimeInterval(0)
do {
try sequencer.start()
} catch {
print("cannot start \(error)")
}
}
func stop() {
sequencer.stop()
}
func loadSF2PresetIntoSampler(preset:UInt8) {
guard let bankURL = Bundle.main.url(forResource: "Pad Sounds", withExtension: "sf2") else {
print("could not load sound font")
return
}
do {
try sampler.loadSoundBankInstrument(at: bankURL, program: preset,bankMSB: UInt8(kAUSampler_DefaultMelodicBankMSB),bankLSB: UInt8(kAUSampler_DefaultBankLSB))
} catch {
print("error loading sound bank instrument")
}
}
@IBAction func playButtonPressed(sender: UIButton) {
if sender.currentTitle == "PLAY"{
play()
sender.setTitle("STOP", for: .normal)
} else if sender.currentTitle == "STOP" {
sender.setTitle("PLAY", for: .normal)
stop()
}
}
@IBAction func BackButtonPressed(_ sender: Any) {
performSegue(withIdentifier: "unwindToDetailVC", sender: self)
}
}
This code is similar to yours and doesn't crash when segueing or unwinding:
class MP3PlayerVC: UIViewController {
let audioEngine = AVAudioEngine()
let audioPlayer = AVAudioPlayerNode()
let distortion = AVAudioUnitDistortion()
override func viewDidLoad() {
super.viewDidLoad()
audioEngine.attach(audioPlayer)
audioEngine.attach(distortion)
audioEngine.connect(audioPlayer, to: distortion, format: nil)
audioEngine.connect(distortion, to: audioEngine.outputNode, format: nil)
}
@IBAction func buttonClicked(_ sender: UIButton) {
if let filePath = Bundle.main.path(forResource: "test", ofType: "mp3") {
let url = URL.init(fileURLWithPath: filePath)
if let audioFile = try? AVAudioFile(forReading: url) {
if audioPlayer.isPlaying {
audioPlayer.stop()
audioEngine.stop()
} else {
audioPlayer.scheduleFile(audioFile, at: nil, completionHandler: nil)
do {
try audioEngine.start()
audioPlayer.play()
} catch {
print("something went wrong")
}
}
}
}
}
}
I made a custom class that handles audio recording/playback and declared a protocol in that class. I implemented the protocol in a UIViewController class and called my setDelegate method on my AudioHelper class.
I am getting a compile error that has to do with my init(). Not exactly sure how to get rid of the error:
use of 'self' in method call 'setupAudioSession' before super.init initializes self
override init() {
setupAudioSession()
super.init()
}
How do I resolve this error? And why do I have to override init()?
My AudioHelper class
import Foundation
import AVFoundation
class AudioHelper: NSObject, AVAudioRecorderDelegate {
var audioSession: AVAudioSession?
var audioRecorder: AVAudioRecorder?
var delegate: AudioRecorderProtocol?
class var sharedInstance: AudioHelper {
struct Static {
static var instance: AudioHelper?
static var token: dispatch_once_t = 0
}
dispatch_once(&Static.token) {
Static.instance = AudioHelper()
}
return Static.instance!
}
override init() {
setupAudioSession()
super.init()
}
func setDelegate(delegate: AudioRecorderProtocol) {
self.delegate = delegate
}
func setupAudioSession() {
audioSession = AVAudioSession.sharedInstance()
audioSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, error: nil)
audioSession?.setActive(true, error: nil)
}
func createAudioMessageDirectory() {
let fm = NSFileManager.defaultManager()
if !fm.fileExistsAtPath(GlobalVars.kAudioMessageDirectory) {
var error: NSError?
if !fm.createDirectoryAtPath(GlobalVars.kAudioMessageDirectory, withIntermediateDirectories: true, attributes: nil, error: &error) {
println("Unable to create audio message directory: \(error)")
}
}
}
// MARK: Recording
func beginRecordingAudio() {
createAudioMessageDirectory()
var filepath = GlobalVars.kAudioMessageDirectory.stringByAppendingPathComponent("audiofile.aac")
var url = NSURL(fileURLWithPath: filepath)
var recordSettings = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 8000.0,
AVNumberOfChannelsKey: 1,
AVEncoderBitRateKey: 12800,
AVLinearPCMBitDepthKey: 16,
AVEncoderAudioQualityKey: AVAudioQuality.Max.rawValue
]
println("Recorded Audio Message Saved: \(url!)")
var error: NSError?
audioRecorder = AVAudioRecorder(URL: url, settings: recordSettings as [NSObject : AnyObject], error: &error)
if error == nil {
if audioRecorder != nil {
audioRecorder!.delegate = self
audioRecorder!.record()
}
}
else {
println(error!.localizedDescription)
}
}
func stopRecordingAudio() {
if audioRecorder != nil {
audioRecorder!.stop()
}
}
func handleRecordAudioButtonLongPressGestureForState(state: UIGestureRecognizerState) {
if state == UIGestureRecognizerState.Ended {
stopRecordingAudio()
delegate?.onRecordAudioStop()
}
else if state == UIGestureRecognizerState.Began {
beginRecordingAudio()
delegate?.onRecordAudioStart()
}
}
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder!, successfully flag: Bool) {
println("Record Audio Success: \(flag)")
delegate?.onRecordAudioFinished()
}
func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder!, error: NSError!) {
println("Record Audio Encode Error: \(error.localizedDescription)")
}
// MARK: Playback
func playAudioMessageFromUrl(messageId: String) {
if let url = NSURL(string: GlobalVars.kUrlAudioMessage + messageId) {
if let data = NSData(contentsOfURL: url) {
var error: NSError? = nil
let audioPlayer = AVAudioPlayer(data: data, error: &error)
if error == nil {
if audioPlayer != nil {
audioPlayer.numberOfLoops = 0
audioPlayer.volume = 1.0
audioPlayer.prepareToPlay()
audioPlayer.play()
}
}
else {
println("Audio playback error: \(error?.localizedDescription)")
}
}
}
}
}
protocol AudioRecorderProtocol {
func onRecordAudioStart()
func onRecordAudioStop()
func onRecordAudioFinished()
}
My UIViewController that implements the protocol (cut out extraneous code)
class ChatViewController: UIViewController, UITableViewDelegate, UITableViewDataSource, AudioRecorderProtocol {
let audioHelper = AudioHelper.sharedInstance
let appDelegate = UIApplication.sharedApplication().delegate as! AppDelegate
override func viewDidLoad() {
super.viewDidLoad()
// addDemoMessages()
setupGestureRecognizer()
setupKeyboardObserver()
setupViews()
setupTableView()
audioHelper.setDelegate(self)
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
getUsersFromDb()
getMessagesFromDb()
}
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
setCurrentVC()
tableView.reloadData()
if partnerUserId != nil && !db.doesUserExist(partnerUserId!) {
HttpPostHelper.profileGet(userId: partnerUserId!)
}
requestMessagesFromServer()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
ViewHelper.scrollTableViewToBottom(tableView)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func handleRecordAudioButtonHold(sender: UILongPressGestureRecognizer) {
audioHelper.handleRecordAudioButtonLongPressGestureForState(sender.state)
}
func onRecordAudioStart() {
dispatch_async(dispatch_get_main_queue(), {
ViewHelper.showToast(NSLocalizedString("RECORDING", comment: ""))
self.recordAudioButton.imageView!.image = UIImage(named: "RecordAudioClicked")
})
}
func onRecordAudioStop() {
dispatch_async(dispatch_get_main_queue(), {
self.recordAudioButton.imageView!.image = UIImage(named: "RecordAudio")
})
}
func onRecordAudioFinished() {
HttpPostHelper.messageAudio(partnerUserId: partnerUserId)
}
func playAudioFromUrl(sender: UIButton) {
let messageId = messages[sender.tag].id
audioHelper.playAudioMessageFromUrl(messageId)
}
}
Just place it under super.init().
The object needs to be initialized by the superclass first; then you can do your custom initialization.
override init() {
super.init()
setupAudioSession()
}
If you are working with the MVVM pattern and you really need to call some function before super.init, you can always move that function to the view model and call it from there. Just inject the view model as a dependency.
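A minimal sketch of that idea; AudioViewModel and its setupAudioSession are hypothetical names used only for illustration:
import AVFoundation

// Hypothetical view model that owns the audio-session setup.
final class AudioViewModel {
    func setupAudioSession() {
        let session = AVAudioSession.sharedInstance()
        try? session.setCategory(.playAndRecord, mode: .default, options: [])
        try? session.setActive(true)
    }
}

final class AudioHelper: NSObject {
    private let viewModel: AudioViewModel

    init(viewModel: AudioViewModel) {
        // Calling a method on the injected parameter never touches self,
        // so it is allowed even before super.init().
        viewModel.setupAudioSession()
        self.viewModel = viewModel
        super.init()
    }
}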