I am working on an audio streaming application with recording functionality for a receiver.
I got stuck at the point where the user wants to record the audio stream on the receiver side.
Below is my code.
Initialisation
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false
Initialise AudioEngine
func initializeAudioEngine() {
    let input = self.engine.inputNode
    let format = input.inputFormat(forBus: 0)
    self.engine.attach(self.player)
    let mainMixerNode = self.engine.mainMixerNode
    self.engine.connect(input, to: mainMixerNode, format: format)
    self.engine.prepare()
    do {
        try self.engine.start()
        self.startRecording()
    } catch let error {
        print("START FAILED", error)
    }
}
Start Recording
func startRecording() {
    self.createRecordingFile()
    self.engine.mainMixerNode.installTap(onBus: 0,
                                         bufferSize: 1024,
                                         format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) in
        do {
            self.isRecording = true
            try self.recordingFile?.write(from: buffer)
        } catch let error {
            print("RECORD ERROR", error)
        }
    }
}
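createRecordingFile() is referenced above but not shown; a minimal sketch of what it might look like, assuming the file should live in Documents and match the mixer's output format so write(from:) succeeds:

private func createRecordingFile() {
    // Hypothetical implementation: write a .caf file into Documents
    // using the same format the tap delivers
    let format = engine.mainMixerNode.outputFormat(forBus: 0)
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let url = documents.appendingPathComponent("recording.caf")
    do {
        recordingFile = try AVAudioFile(forWriting: url, settings: format.settings)
    } catch {
        print("FILE CREATION FAILED", error)
    }
}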
Create Buffer
private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
    var res: AVAudioPCMBuffer?
    if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
        do {
            let file = try AVAudioFile(forReading: fileURL)
            res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))
            if let buffer = res {
                do {
                    try file.read(into: buffer)
                } catch let error {
                    print("ERROR read file", error)
                }
            }
        } catch let error {
            print("ERROR file creation", error)
        }
    }
    return res
}
Stop Recording
func stopRecording() {
    self.engine.mainMixerNode.removeTap(onBus: 0)
}
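One detail the original stopRecording omits (my assumption, not something from the question): the AVAudioFile is only finalized on disk when it is released, so it helps to clear the reference and reset the flag once the tap is removed:

func stopRecording() {
    engine.mainMixerNode.removeTap(onBus: 0)
    recordingFile = nil   // releasing the AVAudioFile closes it and finalizes the header
    isRecording = false
}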
I am trying to record using earphones, but it's not working.
It will work: once you set up the audio session
let audiosession = AVAudioSession.sharedInstance()
with the category AVAudioSessionCategoryPlayAndRecord and call
audiosession.setActive(true)
it will start recording whatever audio is routed to the device.
WebRTC does not have any internal API to start or stop recording, so we can try using AVAudioSession instead.
First, set up the audio session:
func setUPAudioSession() -> Bool {
    // Use the shared session; configuring a freshly created AVAudioSession
    // instance won't affect the app's actual audio session
    let audiosession = AVAudioSession.sharedInstance()
    do {
        try audiosession.setCategory(AVAudioSessionCategoryPlayAndRecord)
    } catch let error {
        print("--> \(error.localizedDescription)")
    }
    do {
        try audiosession.setActive(true)
    } catch let error {
        print("--> \(error.localizedDescription)")
    }
    return audiosession.isInputAvailable
}
After setting up the audio session, start recording as below:
func startRecording() -> Bool {
    var settings: [String: Any] = [:]
    settings[AVFormatIDKey] = kAudioFormatLinearPCM
    settings[AVSampleRateKey] = 8000.0
    settings[AVNumberOfChannelsKey] = 1
    settings[AVLinearPCMBitDepthKey] = 16
    settings[AVLinearPCMIsBigEndianKey] = false
    settings[AVLinearPCMIsFloatKey] = false
    settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue
    // Build the file URL inside Documents, where the recording will be saved
    let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
    guard let documentPath = searchPaths.first else { return false }
    let pathToSave = "\(documentPath)/\(dateString)"
    let url = URL(fileURLWithPath: pathToSave)
    recorder = try? AVAudioRecorder(url: url, settings: settings)
    // Initialize delegate, metering, etc.
    recorder?.delegate = self
    recorder?.isMeteringEnabled = true
    recorder?.prepareToRecord()
    if let recordIs = recorder {
        return recordIs.record()
    }
    return false
}
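dateString isn't defined in the snippet; a hypothetical helper that produces a timestamped file name might look like:

// Hypothetical helper: produces names like "2018-05-14-10-30-00.caf"
var dateString: String {
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy-MM-dd-HH-mm-ss"
    return formatter.string(from: Date()) + ".caf"
}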
Play recorded file
func playRecordingFile() {
    // Get the path of the recorded file saved by the previous method
    let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
    guard let documentPath = searchPaths.first else { return }
    let fileManager = FileManager.default
    var arrayListOfRecordSound: [String] = []
    if fileManager.fileExists(atPath: recordingFolder()) {
        arrayListOfRecordSound = (try? fileManager.contentsOfDirectory(atPath: documentPath)) ?? []
    }
    guard let firstRecord = arrayListOfRecordSound.first else { return }
    let selectedSound = "\(documentPath)/\(firstRecord)"
    let url = URL(fileURLWithPath: selectedSound)
    // Keep a strong reference (here an audioPlayer property); a local player is
    // deallocated as soon as this method returns and playback stops immediately
    audioPlayer = try? AVAudioPlayer(contentsOf: url)
    audioPlayer?.delegate = self
    try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
    audioPlayer?.prepareToPlay()
    audioPlayer?.play()
}
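recordingFolder() is also not shown; under the same assumption that recordings are saved directly into Documents, it could simply be:

// Hypothetical helper matching the save path used in startRecording()
func recordingFolder() -> String {
    return NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true).first ?? ""
}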
Stop recording
func stopRecording() {
    recorder?.stop()
}
Pause recording
func pauseRecording() {
    recorder?.pause()
}
Related
I'm working on a project that needs to record video segments and then merge these segments into one video. I'm using the AVFoundation framework to do so.
The problem is that when I merge the segments, there's always a black frame or a sound gap between the chunks. Also, sometimes the sound is not synchronised after merging.
I've tried many options to solve this problem, including several solutions from Stack Overflow, but none of them worked.
I've also tried to use MKOVideoMerge, but I still have the problem.
I've made a small view controller below that records from the camera and creates a new segment every 10 seconds. When the user taps "stop", all the segments are merged and saved to the camera roll:
If anyone has managed to merge two video segments without dropping frames or sound, help would be much appreciated :)
import UIKit
import AVFoundation
import Photos
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate {
private enum SessionSetupResult {
case success
case notAuthorized
case configurationFailed
}
@IBOutlet weak var btnStartStop: UIButton!
@IBOutlet weak var previewView: PreviewView!
private let session = AVCaptureSession()
private let sessionQueue = DispatchQueue(label: "session queue")
private let mergeQueue = DispatchQueue(label: "merge queue")
private var setupResult: SessionSetupResult = .success
private var videoDeviceInput: AVCaptureDeviceInput!
lazy private var movieBufferOutput = AVCaptureVideoDataOutput()
lazy private var audioBufferOutput = AVCaptureAudioDataOutput()
private var movieConnection: AVCaptureConnection!
private var audioConnection: AVCaptureConnection!
private var assetWriter: AVAssetWriter! = nil
private var assetWriterInput: AVAssetWriterInput! = nil
private var audioWriterInput: AVAssetWriterInput! = nil
private var chunkNumber = 0
private let chunkMaxDuration = 10.0
private var chunkStartTime: CMTime! = nil
private var chunkOutputURL: URL! = nil
private var stopRecording: Bool = false
@IBAction func startStop(_ sender: Any) {
stopRecording = true
btnStartStop.setTitle("recording", for: .normal)
}
override func viewDidLoad() {
super.viewDidLoad()
previewView.session = session
previewView.videoPreviewLayer.videoGravity = .resizeAspectFill
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in
if !granted {
self.setupResult = .notAuthorized
}
self.sessionQueue.resume()
})
default:
setupResult = .notAuthorized
}
sessionQueue.async {
self.configureSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.cleanTempDirectory()
sessionQueue.async {
switch self.setupResult {
case .success:
break
case .notAuthorized:
DispatchQueue.main.async {
let changePrivacySetting = "Not authorized"
let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
let alertController = UIAlertController(title: "Not authorized", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
style: .`default`,
handler: { _ in
UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
}))
self.present(alertController, animated: true, completion: nil)
}
case .configurationFailed:
DispatchQueue.main.async {
let alertMsg = "Error"
let message = NSLocalizedString("Error", comment: alertMsg)
let alertController = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
self.present(alertController, animated: true, completion: nil)
}
}
}
}
private func configureSession() {
if setupResult != .success {
return
}
session.beginConfiguration()
session.sessionPreset = .high
do {
var defaultVideoDevice: AVCaptureDevice?
if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
defaultVideoDevice = dualCameraDevice
} else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
defaultVideoDevice = backCameraDevice
} else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
defaultVideoDevice = frontCameraDevice
}
let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice!)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
DispatchQueue.main.async {
self.previewView.videoPreviewLayer.connection?.videoOrientation = .landscapeRight
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
} catch {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
do {
let audioDevice = AVCaptureDevice.default(for: .audio)
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
}
} catch {
}
movieBufferOutput.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
]
if self.session.canAddOutput(movieBufferOutput) {
self.session.addOutput(movieBufferOutput)
if let connection = self.movieBufferOutput.connection(with: .video) {
movieConnection = connection
connection.videoOrientation = .landscapeRight
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
if self.session.canAddOutput(audioBufferOutput) {
self.session.addOutput(audioBufferOutput)
if let connection = self.audioBufferOutput.connection(with: .audio) {
audioConnection = connection
}
} else {
print("Could not add audio output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
let queue: DispatchQueue = DispatchQueue(label: "MediaOutputQueue")
let audioQueue: DispatchQueue = DispatchQueue(label: "AudioOutputQueue")
self.movieBufferOutput.setSampleBufferDelegate(self, queue: queue)
self.audioBufferOutput.setSampleBufferDelegate(self, queue: audioQueue)
self.movieBufferOutput.alwaysDiscardsLateVideoFrames = true
session.commitConfiguration()
self.session.startRunning()
}
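// Creates a new chunk writer: a fresh AVAssetWriter with video and audio inputs,
// started at the given presentation timestamp so samples keep their original timing.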
func createWriterInput(for presentationTimeStamp: CMTime) {
self.stopRecording = false
let fileManager = FileManager.default
let outputFileName = "chunk\(chunkNumber)"
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mp4")!)
chunkOutputURL = URL(fileURLWithPath: outputFilePath)
try? fileManager.removeItem(at: chunkOutputURL)
assetWriter = try! AVAssetWriter(outputURL: chunkOutputURL, fileType: .mp4)
let outputSettings: [String: Any] = [AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720]
assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
assetWriterInput.expectsMediaDataInRealTime = true
assetWriter.add(assetWriterInput)
let audioSettings = [
AVFormatIDKey : kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey : 1,
AVSampleRateKey : 44100.0,
AVEncoderBitRateKey: 192000
] as [String : Any]
audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioWriterInput.expectsMediaDataInRealTime = true;
assetWriter.add(audioWriterInput)
chunkNumber += 1
chunkStartTime = presentationTimeStamp
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: chunkStartTime)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override var shouldAutorotate: Bool {
return false
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .landscapeRight
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .landscapeRight
}
func cleanTempDirectory() {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return
}
let tempVideosURL = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: tempVideosURL, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 < $1.1 })
for mp4 in mp4Files {
try? FileManager.default.removeItem(at: mp4.0)
}
} catch {
}
}
func getTempVideos() -> [URL] {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return []
}
var videosURL: [URL] = []
let videosUrl = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: videosUrl, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 > $1.1 })
var i = 0
for mp4 in mp4Files {
videosURL.append(mp4.0)
i = i + 1
if i > 5 {
break
}
}
} catch {
return []
}
return videosURL
}
func getRecordedVideoURL() -> URL {
var tempVideosPath = NSTemporaryDirectory()
tempVideosPath = (tempVideosPath as NSString).appendingPathComponent("videos")
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
do {
try FileManager.default.createDirectory(at: URL(fileURLWithPath: tempVideosPath), withIntermediateDirectories: true, attributes: nil)
} catch {
}
}
let outputFileName = "test-" + NSUUID().uuidString
let outputFileURL = URL(fileURLWithPath: tempVideosPath).appendingPathComponent(outputFileName).appendingPathExtension("mp4")
try? FileManager.default.removeItem(at: outputFileURL)
return outputFileURL
}
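// Builds an AVMutableComposition from the recorded chunks (skipping the chunk that is
// still being written) and exports the result as a single mp4.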
func mergeVideos(urls: [URL], excludedUrl: URL, completion: @escaping (_ exporter: AVAssetExportSession?) -> ()) -> Void {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
let assetOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
var insertTime = kCMTimeZero
var audioInsertTime = kCMTimeZero
var videos: [(asset: AVURLAsset, videoTrack: AVAssetTrack, videoDuration: CMTime)] = []
for url in urls {
if url.path != excludedUrl.path {
let videoAsset = AVURLAsset(url: url, options : assetOptions)
if videoAsset.tracks(withMediaType: .video).count > 0 && videoAsset.tracks(withMediaType: .audio).count > 0 {
let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
let videoDuration = videoTrack.timeRange.duration
videos.append((asset: videoAsset, videoTrack: videoTrack, videoDuration: videoDuration))
} else {
break
}
}
}
var hasError: Bool = false
for video in videos.reversed() {
let audioTrack = video.asset.tracks(withMediaType: .audio)[0]
let audioDuration = audioTrack.timeRange.duration
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video.videoDuration), of: video.videoTrack, at: insertTime)
} catch let error {
hasError = true
print(error)
}
do {
try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), of: audioTrack, at: audioInsertTime)
} catch let error {
hasError = true
print(error)
}
insertTime = CMTimeAdd(insertTime, video.videoDuration)
audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
}
if videos.count == 0 {
hasError = true
}
if !hasError {
let outputFileURL = getRecordedVideoURL()
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter!)
}
}
} else {
completion(nil)
}
}
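// Shared delegate callback for the video and audio data outputs: appends buffers to the
// current chunk writer and rotates to a new chunk every chunkMaxDuration seconds or on stop.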
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if connection == self.audioConnection {
if let audioInput = self.audioWriterInput, audioInput.isReadyForMoreMediaData {
if !audioInput.append(sampleBuffer) {
print("Error writing audio buffer");
}
}
} else {
if let videoInput = self.assetWriterInput, videoInput.isReadyForMoreMediaData {
if !videoInput.append(sampleBuffer) {
print("Error writing video buffer");
}
}
}
if connection == movieConnection {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
if assetWriter == nil {
createWriterInput(for: presentationTimeStamp)
} else {
let currentChunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))
if currentChunkDuration >= chunkMaxDuration || self.stopRecording == true {
let chunkAssetWriter = assetWriter!
let assetWriterInput = self.assetWriterInput
let audioWriterInput = self.audioWriterInput
let stopRecording = self.stopRecording
createWriterInput(for: presentationTimeStamp)
assetWriterInput?.markAsFinished()
audioWriterInput?.markAsFinished()
chunkAssetWriter.endSession(atSourceTime: presentationTimeStamp)
chunkAssetWriter.finishWriting {
DispatchQueue.main.async {
self.btnStartStop.setTitle("stop", for: .normal)
}
if stopRecording {
self.mergeQueue.async {
self.mergeVideos(urls: self.getTempVideos(), excludedUrl: self.chunkOutputURL!, completion: { exportSession in
if let exportSession = exportSession {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportSession.outputURL!)
}) { saved, error in
DispatchQueue.main.async {
if saved {
let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
alertController.addAction(defaultAction)
self.present(alertController, animated: true, completion: nil)
}
}
}
}
})
}
}
}
}
}
}
}
}
Thanks for your help :)
I've been trying to upload the audio recording to Firebase right after the user stops recording. But it doesn't do anything apart from creating a new folder named "audio".
Code I'm using for starting and stopping recording
@IBAction func recordAudio(_ sender: AnyObject) {
    recordingLabel.text = "Recording in progress"
    stopRecordingButton.isEnabled = true
    recordButton.isEnabled = false
    let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
    let recordingName = "recordedVoice.wav"
    let pathArray = [dirPath, recordingName]
    let filePath = URL(string: pathArray.joined(separator: "/"))
    let session = AVAudioSession.sharedInstance()
    try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: AVAudioSessionCategoryOptions.defaultToSpeaker)
    try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
    audioRecorder.delegate = self
    audioRecorder.isMeteringEnabled = true
    audioRecorder.prepareToRecord()
    audioRecorder.record()
}
@IBAction func stopRecording(_ sender: AnyObject) {
    print("Stop recording button was pressed")
    recordButton.isEnabled = true
    stopRecordingButton.isEnabled = false
    recordingLabel.text = "Tap to Record"
    audioRecorder.stop()
    let audioSession = AVAudioSession.sharedInstance()
    try! audioSession.setActive(false)
}
Code I'm using for uploading to Firebase:
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
    print("finished recording")
    let storageRef = Storage.storage().reference().child("audio/recordedVoice.wav")
    if let uploadData = AVFileType(self.recordedVoice.wav!) {
        storageRef.put(uploadData, metadata: nil) { (metadata, error) in
            if error != nil {
                print(error)
                return
            }
        }
    }
}
Please help me!
Try:
let audioName = NSUUID().uuidString // gives the audio file a unique name
let storageRef = Storage.storage().reference().child("audio").child(audioName)
let metadata = StorageMetadata()
metadata.contentType = "audio/wav"
// Read the recorded file from disk; AVAudioRecorder's url property points at it
if let uploadData = try? Data(contentsOf: audioRecorder.url) {
    storageRef.putData(uploadData, metadata: metadata) { (metadata, err) in
        if err != nil {
            //print(err)
            return
        }
        if let _ = metadata?.downloadURL()?.absoluteString {
            print("uploading done!")
        }
    }
}
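One guard worth adding (my suggestion, not part of the original answer): only upload when the recording actually finished successfully, and take the file URL from the delegate callback rather than hard-coding it:

func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
    guard flag else {
        print("recording did not finish successfully - skipping upload")
        return
    }
    // uploadRecording(at:) is a hypothetical helper wrapping the Storage code above
    uploadRecording(at: recorder.url)
}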
So I have been working on this for hours, and here is my answer:
func getDocumentsDirectory() -> URL {
    let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    return paths[0]
}
func getFileURL() -> URL {
    let path = getDocumentsDirectory()
    let filePath = path.appendingPathComponent(K.fileName)
    return filePath
}
let reference = storage.reference()
let mediaFolder = reference.child("media")
let id = UUID().uuidString // use a UUID to give audio files unique names and prevent overwrites
let mediaRef = mediaFolder.child(id + K.fileName) // file reference built from uuid + filename
let path = getFileURL() // the local file path of the recording
do {
    let data = try Data(contentsOf: path) // load the recorded audio from disk
    mediaRef.putData(data) { metadata, error in
        if error != nil {
            self.showAlert(title: "Error", message: error?.localizedDescription, cancelButtonTitle: "cancel", handler: nil)
        } else {
            mediaRef.downloadURL { url, error in
                let url = url?.absoluteString
                print(url)
            }
        }
    }
    print("record has come")
} catch {
    print("error: can't get audio file")
}
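K.fileName is a constant from the author's own project and isn't shown; a minimal stand-in might look like:

// Hypothetical stand-in for the author's constants type
struct K {
    static let fileName = "recording.m4a"
}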
I'm relatively new at coding and still learning, so my answer may not be the best or shortest, but this worked for me.
I'm trying to record audio, then save offline with AudioKit.renderToFile, then use AKPlayer to play the original recorded audio file.
import UIKit
import AudioKit
class ViewController: UIViewController {
private var recordUrl:URL!
private var isRecording:Bool = false
public var player:AKPlayer!
private let format = AVAudioFormat(commonFormat: .pcmFormatFloat64, sampleRate: 44100, channels: 2, interleaved: true)!
private var amplitudeTracker:AKAmplitudeTracker!
private var boostedMic:AKBooster!
private var mic:AKMicrophone!
private var micMixer:AKMixer!
private var silence:AKBooster!
public var recorder: AKNodeRecorder!
@IBOutlet weak var recordButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
//self.recordUrl = Bundle.main.url(forResource: "sound", withExtension: "caf")
//self.startAudioPlayback(url: self.recordUrl!)
self.recordUrl = self.urlForDocument("record.caf")
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func requestMic(completion: @escaping () -> Void) {
AVAudioSession.sharedInstance().requestRecordPermission({ (granted: Bool) in
if granted { completion()}
})
}
public func switchToMicrophone() {
stopEngine()
do {
try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
} catch {
AKLog("Could not set session category.")
}
mic = AKMicrophone()
micMixer = AKMixer(mic)
boostedMic = AKBooster(micMixer, gain: 5)
amplitudeTracker = AKAmplitudeTracker(boostedMic)
silence = AKBooster(amplitudeTracker, gain: 0)
AudioKit.output = silence
startEngine()
}
@IBAction func startStopRecording(_ sender: Any) {
self.isRecording = !self.isRecording
if self.isRecording {
self.startRecording()
self.recordButton.setTitle("Stop Recording", for: .normal)
} else {
self.stopRecording()
self.recordButton.setTitle("Start Recording", for: .normal)
}
}
func startRecording() {
self.requestMic() {
self.switchToMicrophone()
if let url = self.recordUrl {
do {
let audioFile = try AKAudioFile(forWriting: url, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
self.recorder = try AKNodeRecorder(node: self.micMixer, file: audioFile)
try self.recorder.reset()
try self.recorder.record()
} catch {
print("error setting up recording", error)
}
}
}
}
func stopRecording() {
recorder.stop()
startAudioPlayback(url: self.recordUrl)
}
@IBAction func saveToDisk(_ sender: Any) {
if let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") {
do {
source.stop()
let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
source.play()
})
print("audio file rendered")
} catch {
print("error rendering", error)
}
// PROBLEM STARTS HERE //
self.startAudioPlayback(url: self.recordUrl)
}
}
public func startAudioPlayback(url:URL) {
print("loading playback audio", url)
self.stopEngine()
do {
try AKSettings.setSession(category: .playback)
player = AKPlayer.init()
try player.load(url: url)
}
catch {
print("error setting up audio playback", error)
return
}
player.prepare()
player.isLooping = true
self.setPitch(pitch: self.getPitch(), saveValue: false)
AudioKit.output = player
startEngine()
startPlayer()
}
public func startPlayer() {
if AudioKit.engine.isRunning { self.player.play() }
else { print("audio engine not running, can't play") }
}
public func startEngine() {
if !AudioKit.engine.isRunning {
print("starting engine")
do { try AudioKit.start() }
catch {
print("error starting audio", error)
}
}
}
public func stopEngine(){
if AudioKit.engine.isRunning {
print("stopping engine")
do {
try AudioKit.stop()
}
catch {
print("error stopping audio", error)
}
}
//playback doesn't work without this?
mic = nil
}
@IBAction func changePitch(_ sender: UISlider) {
self.setPitch(pitch:Double(sender.value))
}
public func getPitch() -> Double {
return UserDefaults.standard.double(forKey: "pitchFactor")
}
public func setPitch(pitch:Double, saveValue:Bool = true) {
player.pitch = pitch * 1000.0
if saveValue {
UserDefaults.standard.set(pitch, forKey: "pitchFactor")
UserDefaults.standard.synchronize()
}
}
func urlForDocument(_ named:String) -> URL? {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = NSURL(fileURLWithPath: path)
if let pathComponent = url.appendingPathComponent(named) {
return pathComponent
}
return nil
}
}
The order of calls is switchToMicrophone, startRecording, stopRecording, startAudioPlayback, saveToDisk, and then startAudioPlayback again.
Please see the GitHub repo for the full code in ViewController.swift.
After the renderToFile function, when restarting AudioKit for the player, the following errors occur:
[mcmx] 338: input bus 0 sample rate is 0
[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
[avae] AVAudioEngine.mm:149:-[AVAudioEngine prepare]: Engine#0x1c4008ae0: could not initialize, error = -10875
[mcmx] 338: input bus 0 sample rate is 0
[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
error starting audio Error Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)} ***
This all works flawlessly if I take the recording piece out, or the offline render out, but not with both included.
It might be that the issue is with your order of execution: try swapping startAudioPlayback and saveToDisk, so that it first does saveToDisk and then reads the file back and plays it with startAudioPlayback.
EDIT: So far, by playing around with it, I believe I have identified the issue: once you save the file, the temp file that held the recording disappears for some reason. Why that happens still needs to be narrowed down.
Or perhaps try sending the whole saveToDisk method to a background thread so that it doesn't interrupt the currently playing file.
In my spare time I'll try to tweak it a little more and let you know.
EDIT 2:
Check this answer: https://stackoverflow.com/a/48133092/9497657
If you get nowhere, try posting your issue here:
https://github.com/audiokit/AudioKit/issues/
Also check out this tutorial:
https://www.raywenderlich.com/145770/audiokit-tutorial-getting-started
It might also be useful to message Aurelius Prochazka, as he is a developer of AudioKit who could help.
I was able to get it working by combining the recording and playback into a single pipeline:
mixer = AKMixer(mic)
boostedMic = AKBooster(mixer, gain: 5)
amplitudeTracker = AKAmplitudeTracker(boostedMic)
micBooster = AKBooster(amplitudeTracker, gain: 0)
player = AKPlayer()
try? player.load(url: self.recordUrl)
player.prepare()
player.gain = 2.0
outputMixer = AKMixer(micBooster, player)
AudioKit.output = outputMixer
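For completeness, a sketch of how the combined graph might be started, assuming AudioKit 4.x and the same property names as above:

do {
    try AudioKit.start()  // start the engine once; mic capture and playback share the same graph
} catch {
    print("error starting audio", error)
}
player.play()  // playback runs alongside the live mic chain, so no engine restart is needed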
I have downloaded a project from https://github.com/doberman/speaker-gender-detect--ios. I followed the instructions, but when I run the app I get a message that says: fatal error: unexpectedly found nil while unwrapping an Optional value. How can I fix the crash so that the app works?
The app crashes here:
let genderEqualityRatios = self.calcGenderEquality(String(response.result.value!))
This is my code:
import AVFoundation
import Alamofire
import SwiftyJSON
protocol AudioRecorderDelegate {
func audioRecorder(audioRecorder: AudioRecorder?, updatedLevel: Float)
func audioRecorder(audioRecorder: AudioRecorder?, updatedGenderEqualityRatio: (male: Float, female: Float))
}
class AudioRecorder: NSObject {
static let sharedInstance: AudioRecorder = AudioRecorder()
private let kRemoteURL: NSURL = NSURL(string: "xxx.xxx.xxx.xxx")! // change to your API endpoint URL
private let kPostAudioInterval: NSTimeInterval = 10.0 // change to post to API more/less frequently
var delegate: AudioRecorderDelegate?
private let recorderSettings = [
AVSampleRateKey: NSNumber(float: Float(16000.0)),
AVFormatIDKey: NSNumber(int: Int32(kAudioFormatMPEG4AAC)),
AVNumberOfChannelsKey: NSNumber(int: 1),
AVEncoderAudioQualityKey: NSNumber(int: Int32(AVAudioQuality.High.rawValue))
]
private var recorder: AVAudioRecorder?
private var checkLevelsTimer: NSTimer?
private var postTimer: NSTimer?
private var maleDuration: Float = 0.0
private var femaleDuration: Float = 0.0
override init() {
super.init()
do {
let audioSession: AVAudioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryRecord)
} catch let err as NSError {
print("Failed to initialize AudioRecorder: \(err)")
}
}
func startRecording() {
// print("startRecording")
if self.recorder != nil && self.recorder!.recording {
self.stopRecording()
}
let audioURL: NSURL = self.getAudioURL()
// print("got audioURL: '\(audioURL)'")
do {
self.recorder = try AVAudioRecorder(URL: audioURL, settings: self.recorderSettings)
self.recorder?.meteringEnabled = true
self.recorder?.prepareToRecord()
} catch let err as NSError {
print("Failed to set up AVAudioRecorder instance: \(err)")
}
guard self.recorder != nil else { return }
self.recorder?.record()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(true)
self.checkLevelsTimer = NSTimer.scheduledTimerWithTimeInterval(0.05, target: self, selector: "checkLevels", userInfo: nil, repeats: true)
self.postTimer = NSTimer.scheduledTimerWithTimeInterval(kPostAudioInterval, target: self, selector: "onPostTimerTrigger", userInfo: nil, repeats: true)
} catch let err as NSError {
print("Failed to activate audio session (or failed to set up checkLevels timer): \(err)")
}
}
func stopRecording(shouldSubmitAudioAfterStop: Bool = false) {
// print("stopRecording")
guard self.recorder != nil else {
print("`self.recorder` is `nil` - no recording to stop")
return
}
self.recorder?.stop()
if let t = self.checkLevelsTimer {
t.invalidate()
self.checkLevelsTimer = nil
}
if let t = self.postTimer {
t.invalidate()
self.postTimer = nil
}
let audioURL: NSURL = self.recorder!.url
self.recorder = nil
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(false)
if shouldSubmitAudioAfterStop {
self.postAudio(audioURL)
} else {
// print("`shouldSubmitAudioAfterStop` is `false` - I won't post audio")
}
} catch let err as NSError {
print("Failed to deactivate audio session (or failed to post audio): \(err)")
}
}
// MARK: -
func checkLevels() {
guard self.recorder != nil else {
print("`self.recorder` is `nil` - can't check levels")
return
}
self.recorder?.updateMeters()
let averagePower: Float = self.recorder!.averagePowerForChannel(0)
if let d = self.delegate {
d.audioRecorder(self, updatedLevel: averagePower)
} else {
print("AudioRecorder - averagePower: \(averagePower)")
}
}
func onPostTimerTrigger() {
// print("onPostTimerTrigger")
guard let r = self.recorder else {
print("`self.recorder` is `nil` - no audio to post")
return
}
if !r.recording {
print("not recording - no audio to post")
}
self.stopRecording(true)
self.startRecording()
}
// MARK: -
private func getAudioURL(filename: String = "recording") -> NSURL {
let fileManager: NSFileManager = NSFileManager.defaultManager()
let urls: [NSURL] = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let documentDirectory: NSURL = urls[0] as NSURL
let uniqueFilename = "\(filename)_\(NSDate().timeIntervalSince1970)"
let audioURL: NSURL = documentDirectory.URLByAppendingPathComponent("\(uniqueFilename).m4a")
return audioURL
}
private func postAudio(audioURL: NSURL) {
// print("AudioRecorder.postAudio - audioURL: \(audioURL.absoluteString)")
Alamofire.upload(Method.POST, kRemoteURL, multipartFormData: { multipartFormData in
multipartFormData.appendBodyPart(fileURL: audioURL, name: "file")
}, encodingCompletion: { encodingResult in
switch encodingResult {
case .Success (let upload, _, _):
upload.responseString { response in
//print("response: \(response)")
let genderEqualityRatios = self.calcGenderEquality(String(response.result.value!))
if let eq = genderEqualityRatios, let d = self.delegate {
d.audioRecorder(self, updatedGenderEqualityRatio: eq)
}
}
case .Failure(let encodingError):
print("encodingError: \(encodingError)")
}
})
}
private func calcGenderEquality(response: String) -> (male: Float, female: Float)? {
guard let dataFromString = response.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: false) else {
return nil
}
let json = JSON(data: dataFromString)
for selection in json["selections"].arrayValue {
if selection["gender"] == "M" {
self.maleDuration = self.maleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
} else if selection["gender"] == "F" {
self.femaleDuration = self.femaleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
}
}
let spokenDuration = self.maleDuration + self.femaleDuration
let maleFactor = self.maleDuration / spokenDuration
let femaleFactor = self.femaleDuration / spokenDuration
guard !maleFactor.isNaN else {
print("Failed to calculate gender equality (`maleFactor` is `NaN`)")
return nil
}
guard !femaleFactor.isNaN else {
print("Failed to calculate gender equality (`femaleFactor` is `NaN`)")
return nil
}
return (male: maleFactor, female: femaleFactor)
}
}
Try it like this:
if let genderEqualityRatios = response.result.value as? String {
    self.calcGenderEquality(genderEqualityRatios)
} else {
    print("a problem occurred and we couldn't call calcGenderEquality")
}
I have read lots of tutorials for video players, but how do I start recording with a time limit when the camera opens in a custom view?
Here is a working solution in Swift:
var session: AVCaptureSession?
var userreponsevideoData = NSData()
var userreponsethumbimageData = NSData()
override func viewDidLoad() {
    super.viewDidLoad()
    createSession()
}
func stopRecording() {
    session?.stopRunning()
}
func createSession() {
    var input: AVCaptureDeviceInput?
    let movieFileOutput = AVCaptureMovieFileOutput()
    videosPreviewLayer?.frame.size = photoPreviewImageView.frame.size
    session = AVCaptureSession()
    do {
        input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .back)!)
    } catch {
        print("camera input error: \(error)")
        return
    }
    session?.addInput(input!)
    videosPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
    videosPreviewLayer?.frame.size = self.photoPreviewImageView.frame.size
    videosPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    videosPreviewLayer?.connection?.videoOrientation = .portrait
    photoPreviewImageView.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
    photoPreviewImageView.layer.addSublayer(videosPreviewLayer!)
    switchCameraButton.isHidden = true
    flashButton.isHidden = true
    msgLabel.isHidden = true
    galleryCollectionView.isHidden = true
    timerLabel.isHidden = false
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    fileURL = URL(string: "\(documentsURL.appendingPathComponent("temp"))" + ".mov")
    print("*****fileurl%#", fileURL ?? "00000")
    let maxDuration: CMTime = CMTimeMake(600, 10) // 600/10 = 60 seconds maximum recording time
    movieFileOutput.maxRecordedDuration = maxDuration
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
    if self.session!.canAddOutput(movieFileOutput) {
        self.session!.addOutput(movieFileOutput)
    }
    session?.startRunning()
    movieFileOutput.startRecording(to: fileURL!, recordingDelegate: self)
}
func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    let devices = AVCaptureDevice.devices(for: AVMediaType.video)
    for device in devices {
        if device.position == position {
            return device
        }
    }
    return nil
}
}
extension SwipeGallerymainViewController: AVCaptureFileOutputRecordingDelegate
{
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    print(outputFileURL)
    let filemainurl = outputFileURL
    do {
        let asset = AVURLAsset(url: filemainurl as URL, options: nil)
        print(asset)
        let imgGenerator = AVAssetImageGenerator(asset: asset)
        imgGenerator.appliesPreferredTrackTransform = true
        let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
        let uiImage = UIImage(cgImage: cgImage)
        previewImage = uiImage
        userreponsethumbimageData = NSData(contentsOf: filemainurl as URL)!
        print(userreponsethumbimageData.length)
        print(uiImage)
    } catch let error as NSError {
        print(error)
        return
    }
    let VideoFilePath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random() % 1000)d").appendingPathExtension("mp4").absoluteString
    if FileManager.default.fileExists(atPath: VideoFilePath) {
        print("exist")
        do {
            try FileManager.default.removeItem(atPath: VideoFilePath)
        } catch { }
    }
    let tempfilemainurl = NSURL(string: VideoFilePath)!
    let sourceAsset = AVURLAsset(url: filemainurl as URL, options: nil)
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.outputFileType = AVFileType.mov
    assetExport.outputURL = tempfilemainurl as URL
    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            DispatchQueue.main.async {
                do {
                    self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                    print("MB - \(self.userreponsevideoData.length) byte")
                    self.isVideoLoad = true
                    self.performSegue(withIdentifier: "previewSegue", sender: self)
                } catch {
                    print(error)
                }
            }
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
        }
    }
}
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    print(fileURL)
}
}
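A side note on cameraWithPosition(position:): AVCaptureDevice.devices(for:) is deprecated; on iOS 10 and later a discovery session is the replacement. A minimal sketch, under the assumption that you only need the wide-angle camera:

func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    // AVCaptureDevice.DiscoverySession replaces the deprecated devices(for:) API (iOS 10+)
    let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                     mediaType: .video,
                                                     position: position)
    return discovery.devices.first
}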
I have done the same thing in Java using the Robot class on a laptop. The Robot class can send keystrokes and mouse movements automatically. You can try the same approach on your platform: automate the button press event so that recording gets started.