I am using the shazamkit package to recognize sound in Flutter. The Android version works perfectly, but when I start detection in the iOS version I get this error:
ERROR: [0x190bf000] >avae> AVAudioNode.mm:568: CreateRecordingTap: required condition is false: IsFormatSampleRateAndChannelCountValid(format)
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: IsFormatSampleRateAndChannelCountValid(format)'
This is my Swift code:
import Flutter
import UIKit
import ShazamKit
import AudioToolbox
public class SwiftFlutterShazamKitPlugin: NSObject, FlutterPlugin {
private var session: SHSession?
private let audioEngine = AVAudioEngine()
private let playerNode = AVAudioPlayerNode()
private let mixerNode = AVAudioMixerNode()
private var callbackChannel: FlutterMethodChannel?
private var sampleRate = 44800
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "flutter_shazam_kit", binaryMessenger: registrar.messenger())
let instance = SwiftFlutterShazamKitPlugin(callbackChannel: FlutterMethodChannel(name: "flutter_shazam_kit_callback", binaryMessenger: registrar.messenger()))
registrar.addMethodCallDelegate(instance, channel: channel)
}
init(callbackChannel: FlutterMethodChannel? = nil) {
self.callbackChannel = callbackChannel
}
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
switch call.method {
case "configureShazamKitSession":
let args = call.arguments as! Dictionary<String, Any>
configureShazamKitSession(
customCatalogPath: args["customCatalogPath"] as? String,
sampleRate: args["sampleRate"] as! Int
)
result(nil)
case "startDetectionWithMicrophone":
do {
configureAudio()
try startListening(result: result)
} catch {
callbackChannel?.invokeMethod("didHasError", arguments: error.localizedDescription)
}
case "endDetectionWithMicrophone":
stopListening()
result(nil)
case "endSession":
session = nil
result(nil)
default:
result(nil)
}
}
}
//MARK: ShazamKit session delegation here
//MARK: Methods for AVAudio
extension SwiftFlutterShazamKitPlugin {
func configureShazamKitSession(customCatalogPath: String?, sampleRate: Int) {
self.sampleRate = sampleRate
do {
if session == nil {
if (customCatalogPath == nil) {
session = SHSession()
} else {
let documentsUrl = FileManager.default.urls(
for: .documentDirectory,
in: .userDomainMask
).first!
let catalog = SHCustomCatalog()
try catalog.add(from: URL(fileURLWithPath: customCatalogPath!))
session = SHSession(catalog: catalog)
}
session?.delegate = self
}
} catch {
callbackChannel?.invokeMethod("didHasError",
arguments: "configureShazamKitSession() failed: \(error.localizedDescription)")
}
}
func addAudio(buffer: AVAudioPCMBuffer, audioTime: AVAudioTime) {
// Add the audio to the current match request
session?.matchStreamingBuffer(buffer, at: audioTime)
}
func configureAudio() {
playerNode.stop()
audioEngine.stop()
let inputFormat = audioEngine.inputNode.inputFormat(forBus: 0)
// Set an output format compatible with ShazamKit.
let outputFormat = AVAudioFormat(standardFormatWithSampleRate: Double(sampleRate), channels: 1)
// Create a mixer node to convert the input.
audioEngine.attach(mixerNode)
// Attach the mixer to the microphone input and the output of the audio engine.
audioEngine.connect(audioEngine.inputNode, to: mixerNode, format: inputFormat)
// audioEngine.connect(mixerNode, to: audioEngine.outputNode, format: outputFormat)
// Install a tap on the mixer node to capture the microphone audio.
mixerNode.installTap(onBus: 0,
bufferSize: 8192,
format: outputFormat) { buffer, audioTime in
// Add captured audio to the buffer used for making a match.
self.addAudio(buffer: buffer, audioTime: audioTime)
}
}
func startListening(result: FlutterResult) throws {
guard session != nil else {
callbackChannel?.invokeMethod("didHasError", arguments: "ShazamSession not found, please call configureShazamKitSession() first to initialize it.")
result(nil)
return
}
callbackChannel?.invokeMethod("detectStateChanged", arguments: 1)
// Throw an error if the audio engine is already running.
guard !audioEngine.isRunning else {
callbackChannel?.invokeMethod("didHasError", arguments: "Audio engine is currently running, please stop the audio engine first and then try again")
return
}
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.measurement, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
} catch {
callbackChannel?.invokeMethod("didHasError", arguments: "Failed to configure the audio session: \(error.localizedDescription)")
return
}
// Ask the user for permission to use the mic if required, then start the engine.
audioSession.requestRecordPermission { [weak self] success in
guard success else {
self?.callbackChannel?.invokeMethod("didHasError", arguments: "Recording permission not found, please allow permission first and then try again")
return
}
do {
self?.audioEngine.prepare()
try self?.audioEngine.start()
} catch {
self?.callbackChannel?.invokeMethod("didHasError", arguments: "Can't start the audio engine")
}
}
result(nil)
}
func stopListening() {
callbackChannel?.invokeMethod("detectStateChanged", arguments: 0)
// Check if the audio engine is already recording.
mixerNode.removeTap(onBus: 0)
audioEngine.stop()
}
}
//MARK: Delegate methods for SHSession
extension SwiftFlutterShazamKitPlugin: SHSessionDelegate{
public func session(_ session: SHSession, didFind match: SHMatch) {
var mediaItems: [[String: Any]] = []
match.mediaItems.forEach { rawItem in
var item: [String : Any] = [:]
var count: UInt32 = 0
let properties = class_copyPropertyList(class_getSuperclass(rawItem.classForCoder), &count)
for i in 0..<count {
guard let property = properties?[Int(i)] else { continue }
let name = String(cString: property_getName(property))
if (name == "properties") {
let props = rawItem.value(forKey: name) as! NSDictionary
for property in props.allKeys {
let prop = property as! String
var val = props.value(forKey: prop)!
if (String(describing: type(of: val)) == "__NSTaggedDate") {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"
val = dateFormatter.string(from: val as! Date)
}
item[prop] = val
}
}
}
mediaItems.append(item)
free(properties)
}
do {
let jsonData = try JSONSerialization.data(withJSONObject: mediaItems)
let jsonString = String(data: jsonData, encoding: .utf8)
self.callbackChannel?.invokeMethod("matchFound", arguments: jsonString)
} catch {
callbackChannel?.invokeMethod("didHasError", arguments: "Error when trying to format data, please try again")
}
}
public func session(_ session: SHSession, didNotFindMatchFor signature: SHSignature, error: Error?) {
callbackChannel?.invokeMethod("notFound", arguments: nil)
callbackChannel?.invokeMethod("didHasError", arguments: error?.localizedDescription)
}
}
Your help is greatly appreciated
https://pub.dev/packages/flutter_shazam_kit
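For anyone hitting the same assertion: it fires when the format handed to installTap(onBus:bufferSize:format:) doesn't describe what the node can actually produce, e.g. a 0 Hz/0-channel input format because the AVAudioSession isn't configured for recording yet (here configureAudio() runs before the session is activated in startListening), or a rate the hardware doesn't use (44800 above is likely meant to be 44100 or 48000). A minimal sketch of a defensive tap, using the node's own hardware format and converting buffers to a mono target afterwards; the function name and the 48 kHz target are assumptions, not plugin API:

import AVFoundation

/// Installs a mic tap using the node's native format, then converts each
/// buffer to a mono target format before handing it on (e.g. to ShazamKit).
func installMatchTap(on engine: AVAudioEngine,
                     handler: @escaping (AVAudioPCMBuffer, AVAudioTime) -> Void) {
    let input = engine.inputNode
    // Tap with the format the node really outputs; a mismatched format here
    // is what raises IsFormatSampleRateAndChannelCountValid.
    let hwFormat = input.outputFormat(forBus: 0)
    guard let target = AVAudioFormat(standardFormatWithSampleRate: 48000, channels: 1),
          let converter = AVAudioConverter(from: hwFormat, to: target) else { return }

    input.installTap(onBus: 0, bufferSize: 8192, format: hwFormat) { buffer, time in
        let ratio = target.sampleRate / hwFormat.sampleRate
        let capacity = AVAudioFrameCount(Double(buffer.frameLength) * ratio)
        guard let converted = AVAudioPCMBuffer(pcmFormat: target, frameCapacity: capacity) else { return }
        var consumed = false
        var error: NSError?
        // Feed the captured buffer into the converter exactly once.
        let status = converter.convert(to: converted, error: &error) { _, outStatus in
            if consumed {
                outStatus.pointee = .noDataNow
                return nil
            }
            consumed = true
            outStatus.pointee = .haveData
            return buffer
        }
        if status == .haveData {
            handler(converted, time) // e.g. session?.matchStreamingBuffer(converted, at: time)
        }
    }
}

Activating the audio session (record-capable category) before installing the tap, and only then starting the engine, avoids the 0 Hz case entirely.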
I'm trying to record my screen with a sample iOS application.
But it does not work because RPScreenRecorder.shared().isAvailable always returns false.
Here is my code:
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var StartRecordingButton: UIButton!
@IBOutlet weak var EndRecordingButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
StartRecordingButton.addTarget(self, action: #selector(startRecord(_:)), for: .touchUpInside)
EndRecordingButton.addTarget(self, action: #selector(stopRecord(_:)), for: .touchUpInside)
}
private lazy var recorder: ScreenRecorder = ScreenRecorder(configuration: ScreenRecorder.Configuration(), completion: {
(url, error) in
guard let url = url else {
fatalError("\(#function) record failed \(String(describing: error))")
}
debugPrint(#function, "success", url)
})
@objc func startRecord(_ sender: UIButton) {
recordStart()
}
@objc func stopRecord(_ sender: UIButton) {
recordStop()
}
private func recordStart() {
guard !recorder.isRecording else { return }
do {
try recorder.start()
} catch {
fatalError("start recording failed \(error)")
}
}
private func recordStop() {
guard recorder.isRecording else { return }
do {
try recorder.end()
} catch {
fatalError("finish recording failed \(error)")
}
}
}
ScreenRecorder.swift
import ReplayKit
import AVFoundation
@available(iOS 11.0, *)
public class ScreenRecorder: NSObject {
let screenRecorder = RPScreenRecorder.shared()
// Alias for arguments
public typealias Completion = (URL?, Error?) -> ()
let completion: Completion
let configuration: Configuration
public init(configuration: Configuration, completion: @escaping Completion) {
self.configuration = configuration
self.completion = completion
super.init()
}
// Start recording screen
public func start() throws {
print(screenRecorder.isAvailable)
guard screenRecorder.isAvailable else {
throw ScreenRecorderError.notAvailable
}
guard !screenRecorder.isRecording else {
throw ScreenRecorderError.alreadyRunning
}
try setUp()
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: CMTime.zero)
screenRecorder.startCapture(handler: { [weak self] (cmSampleBuffer, rpSampleBufferType, error) in
if let error = error {
debugPrint(#function, "something happened", error)
}
if RPSampleBufferType.video == rpSampleBufferType {
self?.appendVideo(sampleBuffer: cmSampleBuffer)
}
}) { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
}
}
public func end() throws {
guard screenRecorder.isRecording else {
throw ScreenRecorderError.notRunning
}
screenRecorder.stopCapture { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
self?.videoAssetWriterInput?.markAsFinished()
self?.assetWriter?.finishWriting {
DispatchQueue.main.async {
self?.completion(self?.cacheFileURL, nil)
}
}
}
}
public var isRecording: Bool {
return screenRecorder.isRecording
}
private var startTime: CMTime?
private var assetWriter: AVAssetWriter?
private var videoAssetWriterInput: AVAssetWriterInput?
private var writerInputPixelBufferAdapter: AVAssetWriterInputPixelBufferAdaptor?
private func setUp() throws {
try createCacheDirectoryIfNeeded()
try removeOldCachedFile()
guard let cacheURL = cacheFileURL else {
throw ScreenRecorderError.invalidURL
}
let assetWriter = try AVAssetWriter(url: cacheURL, fileType: configuration.fileType)
let videoSettings: [String: Any] = [
AVVideoCodecKey: configuration.codec,
AVVideoWidthKey: UInt(configuration.videoSize.width),
AVVideoHeightKey: UInt(configuration.videoSize.height),
]
let videoAssetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoAssetWriterInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoAssetWriterInput) {
assetWriter.add(videoAssetWriterInput)
}
self.assetWriter = assetWriter
self.videoAssetWriterInput = videoAssetWriterInput
self.writerInputPixelBufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoAssetWriterInput, sourcePixelBufferAttributes: [
kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)
])
}
private func appendVideo(sampleBuffer: CMSampleBuffer) {
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let firstTime: CMTime
if let startTime = self.startTime {
firstTime = startTime
} else {
firstTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
startTime = firstTime
}
let currentTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let diffTime: CMTime = CMTimeSubtract(currentTime, firstTime)
if writerInputPixelBufferAdapter?.assetWriterInput.isReadyForMoreMediaData ?? false {
writerInputPixelBufferAdapter?.append(pixelBuffer, withPresentationTime: diffTime)
}
}
private func createCacheDirectoryIfNeeded() throws {
guard let cacheDirectoryURL = cacheDirectoryURL else { return }
let fileManager = FileManager.default
guard !fileManager.fileExists(atPath: cacheDirectoryURL.path) else { return }
try fileManager.createDirectory(at: cacheDirectoryURL, withIntermediateDirectories: true, attributes: nil)
}
private func removeOldCachedFile() throws {
guard let cacheURL = cacheFileURL else { return }
let fileManager = FileManager.default
guard fileManager.fileExists(atPath: cacheURL.path) else { return }
try fileManager.removeItem(at: cacheURL)
}
private var cacheDirectoryURL: URL? = {
guard let path = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first else {
return nil
}
return URL(fileURLWithPath: path).appendingPathComponent("ScreenRecorder")
}()
private var cacheFileURL: URL? {
guard let cacheDirectoryURL = cacheDirectoryURL else { return nil }
return cacheDirectoryURL.appendingPathComponent("screenrecord.mp4")
}
}
@available(iOS 11.0, *)
extension ScreenRecorder {
public struct Configuration{
public var codec: AVVideoCodecType = .h264
public var fileType: AVFileType = .mp4
public var videoSize: CGSize = CGSize(
width: UIScreen.main.bounds.width,
height: UIScreen.main.bounds.height
)
public var audioQuality: AVAudioQuality = .medium
public var audioFormatID: AudioFormatID = kAudioFormatMPEG4AAC
public var numberOfChannels: UInt = 2
public var sampleRate: Double = 44100.0
public var bitrate: UInt = 16
public init() {}
}
public enum ScreenRecorderError: Error {
case notAvailable
case alreadyRunning
case notRunning
case invalidURL
}
}
And it crashes with this fatal error:
ios_record_screen[1258:213516] Fatal error: start recording failed notAvailable
I've enabled screen recording in the Settings app on my iPhone 8, and I also tried running on my friend's iPhone X, but neither phone worked.
I couldn't find helpful information on the Internet.
I hope someone can help.
I hope the problem has been resolved for those who struggled with this before.
In my case, viewDidLoad() needed to set
RPScreenRecorder.shared().delegate = self
along with the delegate extension that goes with it.
I was implementing RPScreenRecorder in a new view after it had been working normally in other views, and I ran into the same problem as the author. Comparing it with the previous, working code, the difference was that the delegate was never assigned.
Hope this helps anyone who finds this page in the future. A sketch of the setup follows.
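A minimal sketch of that setup; the delegate methods shown are the standard RPScreenRecorderDelegate callbacks, while the view controller itself is illustrative:

import ReplayKit
import UIKit

class RecordingViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Without this assignment, isAvailable stayed false in my case.
        RPScreenRecorder.shared().delegate = self
    }
}

extension RecordingViewController: RPScreenRecorderDelegate {
    func screenRecorderDidChangeAvailability(_ screenRecorder: RPScreenRecorder) {
        print("Recorder available: \(screenRecorder.isAvailable)")
    }

    func screenRecorder(_ screenRecorder: RPScreenRecorder,
                        didStopRecordingWith previewViewController: RPPreviewViewController?,
                        error: Error?) {
        if let error = error {
            print("Recording stopped with error: \(error)")
        }
    }
}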
I am working on an audio streaming application with recording functionality on the receiver side.
I am stuck at the point where the user wants to record the audio stream on the receiver.
Below is my code.
Initialisation
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false
Initialise AudioEngine
func initializeAudioEngine() {
let input = self.engine.inputNode
let format = input.inputFormat(forBus: 0)
self.engine.attach(self.player)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(input, to:mainMixerNode, format: format)
self.engine.prepare()
do {
try self.engine.start()
self.startRecording()
} catch (let error) {
print("START FAILED", error)
}
}
Start Recording
func startRecording() {
self.createRecordingFile()
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
self.isRecording = true
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
return
}
}
Create Buffer
private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
Stop Recording
func stopRecording() {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
I am trying to record using earphones, but it's not working.
It will work: once you set up the shared AVAudioSession with the play-and-record category and call setActive(true), the session captures whatever audio the device routes through it.
WebRTC does not have any internal API to start or stop recording.
We can use AVAudioSession with an AVAudioRecorder instead.
First, set up the audio session:
func setUpAudioSession() -> Bool {
let audiosession = AVAudioSession.sharedInstance()
do {
try audiosession.setCategory(.playAndRecord, mode: .default, options: [])
} catch let error {
print("--> \(error.localizedDescription)")
}
do {
try audiosession.setActive(true)
} catch let error {
print("--> \(error.localizedDescription)")
}
return audiosession.isInputAvailable
}
After setUp the audio session now start recording as below
func startRecording() -> Bool {
var settings: [String: Any] = [:]
settings[AVFormatIDKey] = kAudioFormatLinearPCM
settings[AVSampleRateKey] = 8000.0
settings[AVNumberOfChannelsKey] = 1
settings[AVLinearPCMBitDepthKey] = 16
settings[AVLinearPCMIsBigEndianKey] = false
settings[AVLinearPCMIsFloatKey] = false
settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue
// The recorded file is saved in the Documents directory
// (`dateString` is assumed to be a unique file name defined elsewhere, e.g. a timestamp).
guard let documentPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else {
return false
}
let url = URL(fileURLWithPath: "\(documentPath)/\(dateString)")
recorder = try? AVAudioRecorder(url: url, settings: settings)
// Initialize delegate, metering, etc.
recorder?.delegate = self
recorder?.isMeteringEnabled = true
recorder?.prepareToRecord()
if let recordIs = recorder {
return recordIs.record()
}
return false
}
Play recorded file
func playRecordedFile() {
// Get the path of the recorded file saved by startRecording() above
// (assumes a `player: AVAudioPlayer?` property so the player isn't
// deallocated before playback finishes).
guard let documentPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else {
return
}
let fileManager = FileManager.default
guard let recordedSounds = try? fileManager.contentsOfDirectory(atPath: documentPath),
let selectedSound = recordedSounds.first else {
return
}
let url = URL(fileURLWithPath: "\(documentPath)/\(selectedSound)")
player = try? AVAudioPlayer(contentsOf: url)
player?.delegate = self
try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
player?.prepareToPlay()
player?.play()
}
Stop recording
func stopRecording() {
recorder?.stop()
}
pauseRecording
func pauseRecording() {
recorder?.pause()
}
I am using Apple's Speech framework to recognize voice for one hour, but it only recognizes the voice for one minute.
I read that I can make more than one request to recognize voice for longer than one minute, but I don't know how.
Here is my code:
import UIKit
import Speech
public class ViewController: UIViewController, SFSpeechRecognizerDelegate {
// MARK: Properties
private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "ar_SA"))!
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?
private let audioEngine = AVAudioEngine()
#IBOutlet var textView : UITextView!
#IBOutlet var recordButton : UIButton!
var inString = ""
public override func viewDidLoad() {
super.viewDidLoad()
speechRecognizer.delegate = self
SFSpeechRecognizer.requestAuthorization { authStatus in
/*
The callback may not be called on the main thread. Add an
operation to the main queue to update the record button's state.
*/
OperationQueue.main.addOperation {
switch authStatus {
case .authorized:
print("Dalal")
case .denied:
print("Dalal2")
case .restricted:
print("Dalal3")
case .notDetermined:
print("Dalal4")
}
}
}
// Disable the record buttons until authorization has been granted.
try! startRecording()
}
private func startRecording() throws {
// Cancel the previous task if it's running.
if let recognitionTask = recognitionTask {
recognitionTask.cancel()
self.recognitionTask = nil
}
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryRecord)
try audioSession.setMode(AVAudioSessionModeMeasurement)
try audioSession.setActive(true, with: .notifyOthersOnDeactivation)
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
guard let inputNode = audioEngine.inputNode else { fatalError("Audio engine has no input node") }
guard let recognitionRequest = recognitionRequest else { fatalError("Unable to created a SFSpeechAudioBufferRecognitionRequest object") }
// Configure request so that results are returned before audio recording is finished
recognitionRequest.shouldReportPartialResults = true
// A recognition task represents a speech recognition session.
// We keep a reference to the task so that it can be cancelled.
recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { result, error in
var isFinal = false
let fileName = "Test"
let dir = try? FileManager.default.url(for: .documentDirectory,
in: .userDomainMask, appropriateFor: nil, create: true)
// If the directory was found, we write a file to it and read it back
if let fileURL = dir?.appendingPathComponent(fileName).appendingPathExtension("txt") {
// Write to the file named Test
do {
if let result = result {
self.textView.text = result.bestTranscription.formattedString
isFinal = result.isFinal
}
try self.textView.text.write(to: fileURL, atomically: true, encoding: .utf8)
} catch {
print("Failed writing to URL: \(fileURL), Error: " + error.localizedDescription)
}
if error != nil || isFinal {
self.audioEngine.stop()
// self.addp()
inputNode.removeTap(onBus: 0)
self.recognitionRequest = nil
self.recognitionTask = nil
}
do {
self.inString = try String(contentsOf: fileURL)
} catch {
print("Failed reading from URL: \(fileURL), Error: " + error.localizedDescription)
}
print("Read from the file: \(self.inString)")
}
}
let recordingFormat = inputNode.outputFormat(forBus: 0)
inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
self.recognitionRequest?.append(buffer)
}
audioEngine.prepare()
try audioEngine.start()
textView.text = "(listening)"
}
public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
print("any text")
}
}//end class
Any suggestions or help?
Thank you.
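The usual workaround for the one-minute cap is to tear down the finished request and immediately start a new one. A sketch of what the isFinal/error branch above could look like with that restart added; the short delay before restarting is an assumption:

// Inside the recognitionTask result handler, replacing the isFinal/error branch:
if error != nil || isFinal {
    self.audioEngine.stop()
    inputNode.removeTap(onBus: 0)
    self.recognitionRequest = nil
    self.recognitionTask = nil
    // Start a fresh request so recognition continues past the ~1 minute limit.
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
        try? self.startRecording()
    }
}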
I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I've tried using AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to handle the audio input management during a phone call, but have had no luck fixing the issue. When I comment out the audio input setup, the app no longer freezes during a phone call, so I'm pretty confident the issue lies somewhere with the audio management.
Why is the app freezing during phone calls and how can I fix it?
Thanks in advance!
Relevant code:
class CameraManager: NSObject {
static let shared = CameraManager()
private let notificationQueue = OperationQueue.main
var delegate: CameraManagerDelegate? = nil
let session = AVCaptureSession()
var captureDeviceInput: AVCaptureDeviceInput? = nil
var audioInput: AVCaptureDeviceInput? = nil
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureMovieFileOutput()
var isRecording: Bool {
return videoOutput.isRecording
}
func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
return device
}
func getZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().videoZoomFactor
}
func getMaxZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
}
override init() {
super.init()
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.session.startRunning()
try? self.setupCamera()
try? self.setZoomLevel(zoomLevel: 1.0)
if Settings.shared.autoRecord {
try? self.startRecording()
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.stopRecording()
self.session.stopRunning()
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
try? self.setupAudio()
}
try? self.setupSession()
}
func setupSession() throws {
session.sessionPreset = .high
if !session.isRunning {
session.startRunning()
}
if Utils.checkPermissions() {
try setupInputs()
setupOutputs()
}
}
func setupInputs() throws {
try setupCamera()
try setupAudio()
}
func setupCamera() throws {
do {
try setCamera(position: Settings.shared.defaultCamera)
} catch CameraManagerError.unableToFindCaptureDevice(let position) {
//some devices don't have a front camera, so try the back for setup
if position == .front {
try setCamera(position: .back)
}
}
}
func setupAudio() throws {
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraManagerError.unableToGetAudioDevice
}
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioInput) {
session.addInput(audioInput)
self.audioInput = audioInput
} else {
self.delegate?.unableToAddAudioInput()
}
}
func setupOutputs() {
self.photoOutput.isHighResolutionCaptureEnabled = true
guard session.canAddOutput(self.photoOutput) else {
//error
return
}
session.addOutput(self.photoOutput)
guard session.canAddOutput(self.videoOutput) else {
//error
return
}
session.addOutput(self.videoOutput)
}
func startRecording() throws {
if !self.videoOutput.isRecording {
let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")
self.videoOutput.startRecording(to: url, recordingDelegate: self)
}
}
func stopRecording() {
if self.videoOutput.isRecording {
self.videoOutput.stopRecording()
}
}
func setZoomLevel(zoomLevel: CGFloat) throws {
guard let captureDevice = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDevice
}
try captureDevice.lockForConfiguration()
captureDevice.videoZoomFactor = zoomLevel
captureDevice.unlockForConfiguration()
}
func capturePhoto() {
let photoOutputSettings = AVCapturePhotoSettings()
photoOutputSettings.flashMode = Settings.shared.flash
photoOutputSettings.isAutoStillImageStabilizationEnabled = true
photoOutputSettings.isHighResolutionPhotoEnabled = true
self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
}
func toggleCamera() throws {
if let captureDeviceInput = self.captureDeviceInput,
captureDeviceInput.device.position == .back {
try setCamera(position: .front)
} else {
try setCamera(position: .back)
}
}
func setCamera(position: AVCaptureDevice.Position) throws {
if let captureDeviceInput = self.captureDeviceInput {
if captureDeviceInput.device.position == position {
return
} else {
session.removeInput(captureDeviceInput)
}
}
var device: AVCaptureDevice? = nil
switch position {
case .front:
device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
default:
device = AVCaptureDevice.default(for: .video)
}
guard let nonNilDevice = device else {
throw CameraManagerError.unableToFindCaptureDevice(position)
}
try nonNilDevice.lockForConfiguration()
if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
nonNilDevice.focusMode = .continuousAutoFocus
}
if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
nonNilDevice.exposureMode = .continuousAutoExposure
}
nonNilDevice.unlockForConfiguration()
let input = try AVCaptureDeviceInput(device: nonNilDevice)
guard session.canAddInput(input) else {
throw CameraManagerError.unableToAddCaptureDeviceInput
}
session.addInput(input)
self.captureDeviceInput = input
}
func setFocus(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
}
func setExposure(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .autoExpose
device.unlockForConfiguration()
}
}
extension CameraManager: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.delegate?.cameraManagerWillCapturePhoto()
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else {
//error
return
}
let capturedImage = UIImage.init(data: imageData , scale: 1.0)
if let image = capturedImage {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
self.delegate?.cameraManagerDidFinishProcessingPhoto()
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
self.delegate?.cameraManagerDidStartRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
self.delegate?.cameraManagerDidFinishRecording()
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}) { saved, error in
if saved {
do {
try FileManager.default.removeItem(at: outputFileURL)
} catch _ as NSError {
//error
}
}
}
}
}
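Two things worth checking here: the interruption notification carries a reason code, which during a phone call is typically audioDeviceInUseByAnotherClient, and startRunning() is a blocking call that Apple recommends dispatching off the main thread, a common cause of exactly this kind of freeze. A sketch, assuming the session/audioInput properties from the code above; the method names are illustrative:

import AVFoundation

extension CameraManager {
    func handleInterruption(_ notification: Notification) {
        guard let value = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
              let reason = AVCaptureSession.InterruptionReason(rawValue: value) else { return }
        switch reason {
        case .audioDeviceInUseByAnotherClient:
            // Phone call: drop only the audio input and keep capturing video.
            if let audioInput = self.audioInput {
                session.removeInput(audioInput)
            }
        default:
            break // video-side interruptions need different handling
        }
    }

    func startSession() {
        // startRunning() blocks until the session starts (or fails), so keep
        // it off the main thread to avoid freezing the UI during a call.
        DispatchQueue.global(qos: .userInitiated).async {
            self.session.startRunning()
        }
    }
}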
This question already has answers at "What does 'Fatal error: Unexpectedly found nil while unwrapping an Optional value' mean?" (closed as a duplicate).
I have downloaded a project from https://github.com/doberman/speaker-gender-detect--ios. I followed the instructions, but when I run the app I get a message that says: fatal error: unexpectedly found nil while unwrapping an Optional value. How can I fix the crash so that the app works?
The app crashes here:
let genderEqualityRatios = self.calcGenderEquality (String (response.result.value!))
This is my code:
import AVFoundation
import Alamofire
import SwiftyJSON
protocol AudioRecorderDelegate {
func audioRecorder(audioRecorder: AudioRecorder?, updatedLevel: Float)
func audioRecorder(audioRecorder: AudioRecorder?, updatedGenderEqualityRatio: (male: Float, female: Float))
}
class AudioRecorder: NSObject {
static let sharedInstance: AudioRecorder = AudioRecorder()
private let kRemoteURL: NSURL = NSURL(string: "xxx.xxx.xxx.xxx")! // change to your API endpoint URL
private let kPostAudioInterval: NSTimeInterval = 10.0 // change to post to API more/less frequently
var delegate: AudioRecorderDelegate?
private let recorderSettings = [
AVSampleRateKey: NSNumber(float: Float(16000.0)),
AVFormatIDKey: NSNumber(int: Int32(kAudioFormatMPEG4AAC)),
AVNumberOfChannelsKey: NSNumber(int: 1),
AVEncoderAudioQualityKey: NSNumber(int: Int32(AVAudioQuality.High.rawValue))
]
private var recorder: AVAudioRecorder?
private var checkLevelsTimer: NSTimer?
private var postTimer: NSTimer?
private var maleDuration: Float = 0.0
private var femaleDuration: Float = 0.0
override init() {
super.init()
do {
let audioSession: AVAudioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryRecord)
} catch let err as NSError {
print("Failed to initialize AudioRecorder: \(err)")
}
}
func startRecording() {
// print("startRecording")
if self.recorder != nil && self.recorder!.recording {
self.stopRecording()
}
let audioURL: NSURL = self.getAudioURL()
// print("got audioURL: '\(audioURL)'")
do {
self.recorder = try AVAudioRecorder(URL: audioURL, settings: self.recorderSettings)
self.recorder?.meteringEnabled = true
self.recorder?.prepareToRecord()
} catch let err as NSError {
print("Failed to set up AVAudioRecorder instance: \(err)")
}
guard self.recorder != nil else { return }
self.recorder?.record()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(true)
self.checkLevelsTimer = NSTimer.scheduledTimerWithTimeInterval(0.05, target: self, selector: "checkLevels", userInfo: nil, repeats: true)
self.postTimer = NSTimer.scheduledTimerWithTimeInterval(kPostAudioInterval, target: self, selector: "onPostTimerTrigger", userInfo: nil, repeats: true)
} catch let err as NSError {
print("Failed to activate audio session (or failed to set up checkLevels timer): \(err)")
}
}
func stopRecording(shouldSubmitAudioAfterStop: Bool = false) {
// print("stopRecording")
guard self.recorder != nil else {
print("`self.recorder` is `nil` - no recording to stop")
return
}
self.recorder?.stop()
if let t = self.checkLevelsTimer {
t.invalidate()
self.checkLevelsTimer = nil
}
if let t = self.postTimer {
t.invalidate()
self.postTimer = nil
}
let audioURL: NSURL = self.recorder!.url
self.recorder = nil
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(false)
if shouldSubmitAudioAfterStop {
self.postAudio(audioURL)
} else {
// print("`shouldSubmitAudioAfterStop` is `false` - I won't post audio")
}
} catch let err as NSError {
print("Failed to deactivate audio session (or failed to post audio): \(err)")
}
}
// MARK: -
func checkLevels() {
guard self.recorder != nil else {
print("`self.recorder` is `nil` - can't check levels")
return
}
self.recorder?.updateMeters()
let averagePower: Float = self.recorder!.averagePowerForChannel(0)
if let d = self.delegate {
d.audioRecorder(self, updatedLevel: averagePower)
} else {
print("AudioRecorder - averagePower: \(averagePower)")
}
}
func onPostTimerTrigger() {
// print("onPostTimerTrigger")
guard let r = self.recorder else {
print("`self.recorder` is `nil` - no audio to post")
return
}
if !r.recording {
print("not recording - no audio to post")
}
self.stopRecording(true)
self.startRecording()
}
// MARK: -
private func getAudioURL(filename: String = "recording") -> NSURL {
let fileManager: NSFileManager = NSFileManager.defaultManager()
let urls: [NSURL] = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let documentDirectory: NSURL = urls[0] as NSURL
let uniqueFilename = "\(filename)_\(NSDate().timeIntervalSince1970)"
let audioURL: NSURL = documentDirectory.URLByAppendingPathComponent("\(uniqueFilename).m4a")
return audioURL
}
private func postAudio(audioURL: NSURL) {
// print("AudioRecorder.postAudio - audioURL: \(audioURL.absoluteString)")
Alamofire.upload(Method.POST, kRemoteURL, multipartFormData: { multipartFormData in
multipartFormData.appendBodyPart(fileURL: audioURL, name: "file")
}, encodingCompletion: { encodingResult in
switch encodingResult {
case .Success (let upload, _, _):
upload.responseString { response in
//print("response: \(response)")
let genderEqualityRatios = self.calcGenderEquality(String(response.result.value!))
if let eq = genderEqualityRatios, let d = self.delegate {
d.audioRecorder(self, updatedGenderEqualityRatio: eq)
}
}
case .Failure(let encodingError):
print("encodingError: \(encodingError)")
}
})
}
private func calcGenderEquality(response: String) -> (male: Float, female: Float)? {
guard let dataFromString = response.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: false) else {
return nil
}
let json = JSON(data: dataFromString)
for selection in json["selections"].arrayValue {
if selection["gender"] == "M" {
self.maleDuration = self.maleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
} else if selection["gender"] == "F" {
self.femaleDuration = self.femaleDuration + (selection["endTime"].floatValue - selection["startTime"].floatValue)
}
}
let spokenDuration = self.maleDuration + self.femaleDuration
let maleFactor = self.maleDuration / spokenDuration
let femaleFactor = self.femaleDuration / spokenDuration
guard !maleFactor.isNaN else {
print("Failed to calculate gender equality (`maleFactor` is `NaN`)")
return nil
}
guard !femaleFactor.isNaN else {
print("Failed to calculate gender equality (`femaleFactor` is `NaN`)")
return nil
}
return (male: maleFactor, female: femaleFactor)
}
}
Try it like this:
if let value = response.result.value {
let genderEqualityRatios = self.calcGenderEquality(value)
// use genderEqualityRatios as before (it may still be nil if parsing failed)
} else {
print("a problem occurred and we couldn't call calcGenderEquality")
}