I want my app to recognize speech from the microphone and allow audio in the background to keep playing.
My app recognizes speech coming in through the microphone and converts it to text. When my app launches, it shuts down any audio playing in the background.
Is it possible to let the background audio continue to play while my app listens for speech using the microphone?
Stripped-down code:
import UIKit
import Speech
import AVFoundation // AVAudioEngine and AVAudioSession live here

class ViewController: UIViewController {
    public private(set) var isRecording = false
    private var audioEngine: AVAudioEngine!
    private var inputNode: AVAudioInputNode!
    private var audioSession: AVAudioSession!
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override public func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        checkPermissions()
        startRecording()
        isRecording.toggle()
    }

    private func startRecording() {
        guard let recognizer = SFSpeechRecognizer(), recognizer.isAvailable else {
            handleError(withMessage: "Speech recognizer not available.")
            return
        }

        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        recognitionRequest!.shouldReportPartialResults = true
        recognizer.recognitionTask(with: recognitionRequest!) { (result, error) in
            guard error == nil else { self.handleError(withMessage: error!.localizedDescription); return }
            guard let result = result else { return }
            print(result.bestTranscription.segments)
        }

        audioEngine = AVAudioEngine()
        inputNode = audioEngine.inputNode
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, _) in
            self.recognitionRequest?.append(buffer)
        }
        audioEngine.prepare()

        do {
            audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(.record, mode: .spokenAudio, options: .duckOthers)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
            try audioEngine.start()
        } catch {
            handleError(withMessage: error.localizedDescription)
        }
    }

    private func checkPermissions() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            DispatchQueue.main.async {
                switch authStatus {
                case .authorized: break
                default: self.handlePermissionFailed()
                }
            }
        }
    }

    private func handlePermissionFailed() {
        // Present an alert asking the user to change their settings.
        let ac = UIAlertController(title: "This app must have access to speech recognition to work.",
                                   message: "Please consider updating your settings.",
                                   preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "Open settings", style: .default) { _ in
            let url = URL(string: UIApplication.openSettingsURLString)!
            UIApplication.shared.open(url)
        })
        ac.addAction(UIAlertAction(title: "Close", style: .cancel))
        present(ac, animated: true)
    }

    private func handleError(withMessage message: String) {
        // Present an alert.
        let ac = UIAlertController(title: "An error occurred", message: message, preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
    }
}
When I run my app while audio is playing in the background, my app pauses that audio. I tried exiting my app and restarting the audio, but when I return to my app it once again pauses the background audio. I would like the audio to keep playing while my app is using the microphone to listen.
I tried removing "options: .duckOthers", but it made no difference.
I believe what I want to do is possible. Shazam, for instance, can play a song on the speaker and simultaneously use the microphone to listen to it and identify it.
Try .playAndRecord instead of .record.
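A minimal sketch of that session setup; adding .mixWithOthers is my assumption here, since .playAndRecord on its own can still interrupt other apps' audio:

import AVFoundation

do {
    let session = AVAudioSession.sharedInstance()
    // .playAndRecord allows simultaneous recording and playback;
    // .mixWithOthers lets background audio keep playing instead of being paused.
    try session.setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers])
    try session.setActive(true, options: .notifyOthersOnDeactivation)
} catch {
    print("Audio session configuration failed: \(error)")
}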
Why won't RPScreenRecorder record the mic, even though it is enabled, if the permissions popup doesn't appear? It works when the popup appears, but attempts after restarting the app don't record the mic.
Here's the very simple app I made just to test this feature for a larger app.
I have tested this exact application on iOS 11 and it works every time. However, on iOS 12+ it only works when the permission popup appears, and that popup only shows up every 8 minutes. It should work every time after permissions are granted.
import ReplayKit
import UIKit

class ViewController: UIViewController, RPPreviewViewControllerDelegate {
    private let recorder = RPScreenRecorder.shared()
    private var isRecording = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    @IBAction func react() {
        if !isRecording {
            let alert = UIAlertController(title: "Record", message: "Would you like to record a video?", preferredStyle: .alert)
            let okay = UIAlertAction(title: "Okay", style: .destructive, handler: { (action: UIAlertAction) in
                self.startRecording()
            })
            alert.addAction(okay)
            self.present(alert, animated: true, completion: nil)
        } else {
            stopRecording()
        }
    }

    private func startRecording() {
        guard self.recorder.isAvailable else {
            print("Recording is not available at this time.")
            return
        }
        self.recorder.isMicrophoneEnabled = true
        self.recorder.startRecording { [unowned self] (error) in
            guard error == nil else {
                print("There was an error starting the recording.")
                return
            }
            print("Started Recording Successfully")
            self.isRecording = true
        }
    }

    private func stopRecording() {
        recorder.stopRecording { [unowned self] (preview, error) in
            print("Stopped recording")
            guard let preview = preview else {
                print("Preview controller is not available.")
                return
            }
            let alert = UIAlertController(title: "Recording Finished", message: "Would you like to edit or delete your recording?", preferredStyle: .alert)
            let deleteAction = UIAlertAction(title: "Delete", style: .destructive, handler: { (action: UIAlertAction) in
                self.recorder.discardRecording(handler: { () -> Void in
                    print("Recording successfully deleted.")
                })
            })
            let editAction = UIAlertAction(title: "Edit", style: .default, handler: { (action: UIAlertAction) -> Void in
                preview.previewControllerDelegate = self
                self.present(preview, animated: true, completion: nil)
            })
            alert.addAction(editAction)
            alert.addAction(deleteAction)
            self.present(alert, animated: true, completion: nil)
            self.isRecording = false
        }
    }

    func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
        dismiss(animated: true)
    }
}
I expect the mic to record every single time after permissions are granted; however, it appears to record the mic only during the sessions in which it asks for those permissions.
This appears to have been fixed in iOS 13. The OS now asks for permission every time you request to record the screen. I still don't have a fix for iOS 12, however.
I would like to add a feature for users to record their ARKit experience. I'm taking the capturedImage of each ARFrame supplied by session(_ session: ARSession, didUpdate frame: ARFrame) and concatenating them into a video.
Unfortunately, ARFrame.capturedImage shows the frame of video captured by the camera, but doesn't include nodes placed by ARKit.
Is there any way to capture video coming from an ARSCNView?
I've tried this library, but it has major bugs (no shadows, a large stutter at the beginning of recording). I'd also rather not use ReplayKit for this project.
Here is what I'm using to turn ARFrame.capturedImage into a UIImage, and subsequently, a video.
extension UIImage {
    convenience init(pixelBuffer: CVPixelBuffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let size = CGSize(width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
        // Note: creating a CIContext per image is expensive; reuse a single context in production.
        let tempContext = CIContext()
        let image = tempContext.createCGImage(ciImage, from: CGRect(origin: .zero, size: size))!
        // This assumes we're using an iPhone in portrait.
        self.init(cgImage: image, scale: 1, orientation: .right)
    }
}
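For reference, here is a minimal sketch of the writing side of that approach: appending each captured CVPixelBuffer straight to an AVAssetWriter, skipping the UIImage round-trip. The class name, dimensions, and timing policy are my own assumptions, not code from the project:

import AVFoundation

// Minimal sketch: append CVPixelBuffers (e.g. ARFrame.capturedImage) to an .mp4.
final class FrameWriter {
    private let writer: AVAssetWriter
    private let input: AVAssetWriterInput
    private let adaptor: AVAssetWriterInputPixelBufferAdaptor

    init(outputURL: URL, width: Int, height: Int) throws {
        writer = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
        let settings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ]
        input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
        input.expectsMediaDataInRealTime = true
        adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,
                                                       sourcePixelBufferAttributes: nil)
        writer.add(input)
        guard writer.startWriting() else {
            throw writer.error ?? NSError(domain: "FrameWriter", code: -1)
        }
        // Presentation times passed to append(_:at:) should be relative to this start.
        writer.startSession(atSourceTime: .zero)
    }

    // Call once per frame, e.g. with frame.capturedImage and a time relative to the first frame.
    func append(_ pixelBuffer: CVPixelBuffer, at seconds: TimeInterval) {
        guard input.isReadyForMoreMediaData else { return } // drop the frame if the writer is busy
        let time = CMTime(seconds: seconds, preferredTimescale: 600)
        if !adaptor.append(pixelBuffer, withPresentationTime: time) {
            print("Failed to append frame: \(String(describing: writer.error))")
        }
    }

    func finish(completion: @escaping () -> Void) {
        input.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}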
I actually found a library to do this. It's SceneKitVideoRecorder.
I don't fully understand how it works yet, but the important code is located in SceneKitVideoRecorder.swift.
You can try to use ReplayKit. I use ReplayKit in my AR app to record when I place a model in the scene, and more.
Try this snippet of mine:
import ReplayKit
import UIKit

class YourController: UIViewController, RPPreviewViewControllerDelegate {
    private let recorder = RPScreenRecorder.shared()
    private var isRecording = false

    @IBAction func shotVideo(_ sender: UIButton) {
        print("Video")
        if !isRecording {
            startRecording()
        } else {
            stopRecording()
        }
    }

    func startRecording() {
        guard recorder.isAvailable else {
            print("The recording isn't available now.")
            return
        }
        recorder.isMicrophoneEnabled = false
        recorder.startRecording { [unowned self] (error) in
            guard error == nil else {
                print("Trouble with starting this recording.")
                return
            }
            print("Recording started with success.")
            self.isRecording = true
        }
    }

    func stopRecording() {
        recorder.stopRecording { (preview, error) in
            print("The recording is stopped")
            guard let preview = preview else {
                print("The preview controller isn't available.")
                return
            }
            let alert = UIAlertController(title: "End Recording", message: "Want to edit or delete this recording?", preferredStyle: .alert)
            let deleteAction = UIAlertAction(title: "Delete", style: .destructive, handler: { (action: UIAlertAction) in
                self.recorder.discardRecording(handler: { () -> Void in
                    print("Recording successfully deleted.")
                })
            })
            let editAction = UIAlertAction(title: "Edit", style: .default, handler: { (action: UIAlertAction) -> Void in
                preview.previewControllerDelegate = self
                self.present(preview, animated: true, completion: nil)
            })
            alert.addAction(editAction)
            alert.addAction(deleteAction)
            self.present(alert, animated: true, completion: nil)
            self.isRecording = false
        }
    }

    // RPPreviewViewControllerDelegate
    func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
        dismiss(animated: true)
    }
}
This is my code; I hope it helps :).
I've made an app that worked fine on my iPhone 6 Plus. The app contains a QR code scanning feature, which obviously starts the camera. The camera runs full screen on my iPhone 6 Plus but not on my iPhone X: it still works, but there are white bars above and below the camera view. Any tips?
Here's the code of the scanner:
import UIKit
import AVFoundation
import SafariServices

class Attend: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    @IBOutlet weak var border: UIImageView!
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        let session = AVCaptureSession()
        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubviewToFront(border)
        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject, object.type == .qr {
            let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
            alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
            alert.addAction(UIAlertAction(title: "Open", style: .default, handler: { _ in
                UIPasteboard.general.string = object.stringValue
                self.loadSafari(url: object.stringValue ?? "")
            }))
            present(alert, animated: true, completion: nil)
        }
    }

    func loadSafari(url: String) {
        guard let url = URL(string: url) else { return }
        let safariController = SFSafariViewController(url: url)
        present(safariController, animated: true, completion: nil)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
1 - Use LaunchScreen.storyboard instead of launch images; your letterboxing problem will be solved (without a launch storyboard, the app runs in compatibility mode on iPhone X).
2 - Pin your top constraints to the safe area of your view.
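If the scanner UI is laid out in code rather than Interface Builder, the same fix looks roughly like this (scannerView is a placeholder name for whatever view hosts the preview layer):

// Hypothetical container view pinned to the safe area so it isn't clipped on iPhone X.
scannerView.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
    scannerView.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor),
    scannerView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
    scannerView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
    scannerView.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor)
])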
I am using Twilio Programmable Video to connect two users in an audio chat. I want to give the user the option to record their screen during the audio session, so I am using ReplayKit. Everything works, except the audio on the recording cuts out as soon as Twilio starts.
Is there some conflict between the type of audio Twilio uses and ReplayKit's audio capture?
I experienced something like this before when trying to add sound while Twilio was active: it would cause Twilio's audio to cut out as soon as another sound was played.
Edit: I've tried different approaches, so I only have my latest changes, but here is the code I am using for ReplayKit. It's just the standard start, stop, and preview recording.
func startRecording() {
    guard recorder.isAvailable else {
        print("Recording not available")
        return
    }
    recorder.isMicrophoneEnabled = true
    recorder.startRecording { [unowned self] (error) in
        guard error == nil else {
            print("error starting the recording")
            return
        }
        print("Started Recording Successfully")
        self.isRecording = true
    }
}

func stopRecording() {
    recorder.stopRecording { [unowned self] (preview, error) in
        print("Stopped recording")
        guard let preview = preview else {
            print("Preview controller not available")
            return
        }
        let alert = UIAlertController(title: "Recording Finished", message: "Would you like to edit or delete your recording?", preferredStyle: .alert)
        let deleteAction = UIAlertAction(title: "Delete", style: .destructive, handler: { (action: UIAlertAction) in
            self.recorder.discardRecording(handler: { () -> Void in
                print("Recording successfully deleted.")
            })
        })
        let editAction = UIAlertAction(title: "Edit", style: .default, handler: { (action: UIAlertAction) -> Void in
            preview.previewControllerDelegate = self
            self.present(preview, animated: true, completion: nil)
        })
        alert.addAction(editAction)
        alert.addAction(deleteAction)
        self.present(alert, animated: true, completion: nil)
        self.isRecording = false
    }
}

func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
    dismiss(animated: true)
}
I've also tried the screen-recording feature in Control Center. It can capture audio from other apps, but when Twilio starts in my app, the audio on the recording goes silent and comes back when Twilio stops. This is what makes me think there is some conflict between Twilio and ReplayKit, but maybe there is a way to capture it that I don't know about.
I also tried .startCapture instead of .startRecording, but I don't think I was using it correctly, and I haven't been able to find much documentation on it.
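For reference, .startCapture hands your app raw sample buffers instead of producing a finished recording, so you have to write them out yourself (for example with an AVAssetWriter). A rough sketch, with the buffer handling left as a stub:

import ReplayKit

RPScreenRecorder.shared().startCapture(handler: { sampleBuffer, bufferType, error in
    guard error == nil else { return }
    switch bufferType {
    case .video:
        break // screen frames; feed these to an AVAssetWriter
    case .audioApp:
        break // app audio; this is where Twilio's audio would show up, if it can be captured
    case .audioMic:
        break // microphone audio
    @unknown default:
        break
    }
}, completionHandler: { error in
    if let error = error { print("Capture failed to start: \(error)") }
})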
To prevent a new audio source from interrupting the previous one, you can set its category to ambient:
try AVAudioSession.sharedInstance().setCategory(.ambient, mode: .default, options: [])
In your case, I think you should apply it to the Twilio session.
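If it helps, the same call wrapped in the usual do/catch with session activation (a sketch, not Twilio's documented setup; .ambient is already a mixable category, so .mixWithOthers here is redundant but explicit):

do {
    // Mark this app's audio as mixable so it doesn't interrupt other sources.
    try AVAudioSession.sharedInstance().setCategory(.ambient, mode: .default, options: [.mixWithOthers])
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Audio session configuration failed: \(error)")
}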
I'm using an iPhone 6s Plus; here is the code for the speech recognition view controller:
import Speech
import UIKit
import AVFoundation // AVAudioEngine and AVAudioSession live here

protocol SpeechRecognitionDelegate: AnyObject {
    func speechRecognitionComplete(query: String?)
    func speechRecognitionCancelled()
}

class SpeechRecognitionViewController: UIViewController, SFSpeechRecognizerDelegate {
    var textView: UITextView!
    private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()
    private var query: String?
    weak var delegate: SpeechRecognitionDelegate?
    var isListening: Bool = false

    init(delegate: SpeechRecognitionDelegate, frame: CGRect) {
        super.init(nibName: nil, bundle: nil)
        self.delegate = delegate
        self.view.frame = frame
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    enum ErrorMessage: String {
        case denied = "To enable Speech Recognition go to Settings -> Privacy."
        case notDetermined = "Authorization not determined - please try again."
        case restricted = "Speech Recognition is restricted on this device."
        case noResults = "No results found - please try a different search."
    }

    func displayErrorAlert(message: ErrorMessage) {
        let alertController = UIAlertController(title: nil,
                                                message: message.rawValue,
                                                preferredStyle: .alert)
        let alertAction = UIAlertAction(title: "OK", style: .default, handler: nil)
        alertController.addAction(alertAction)
        OperationQueue.main.addOperation {
            self.present(alertController, animated: true, completion: nil)
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        speechRecognizer?.delegate = self
        // Initialize textView and add it to self.view.
    }

    func startListening() {
        guard !isListening else { return }
        isListening = true

        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else {
            print("SpeechRecognitionViewController recognitionRequest \(String(describing: self.recognitionRequest))")
            return
        }
        recognitionRequest.shouldReportPartialResults = true

        recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
            var isFinal = false
            if let result = result {
                self.query = result.bestTranscription.formattedString
                self.textView.text = self.query
                isFinal = result.isFinal
            }
            if error != nil || isFinal {
                print("recognitionTask error = \(String(describing: error?.localizedDescription))")
                self.stopListening()
            }
        })

        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(.record, mode: .measurement, options: [])
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("Audio session isn't configured correctly")
        }

        let recordingFormat = audioEngine.inputNode.outputFormat(forBus: 0)
        audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, time) in
            self.recognitionRequest?.append(buffer)
        }
        audioEngine.prepare()
        do {
            try audioEngine.start()
            textView.text = "Listening..."
        } catch {
            print("Audio engine failed to start")
        }
    }

    func stopListening() {
        guard isListening else { return }
        audioEngine.stop()
        audioEngine.inputNode.removeTap(onBus: 0)
        recognitionRequest = nil
        recognitionTask = nil
        isListening = false
    }

    // MARK: SFSpeechRecognizerDelegate
    func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
        if !available {
            let alertController = UIAlertController(title: nil,
                                                    message: "Speech Recognition is currently unavailable.",
                                                    preferredStyle: .alert)
            let alertAction = UIAlertAction(title: "OK", style: .default) { (alertAction) in
                self.stopListening()
            }
            alertController.addAction(alertAction)
            present(alertController, animated: true)
        }
    }
}
This VC is embedded in another view controller.
When a button is tapped in the parent view controller, startListening() is called. When the same button is tapped again, stopListening() is called.
The first time, speech recognition works just fine. On a second try I get this error (I guess it has to do with grammar loading?):
recognitionTask error = Optional("The operation couldn’t be completed. (kAFAssistantErrorDomain error 209.)")
and speech recognition doesn't work anymore. After 30 seconds I get the timeout error:
Optional(Error Domain=kAFAssistantErrorDomain Code=203 "Timeout" UserInfo={NSLocalizedDescription=Timeout, NSUnderlyingError=0x170446f90 {Error Domain=SiriSpeechErrorDomain Code=100 "(null)"}})
The original code is here: SayWhat
What am I missing?
All I had to do was add recognitionRequest?.endAudio() when stopping listening:
func stopListening() {
    guard isListening else { return }
    audioEngine.stop()
    audioEngine.inputNode.removeTap(onBus: 0)
    // Indicate that the audio source is finished and no more audio will be appended.
    recognitionRequest?.endAudio()
    recognitionRequest = nil
    recognitionTask = nil
    isListening = false
}
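If the request still times out even with endAudio(), some implementations also cancel the in-flight task before releasing it; whether that's needed here is an assumption to verify rather than part of the original fix:

// Hypothetical addition: cancel any in-flight recognition task before discarding it.
recognitionTask?.cancel()
recognitionTask = nil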