I haven't managed to find any code about this issue.
I have recorded an MP4 file (audioFile.mp4) and now I want to stream it through a socket, but I'm having trouble with the conversion.
I know FFmpeg (https://www.ffmpeg.org/) exists, but I haven't found any example code for it.
I would appreciate any ideas. My code:
class ViewController: UIViewController {
var requestManager = RequestManager()
var socket: WebSocket?
var audioRecorder: AVAudioRecorder!
@IBOutlet weak var recordBtn: UIButton!
@IBOutlet weak var playBtn: UIButton!
var fileName: String = "audioFile.mp4"
var soundRecorder: AVAudioRecorder?
var soundPlayer: AVAudioPlayer?
var audioSession = AVAudioSession.sharedInstance()
override func viewDidLoad() {
super.viewDidLoad()
self.socket?.delegate = self
setUpRecorder()
playBtn.isEnabled = false
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .defaultToSpeaker)
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
} catch {
print(error)
}
}
func getDocDirector() -> URL {
let path = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return path[0]
}
func setUpRecorder() {
let audioFileName = getDocDirector().appendingPathComponent(fileName)
let recordSettings: [String: Any] = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue,
AVNumberOfChannelsKey: 1,
AVEncoderBitRateKey: 16000,
AVSampleRateKey: 16000]
do {
soundRecorder = try AVAudioRecorder(url: audioFileName, settings: recordSettings)
soundRecorder?.delegate = self
soundRecorder?.prepareToRecord()
} catch {
print(error)
}
}
func setUpPlayer() {
let audioFileName = getDocDirector().appendingPathComponent(fileName)
do {
soundPlayer = try AVAudioPlayer(contentsOf: audioFileName)
soundPlayer?.delegate = self
soundPlayer?.prepareToPlay()
soundPlayer?.volume = 1.0
} catch {
print(error)
}
}
@IBAction func recordAction(_ sender: Any) {
if recordBtn.titleLabel?.text == "Record" {
soundRecorder?.record()
recordBtn.setTitle("Stop", for: .normal)
playBtn.isEnabled = false
} else {
soundRecorder?.stop()
recordBtn.setTitle("Record", for: .normal)
playBtn.isEnabled = false
}
}
@IBAction func playAction(_ sender: Any) {
if playBtn.titleLabel?.text == "Play" {
playBtn.setTitle("Stop", for: .normal)
recordBtn.isEnabled = false
setUpPlayer()
soundPlayer?.play()
} else {
playBtn.setTitle("Play", for: .normal)
recordBtn.isEnabled = false
}
}
func openSocket() {
getUrl(success: { [weak self] (url) in
self?.socket = WebSocket(url: URL(string: url)!)
self?.socket?.connect()
}) { (e) in
//
}
}
}
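One note on the conversion: a file recorded with kAudioFormatMPEG4AAC into an .mp4 is an MPEG-4 container whose index (the moov atom) is generally finalized only when recording stops, so a receiver usually cannot decode partial chunks mid-recording; live streaming pipelines typically use raw PCM or ADTS-framed AAC instead. If sending the finished file is enough, a minimal sketch, assuming the Starscream WebSocket library (which matches the WebSocket(url:) initializer above), is to read the file into Data and write it over the socket in chunks:
// Hypothetical sender: push the finished recording over the open socket in chunks.
func streamRecordedFile() {
    let fileURL = getDocDirector().appendingPathComponent(fileName)
    guard let audioData = try? Data(contentsOf: fileURL) else {
        print("Could not read \(fileURL)")
        return
    }
    let chunkSize = 4096
    var offset = 0
    while offset < audioData.count {
        let end = min(offset + chunkSize, audioData.count)
        socket?.write(data: audioData.subdata(in: offset..<end)) // Starscream's binary write
        offset = end
    }
}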
For context, I'm developing an app that records for long periods, both in the foreground and in the background. I'm facing two problems using AVAudioRecorder:
1. My recording resumes fine when, for example, I play audio in WhatsApp or a video in Instagram. But when I play a song in Apple Music or play a voice note, the recording doesn't resume.
2. If I'm in a phone call and I start recording, my app crashes with the following output: (OSStatus error 561017449.)
I think I also need to handle input/output device changes so my app doesn't crash.
This is the code for the class where I record (the class also plays back the recording so I can check that it works):
class RecordViewController: UIViewController, AVAudioPlayerDelegate , AVAudioRecorderDelegate {
@IBOutlet weak var principalLabel: UILabel!
@IBOutlet weak var loginLabel: UILabel!
@IBOutlet weak var recordBTN: UIButton!
@IBOutlet weak var playBTN: UIButton!
var audioRecorder : AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .white
check_record_permission()
checkActivateMicrophone()
NotificationCenter.default.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: audioRecorder)
let tapRegister: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.goToLogin))
loginLabel.addGestureRecognizer(tapRegister)
loginLabel.isUserInteractionEnabled = true
}
@objc func goToLogin() {
self.performSegue(withIdentifier: "goToLogin", sender: self)
}
@objc func handleInterruption(notification: Notification) {
guard let userInfo = notification.userInfo,
let typeInt = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeInt) else {
return
}
switch type {
case .began:
if isRecording {
print("OOOOOOO")
audioRecorder.pause()
}
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
audioRecorder.record()
print("AAAAAAA")
} else {
print("III")
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
self.audioRecorder.record()
}
}
default: ()
}
}
func check_record_permission()
{
switch AVAudioSession.sharedInstance().recordPermission {
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
default:
break
}
}
func checkActivateMicrophone() {
if !isAudioRecordingGranted {
playBTN.isEnabled = false
playBTN.alpha = 0.5
recordBTN.isEnabled = false
recordBTN.alpha = 0.5
principalLabel.text = "Activa el micrófono desde ajustes"
} else {
playBTN.isEnabled = true
playBTN.alpha = 1
recordBTN.isEnabled = true
recordBTN.alpha = 1
principalLabel.text = "¡Prueba las grabaciones!"
}
}
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
func getFileUrl() -> URL
{
let filename = "myRecording.m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
func setup_recorder()
{
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
print(error.localizedDescription)
}
}
else
{
print("AAAAA")
}
}
@IBAction func recordAct(_ sender: Any) {
if(isRecording) {
finishAudioRecording(success: true)
recordBTN.setTitle("Record", for: .normal)
playBTN.isEnabled = true
isRecording = false
}
else
{
setup_recorder()
audioRecorder.record()
recordBTN.setTitle("Stop", for: .normal)
playBTN.isEnabled = false
isRecording = true
}
}
func finishAudioRecording(success: Bool)
{
if success
{
audioRecorder.stop()
audioRecorder = nil
print("recorded successfully.")
}
else
{
print("Recording failed.")
}
}
func prepare_play()
{
do
{
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
}
catch{
print("Error")
}
}
@IBAction func playAct(_ sender: Any) {
if(isPlaying)
{
audioPlayer.stop()
recordBTN.isEnabled = true
playBTN.setTitle("Play", for: .normal)
isPlaying = false
}
else
{
if FileManager.default.fileExists(atPath: getFileUrl().path)
{
recordBTN.isEnabled = false
playBTN.setTitle("pause", for: .normal)
prepare_play()
audioPlayer.play()
isPlaying = true
}
else
{
print("Audio file is missing.")
}
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
print("UUU")
finishAudioRecording(success: false)
}
playBTN.isEnabled = true
}
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
recordBTN.isEnabled = true
}
}
I implemented the "Background Mode" of Audio, AirPlay and Picture in Picture
I am creating a radio application for iPhone (coded in Swift 4.2) and I want to add a function that records the sound produced by my radio (played from an AVPlayer) and saves it to a file when I push a button. Which code should I use?
The code is in Swift 4.2, with Xcode 10.1.
I searched the web for "How to record an audio stream swift 4.2" and "How to record audio from AVPlayer swift 4.2", but I couldn't find an answer.
My code:
import UIKit
import AVFoundation
import MediaPlayer
class ViewControllerPlayer: UIViewController {
var URl = "http://link_of_audio_stream"
var player:AVPlayer?
var playerItem:AVPlayerItem?
var playerLayer:AVPlayerLayer?
override func viewDidLoad() {
super.viewDidLoad()
let url = URL(string: URl)
let playerItem1:AVPlayerItem = AVPlayerItem(url: url!)
player = AVPlayer(playerItem: playerItem1)
}
@IBAction func Play(_ sender: Any) {
player?.play()
}
@IBAction func Pause(_ sender: Any) {
player?.pause()
}
private var audioRecorder: AVAudioRecorder!
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self as? AVAudioRecorderDelegate
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration)
//error: Use of unresolved identifier 'TimeConstants'
} catch let error {
print(error)
}
}
func STOPREC1() throws {
audioRecorder.stop()
audioRecorder = nil
print("Recording finished successfully.")
}
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let date = String(describing: Date())
let fullFileName = "Enregistrement radio " + date + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
}
extension FileManager {
class func getAppFolderURL() -> URL? {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
}
After multiple internet searches, I found the solution.
I found this Swift class somewhere on the internet, named «CachingPlayerItem.swift». It allows you to record an online audio stream.
import Foundation
import AVFoundation
fileprivate extension URL {
func withScheme(_ scheme: String) -> URL? {
var components = URLComponents(url: self, resolvingAgainstBaseURL: false)
components?.scheme = scheme
return components?.url
}
}
@objc protocol CachingPlayerItemDelegate {
/// Called when the media file is fully downloaded.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data)
/// Called every time a new portion of data is received.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didDownloadBytesSoFar bytesDownloaded: Int, outOf bytesExpected: Int)
/// Called after initial prebuffering is finished, meaning we are ready to play.
@objc optional func playerItemReadyToPlay(_ playerItem: CachingPlayerItem)
/// Called when the data being downloaded did not arrive in time to continue playback.
@objc optional func playerItemPlaybackStalled(_ playerItem: CachingPlayerItem)
/// Called on a downloading error.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error)
}
open class CachingPlayerItem: AVPlayerItem {
class ResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate, URLSessionDataDelegate, URLSessionTaskDelegate {
var playingFromData = false
var mimeType: String? // is required when playing from Data
var session: URLSession?
var mediaData: Data?
var response: URLResponse?
var pendingRequests = Set<AVAssetResourceLoadingRequest>()
weak var owner: CachingPlayerItem?
var fileURL: URL!
var outputStream: OutputStream?
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
if playingFromData {
// Nothing to load.
} else if session == nil {
// If we're playing from a url, we need to download the file.
// We start loading the file on first request only.
guard let initialUrl = owner?.url else {
fatalError("internal inconsistency")
}
startDataRequest(with: initialUrl)
}
pendingRequests.insert(loadingRequest)
processPendingRequests()
return true
}
func startDataRequest(with url: URL) {
var recordingName = "record.mp3"
if let recording = owner?.recordingName{
recordingName = recording
}
fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
.appendingPathComponent(recordingName)
let configuration = URLSessionConfiguration.default
configuration.requestCachePolicy = .reloadIgnoringLocalAndRemoteCacheData
session = URLSession(configuration: configuration, delegate: self, delegateQueue: nil)
session?.dataTask(with: url).resume()
outputStream = OutputStream(url: fileURL, append: true)
outputStream?.schedule(in: RunLoop.current, forMode: RunLoop.Mode.default)
outputStream?.open()
}
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, didCancel loadingRequest: AVAssetResourceLoadingRequest) {
pendingRequests.remove(loadingRequest)
}
// MARK: URLSession delegate
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
// Append each received chunk to the local recording file.
_ = data.withUnsafeBytes { outputStream?.write($0, maxLength: data.count) }
}
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive response: URLResponse, completionHandler: @escaping (URLSession.ResponseDisposition) -> Void) {
completionHandler(Foundation.URLSession.ResponseDisposition.allow)
mediaData = Data()
self.response = response
processPendingRequests()
}
func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
if let errorUnwrapped = error {
owner?.delegate?.playerItem?(owner!, downloadingFailedWith: errorUnwrapped)
return
}
processPendingRequests()
owner?.delegate?.playerItem?(owner!, didFinishDownloadingData: mediaData!)
}
// MARK: -
func processPendingRequests() {
// get all fulfilled requests
let requestsFulfilled = Set<AVAssetResourceLoadingRequest>(pendingRequests.compactMap {
self.fillInContentInformationRequest($0.contentInformationRequest)
if self.haveEnoughDataToFulfillRequest($0.dataRequest!) {
$0.finishLoading()
return $0
}
return nil
})
// remove fulfilled requests from pending requests
_ = requestsFulfilled.map { self.pendingRequests.remove($0) }
}
func fillInContentInformationRequest(_ contentInformationRequest: AVAssetResourceLoadingContentInformationRequest?) {
if playingFromData {
contentInformationRequest?.contentType = self.mimeType
contentInformationRequest?.contentLength = Int64(mediaData!.count)
contentInformationRequest?.isByteRangeAccessSupported = true
return
}
guard let responseUnwrapped = response else {
// have no response from the server yet
return
}
contentInformationRequest?.contentType = responseUnwrapped.mimeType
contentInformationRequest?.contentLength = responseUnwrapped.expectedContentLength
contentInformationRequest?.isByteRangeAccessSupported = true
}
func haveEnoughDataToFulfillRequest(_ dataRequest: AVAssetResourceLoadingDataRequest) -> Bool {
let requestedOffset = Int(dataRequest.requestedOffset)
let requestedLength = dataRequest.requestedLength
let currentOffset = Int(dataRequest.currentOffset)
guard let songDataUnwrapped = mediaData,
songDataUnwrapped.count > currentOffset else {
return false
}
let bytesToRespond = min(songDataUnwrapped.count - currentOffset, requestedLength)
let dataToRespond = songDataUnwrapped.subdata(in: Range(uncheckedBounds: (currentOffset, currentOffset + bytesToRespond)))
dataRequest.respond(with: dataToRespond)
return songDataUnwrapped.count >= requestedLength + requestedOffset
}
deinit {
session?.invalidateAndCancel()
}
}
fileprivate let resourceLoaderDelegate = ResourceLoaderDelegate()
fileprivate let url: URL
fileprivate let initialScheme: String?
fileprivate var customFileExtension: String?
weak var delegate: CachingPlayerItemDelegate?
func stopDownloading(){
resourceLoaderDelegate.session?.invalidateAndCancel()
}
open func download() {
if resourceLoaderDelegate.session == nil {
resourceLoaderDelegate.startDataRequest(with: url)
}
}
private let cachingPlayerItemScheme = "cachingPlayerItemScheme"
var recordingName = "record.mp3"
/// Is used for playing remote files.
convenience init(url: URL, recordingName: String) {
self.init(url: url, customFileExtension: nil, recordingName: recordingName)
}
/// Override/append custom file extension to URL path.
/// This is required for the player to work correctly with the intended file type.
init(url: URL, customFileExtension: String?, recordingName: String) {
guard let components = URLComponents(url: url, resolvingAgainstBaseURL: false),
let scheme = components.scheme,
var urlWithCustomScheme = url.withScheme(cachingPlayerItemScheme) else {
fatalError("Urls without a scheme are not supported")
}
self.recordingName = recordingName
self.url = url
self.initialScheme = scheme
if let ext = customFileExtension {
urlWithCustomScheme.deletePathExtension()
urlWithCustomScheme.appendPathExtension(ext)
self.customFileExtension = ext
}
let asset = AVURLAsset(url: urlWithCustomScheme)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
/// Is used for playing from Data.
init(data: Data, mimeType: String, fileExtension: String) {
guard let fakeUrl = URL(string: cachingPlayerItemScheme + "://whatever/file.\(fileExtension)") else {
fatalError("internal inconsistency")
}
self.url = fakeUrl
self.initialScheme = nil
resourceLoaderDelegate.mediaData = data
resourceLoaderDelegate.playingFromData = true
resourceLoaderDelegate.mimeType = mimeType
let asset = AVURLAsset(url: fakeUrl)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
// MARK: KVO
override open func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
delegate?.playerItemReadyToPlay?(self)
}
// MARK: Notification handlers
@objc func playbackStalledHandler() {
delegate?.playerItemPlaybackStalled?(self)
}
// MARK: -
override init(asset: AVAsset, automaticallyLoadedAssetKeys: [String]?) {
fatalError("not implemented")
}
deinit {
NotificationCenter.default.removeObserver(self)
removeObserver(self, forKeyPath: "status")
resourceLoaderDelegate.session?.invalidateAndCancel()
}
}
Then, in your main Swift file, use this code to start recording (note recordingName must be an Optional var so it can be cleared later):
var recordingName: String? = "my_rec_name.mp3"
var playerItem: CachingPlayerItem!
let url_stream = URL(string: "http://my_url_stream_link")
playerItem = CachingPlayerItem(url: url_stream!, recordingName: recordingName ?? "record.mp3")
let player1 = AVPlayer(playerItem: playerItem)
player1.automaticallyWaitsToMinimizeStalling = false
And to stop the recording, use this code:
playerItem.stopDownloading()
recordingName = nil
playerItem = nil
Recordings will be saved in your app's Documents directory.
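If you need a callback when the stream has been fully saved, conform to the CachingPlayerItemDelegate protocol declared above and assign the delegate. A minimal sketch; using the ViewControllerPlayer class from the question is my assumption, and remember to set playerItem.delegate = self after creating the item:
extension ViewControllerPlayer: CachingPlayerItemDelegate {
    func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data) {
        // The file itself was already written incrementally by the OutputStream.
        print("Stream saved, \(data.count) bytes downloaded.")
    }

    func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error) {
        print("Recording failed: \(error)")
    }
}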
I had a really hard time with this one, so I am posting an answer.
Remember to add the microphone and speech-recognition usage keys (NSMicrophoneUsageDescription and NSSpeechRecognitionUsageDescription) to your Info.plist.
Here is my controller, which records the voice input and returns it to a previous controller:
import Foundation
import UIKit
import Speech
class SpeechToTextViewController: UIViewController {
@IBOutlet weak var animationView: UIView!
@IBOutlet weak var circleView: UIView!
@IBOutlet weak var micImage: UIImageView!
@IBOutlet weak var listeningLabel: UILabel!
@IBOutlet weak var buttonStartView: UIView!
@IBOutlet weak var cancelRecordingButton: UIButton!
@IBOutlet weak var stopRecordingButton: UIButton!
@IBOutlet weak var startRecordingButton: UIButton!
private let audioEngine = AVAudioEngine()
private let speechRecognizer = SFSpeechRecognizer(locale: Locale.init(identifier:"en-US"))
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest!
private var recognitionTask: SFSpeechRecognitionTask?
private var isRecording: Bool = false
var delegate: SpeechToTextViewDelegate?
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor(white: 1.0, alpha: 0.25)
self.stopRecordingButton.isHidden = true
self.listeningLabel.isHidden = true
}
@IBAction func startStopRecording(_ sender: Any) {
isRecording = !isRecording
if isRecording && !audioEngine.isRunning {
self.cancelRecordingButton.isHidden = true
self.startRecordingButton.isHidden = true
self.stopRecordingButton.isHidden = false
self.listeningLabel.isHidden = false
UIView.animate(withDuration: 1, animations: {}) { _ in
UIView.animate(withDuration: 1, delay: 0.25, options: [.autoreverse, .repeat], animations: {
self.circleView.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
})
}
do {
try recordSpeech()
} catch {
print(error)
}
} else {
self.listeningLabel.isHidden = true
stopRecording()
}
}
func recordSpeech() throws {
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
let node = audioEngine.inputNode
let recordingFormat = node.outputFormat(forBus: 0)
node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) {buffer, _ in
self.recognitionRequest.append(buffer)
}
audioEngine.prepare()
try audioEngine.start()
guard let myRecognizer = SFSpeechRecognizer() else {
print("myRecognizer is unable to be created")
return
}
if !myRecognizer.isAvailable
{
print("myRecognizer is not available")
return
}
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { result, error in
var isFinal = false
if let result = result
{
isFinal = result.isFinal
self.delegate?.appendMessage(result.bestTranscription.formattedString)
}
if error != nil || isFinal {
if error != nil {
print("error trying to capture speech to text")
print(error!)
}
self.stopRecording()
}
})
}
func stopRecording() {
if audioEngine.isRunning {
self.audioEngine.stop()
self.recognitionRequest.endAudio()
// Cancel the previous task if it's running
if let recognitionTask = recognitionTask {
recognitionTask.cancel()
self.recognitionTask = nil
}
}
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
@IBAction func cancelRecording(_ sender: Any) {
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
}
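The controller above assumes the user has already granted microphone and speech-recognition permission. A minimal sketch, with placement assumed, for requesting authorization before presenting it:
import Speech

SFSpeechRecognizer.requestAuthorization { status in
    DispatchQueue.main.async {
        switch status {
        case .authorized:
            print("Speech recognition authorized")
        default:
            print("Speech recognition unavailable (status \(status.rawValue))")
        }
    }
}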
Use AVAudioRecorder for recording:
private var audioRecorder: AVAudioRecorder!
After declaring the audio recorder, you can write a recording method:
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
currentFileURL = newFileURL
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration)
} catch let error {
print(error)
}
}
And use some helper methods and structs:
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let fileNamePrefix = DateFormatter.stringFromDate(Date())
let fullFileName = "Record_" + fileNamePrefix + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
extension FileManager {
class func getAppFolderURL() -> URL? {
return FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "your.app.group.identifier")
}
}
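Note that TimeConstants and DateFormatter.stringFromDate are not Foundation APIs; they are this answer's own helpers. A minimal sketch of what they might look like:
import Foundation

// Hypothetical definitions for the helpers used above.
struct TimeConstants {
    static let recordDuration: TimeInterval = 60 * 60 // e.g. cap recordings at one hour
}

extension DateFormatter {
    static func stringFromDate(_ date: Date) -> String {
        let formatter = DateFormatter()
        formatter.dateFormat = "yyyy-MM-dd_HH-mm-ss"
        return formatter.string(from: date)
    }
}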
I am trying to start recording when the user starts to talk and stop recording when the user is done talking. I also want to limit the maximum recording length. I could not find a suitable callback in AVAudioRecorderDelegate for this. I hope you understand my problem. Thanks in advance.
@IBAction func recordAudio(_ sender: Any) {
recordingLabel.text = "Recording in progress..."
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with:AVAudioSessionCategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
@IBAction func stopRecording(_ sender: Any) {
recordButton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to record..."
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if (flag) {
//Success
} else {
print("Could not save audio recording!")
}
}
To record audio when the user talks, you need a few steps:
1. Permission from the user allowing your app to use the mic: in your Info.plist, add Privacy - Microphone Usage Description with a description text.
2. A location to save the recorded file, using FileManager.
3. To end after a fixed time, use audioRecorder.record(forDuration: 30) // record for 30 sec
Check the complete code:
import UIKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet weak var recordButton: UIButton!
var recordingSession: AVAudioSession!
var audioRecorder: AVAudioRecorder!
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func recordAudio(_ sender: Any) {
self.requestRecordPermission()
}
func requestRecordPermission() {
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission() { [unowned self] allowed in
DispatchQueue.main.async {
if allowed {
// User allow you to record
// Start recording and change UIbutton color
self.recordButton.backgroundColor = .red
self.startRecording()
} else {
// failed to record!
}
}
}
} catch {
// failed to record!
}
}
func startRecording() {
let audioFilename = getDocumentsDirectory().appendingPathComponent("recordedFile.m4a")
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
audioRecorder.delegate = self
audioRecorder.record(forDuration: 30) // record for 30 Sec
recordButton.setTitle("Tap to Stop", for: .normal)
recordButton.backgroundColor = .green
} catch {
finishRecording(success: false)
}
}
func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
@objc func recordTapped() {
if audioRecorder == nil {
startRecording()
} else {
finishRecording(success: true)
}
}
public func finishRecording(success: Bool) {
audioRecorder.stop()
audioRecorder = nil
if success {
// record sucess
recordButton.backgroundColor = .green
} else {
// record fail
recordButton.backgroundColor = .yellow
}
}
}
extension ViewController :AVAudioRecorderDelegate{
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
}
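The code above stops after a fixed duration, but the question also asks how to stop around actual speech. AVAudioRecorder has no voice-activity callback; one common workaround is to enable metering and poll the input level while recording. A sketch to add to the ViewController above; the property names and thresholds are mine and need tuning, and audioRecorder.isMeteringEnabled = true must be set before record() is called:
var silenceTimer: Timer?
var silentSeconds: Double = 0

func monitorInputLevel() {
    silenceTimer = Timer.scheduledTimer(withTimeInterval: 0.25, repeats: true) { [weak self] _ in
        guard let self = self, let recorder = self.audioRecorder else { return }
        recorder.updateMeters()
        let level = recorder.averagePower(forChannel: 0) // dBFS, 0 is maximum
        if level < -40 {                                 // assumed silence threshold
            self.silentSeconds += 0.25
            if self.silentSeconds >= 2 {                 // ~2 s of silence: stop recording
                self.silenceTimer?.invalidate()
                self.finishRecording(success: true)
            }
        } else {
            self.silentSeconds = 0                       // speech detected, reset the counter
        }
    }
}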
I have a table view with multiple cells, and each cell has an AVAudioPlayer item. I'm facing a problem: I don't know how to manage the AVAudioPlayers. When I play the first AVAudioPlayer and then play the second, the sounds overlap. How do I stop the first AVAudioPlayer in my custom cell and play the second one?
Thanks.
This is my custom cell:
class TableViewCell: UITableViewCell {
@IBOutlet weak var myImageView: UIImageView!
@IBOutlet weak var myChatBubbleView: UIView!
@IBOutlet weak var myDateLabel: UILabel!
@IBOutlet weak var mySecondLabel: UILabel!
@IBOutlet weak var myRecordPlayerBtn: MenuButton!
private var timer:Timer?
private var elapsedTimeInSecond:Int = 0
var audioPlayer:AVAudioPlayer?
var message:ChatroomMessage?
var chatroomId:String = ""
var delegate:PlayRecordDelegate?
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
self.backgroundColor = defaultBackgroundColor
self.tintColor = defaultChatroomCheckButtonColor
myImageView.layer.masksToBounds = true
myImageView.layer.cornerRadius = defaultIconRadius
myChatBubbleView.backgroundColor = defaultChatGreenBubbleColor
myChatBubbleView.layer.cornerRadius = defaultButtonRadius
myDateLabel.textColor = defaultChatTimeColor
mySecondLabel.textColor = defaultChatTimeColor
mySecondLabel.isHidden = true
myRecordPlayerBtn.imageView?.animationDuration = 1
myRecordPlayerBtn.imageView?.animationImages = [
UIImage(named: "img_myRocordPlaying1")!,
UIImage(named: "img_myRocordPlaying2")!,
UIImage(named: "img_myRocordPlaying3")!
]
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func loadByMessage(_ message:ChatroomMessage, chatroomId:String) {
self.message = message
self.chatroomId = chatroomId
myRecordPlayerBtn.addTarget(self, action: #selector(recordPlay), for: .touchUpInside)
}
func resetRecordAnimation() {
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.isSelected = false
}
@objc func recordPlay(_ sender: UIButton) {
self.myRecordPlayerBtn.imageView?.startAnimating()
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
updateTimeLabel()
startTimer()
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.delegate = self
audioPlayer?.play()
}else{
//don't have file in local
let recordUrl = URL(string: (message?.content)!)
URLSession.shared.downloadTask(with: recordUrl!, completionHandler: { (location, response, error) in
guard
let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("audio"),
let location = location, error == nil
else { return }
do {
try FileManager.default.moveItem(at: location, to: fileURL)
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
DispatchQueue.main.async {
self.updateTimeLabel()
self.startTimer()
}
self.audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
} catch {
print(error)
}
}).resume()
}
}
func startTimer() {
timer?.invalidate()
timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { (timer) in
self.elapsedTimeInSecond -= 1
self.updateTimeLabel()
})
}
func resetTimer(second:Int) {
timer?.invalidate()
elapsedTimeInSecond = second
updateTimeLabel()
}
func updateTimeLabel() {
let seconds = elapsedTimeInSecond % 60
let minutes = (elapsedTimeInSecond/60) % 60
mySecondLabel.isHidden = false
mySecondLabel.text = String(format: "%02d:%02d", minutes,seconds)
}
}
extension TableViewCell:AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
DispatchQueue.main.async {
self.resetTimer(second: Int(audioDurationSeconds))
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.imageView?.image = #imageLiteral(resourceName: "img_myRocordDefault")
}
}
}
}
First, check whether your player exists and is already playing before starting it:
if audioPlayer != nil{
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
}
If you don't want two audio tracks playing at the same time, you should use a shared instance of AVAudioPlayer.
It is better for performance, and you can define the instance as a static property so it is accessible from each cell.
I developed a music player application and used a shared instance in a MusicPlayManager:
class MusicPlayManager {
static let sharedInstance = MusicPlayManager()
var player: AVAudioPlayer?
private init() { }
// something else, such as playNext, playPrevious methods
}
In your view controller you can use MusicPlayManager.sharedInstance.player.
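For example, in the cell's play handler, going through the shared instance guarantees the previous track stops first. A sketch; the fileURL parameter is an assumption:
func playRecording(at fileURL: URL) {
    let manager = MusicPlayManager.sharedInstance
    manager.player?.stop() // stop whatever another cell started
    manager.player = try? AVAudioPlayer(contentsOf: fileURL)
    manager.player?.play()
}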
I have an annoying problem with my project here.
When I record something and play it back in the Simulator, it all works fine. But when I play it back on my iPhone, there is no sound. I have searched for hours for a solution.
Here's my code:
import UIKit
import Parse
import AVFoundation
class View1: UIViewController, AVAudioRecorderDelegate, AVAudioPlayerDelegate {
@IBOutlet weak var recordButton: UIButton!
@IBOutlet weak var playButton: UIButton!
var soundRecorder: AVAudioRecorder!
var soundPlayer:AVAudioPlayer!
let fileName = "sound.caf"
override func viewDidLoad() {
super.viewDidLoad()
setupRecorder()
}
@IBAction func recordSound(sender: AnyObject) {
if (sender.titleLabel?!.text == "Record"){
soundRecorder.record()
sender.setTitle("Stop", forState: .Normal)
playButton.enabled = false
} else {
soundRecorder.stop()
sender.setTitle("Record", forState: .Normal)
}
}
@IBAction func playSound(sender: AnyObject) {
if (sender.titleLabel?!.text == "Play"){
recordButton.enabled = false
sender.setTitle("Stop", forState: .Normal)
preparePlayer()
soundPlayer.play()
} else {
soundPlayer.stop()
sender.setTitle("Play", forState: .Normal)
}
}
// MARK:- AVRecorder Setup
func setupRecorder() {
//set the settings for recorder
let recordSettings = [AVSampleRateKey : NSNumber(float: Float(44100.0)),
AVFormatIDKey : NSNumber(int: Int32(kAudioFormatAppleLossless)),
AVNumberOfChannelsKey : NSNumber(int: 2),
AVEncoderAudioQualityKey : NSNumber(int: Int32(AVAudioQuality.Max.rawValue))];
var error: NSError?
do {
// soundRecorder = try AVAudioRecorder(URL: getFileURL(), settings: recordSettings as [NSObject : AnyObject])
soundRecorder = try AVAudioRecorder(URL: getFileURL(), settings: recordSettings)
} catch let error1 as NSError {
error = error1
soundRecorder = nil
}
if let err = error {
print("AVAudioRecorder error: \(err.localizedDescription)")
} else {
soundRecorder.delegate = self
soundRecorder.prepareToRecord()
}
}
// MARK:- Prepare AVPlayer
func preparePlayer() {
var error: NSError?
do {
soundPlayer = try AVAudioPlayer(contentsOfURL: getFileURL())
} catch let error1 as NSError {
error = error1
soundPlayer = nil
}
if let err = error {
print("AVAudioPlayer error: \(err.localizedDescription)")
} else {
soundPlayer.delegate = self
soundPlayer.prepareToPlay()
soundPlayer.volume = 1.0
}
}
// MARK:- File URL
func getCacheDirectory() -> String {
let paths = NSSearchPathForDirectoriesInDomains(.CachesDirectory,.UserDomainMask, true)
return paths[0]
}
func getFileURL() -> NSURL {
let path = getCacheDirectory().stringByAppendingString(fileName)
let filePath = NSURL(fileURLWithPath: path)
return filePath
}
// MARK:- AVAudioPlayer delegate methods
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
recordButton.enabled = true
playButton.setTitle("Play", forState: .Normal)
}
func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer, error: NSError?) {
print("Error while playing audio \(error!.localizedDescription)")
}
// MARK:- AVAudioRecorder delegate methods
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder, successfully flag: Bool) {
playButton.enabled = true
recordButton.setTitle("Record", forState: .Normal)
}
func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder, error: NSError?) {
print("Error while recording audio \(error!.localizedDescription)")
}
// MARK:- didReceiveMemoryWarning
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
Thank you.
Add this code to the setupRecorder method:
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord)
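One more device-only pitfall: with the play-and-record category active, playback is routed to the quiet ear receiver by default on a real iPhone (the Simulator always plays through the Mac's speakers), which can make a recording sound silent. Routing to the loudspeaker and handling the error instead of using try! is a safer variant of the snippet above:
let session = AVAudioSession.sharedInstance()
do {
    // .defaultToSpeaker routes playback to the loudspeaker instead of the ear receiver.
    try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
    try session.setActive(true)
} catch {
    print("Audio session setup failed: \(error)")
}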