append or concatenate audio files in swift - ios

Hi I want to append voice files.
I'm recording voice with AVAudioRecorder, but to play back the recording I need to call "stop"; after playing it I want to continue recording, like the native iOS Voice Memos app.
Should I use AVMutableCompositionTrack and how do I do that in swift? Thanks!

If you are looking to simply pause your recording and continue it later, you can use AVAudioRecorder's pause() function rather than stop(), and it will continue the recording when you call record() again.
However, if you are looking to actually concatenate audio files, you can do it like this:
/// Concatenates the given audio files end-to-end into a single .caf file in
/// the Documents directory, then calls `completion` on the main queue with
/// the new file's URL (or nil on any failure).
/// - Note: Swift 2 / iOS 9-era API names, kept consistent with the snippet.
func concatenateFiles(audioFiles: [NSURL], completion: (concatenatedFile: NSURL?) -> ()) {
    guard audioFiles.count > 0 else {
        completion(concatenatedFile: nil)
        return
    }
    // A single file needs no concatenation — hand it straight back.
    if audioFiles.count == 1 {
        completion(concatenatedFile: audioFiles.first)
        return
    }
    // Concatenate audio files into one composition track.
    var nextClipStartTime = kCMTimeZero
    let composition = AVMutableComposition()
    let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    // Append each file's first audio track at the running insertion point.
    for recording in audioFiles {
        let asset = AVURLAsset(URL: NSURL(fileURLWithPath: recording.path!), options: nil)
        if let assetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first {
            let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
            do {
                try track.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: nextClipStartTime)
                nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
            } catch {
                print("Error concatenating file - \(error)")
                completion(concatenatedFile: nil)
                return
            }
        }
    }
    // Export the composition to a new file.
    if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let documents = NSURL(string: paths.first!)
        if let fileURL = documents?.URLByAppendingPathComponent("file_name.caf") {
            // Remove any existing file at the destination; export fails if
            // the output file already exists.
            do {
                try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
                print("Removed \(fileURL)")
            } catch {
                print("Could not remove file - \(error)")
            }
            // Configure export session output. `fileURL` came from
            // NSURL(string:), so rebuild it as a proper file:// URL.
            exportSession.outputURL = NSURL.fileURLWithPath(fileURL.path!)
            exportSession.outputFileType = AVFileTypeCoreAudioFormat
            // Perform the export.
            // FIX: the completion handler takes no parameters — the original
            // `{ handler -> Void in` would not compile.
            exportSession.exportAsynchronouslyWithCompletionHandler() { () -> Void in
                if exportSession.status == .Completed {
                    print("Export complete")
                    dispatch_async(dispatch_get_main_queue(), {
                        // FIX: the argument label must match the closure's
                        // declared label `concatenatedFile:`, not `file:`.
                        completion(concatenatedFile: fileURL)
                    })
                    return
                } else if exportSession.status == .Failed {
                    print("Export failed - \(exportSession.error)")
                }
                completion(concatenatedFile: nil)
                return
            }
        }
    }
}

Related

AVAssetExportSession succeeds to convert mp4 to m4a on iPhone simulator but iPhone device

I'm trying to convert mp4 video file to m4a audio format by AVAssetExportSession on my iOS app.
This is the conversion code:
// Build the output path inside the app's temporary directory.
let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
// Remove any leftover file from a previous run; the export fails if the
// destination already exists.
if FileManager.default.fileExists(atPath: outputUrl.path) {
try? FileManager.default.removeItem(atPath: outputUrl.path)
}
let asset = AVURLAsset(url: inputUrl)
// tried the `AVAssetExportPresetAppleM4A` preset name but the same result
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.m4a
exportSession.outputURL = outputUrl
// Run the export and wait for it to finish (async/await API).
await exportSession.export()
switch exportSession.status {
case .completed:
return outputUrl
default:
// This becomes `4` which is `.failed`
// NOTE(review): per the discussion below, for files obtained via
// UIDocumentPickerViewController the export fails on a real device unless
// inputUrl.startAccessingSecurityScopedResource() is called first.
print("Status: \(exportSession.status)")
throw exportSession.error!
}
Currently, it seems to work on iPhone simulators (confirmed on iOS 16.1/15.5) but it doesn't on my iPhone 7 (iOS 15.7.1) real device. It doesn't seem to work as well on my colleague's iOS 16.1 real device, so it shouldn't be a matter of the iOS version.
The mp4 file is located in the iOS Files app and the inputUrl in the above code becomes something like this (I get this URL via UIDocumentPickerViewController):
file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4
and the error is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x2808f30c0 {Error Domain=NSOSStatusErrorDomain Code=-16979 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-16979), NSLocalizedRecoverySuggestion=XXXXDEFAULTVALUEXXXX, NSURL=file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4, NSLocalizedDescription=The operation could not be completed}
It seems to be resolved by calling startAccessingSecurityScopedResource() to the inputUrl before exporting.
inputUrl.startAccessingSecurityScopedResource()
Not sure exactly why but that's probably because the inputUrl is under the file:///private namespace?
Use this function to extract the audio track from a video.
It exports the audio from a video URL to a new path:
/// Extracts the first audio track from the video at `videoUrl` and exports
/// it as an .m4a file in the caches directory. The resulting URL is printed
/// from the export completion handler on the main queue.
func extractAudioFromVideo(videoUrl:URL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionAudioVideoTrack: [AVMutableCompositionTrack] = []
    let videoAsset: AVAsset = AVAsset(url: videoUrl)
    // Copy the video's first audio track into the composition.
    if let audioVideoTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionAudioVideoTrack.append(audioVideoTrack)
        if let audioVideoAssetTrack: AVAssetTrack = videoAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionAudioVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: audioVideoAssetTrack, at: CMTime.zero)
            } catch {
                print(error)
            }
        }
    }
    if let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first {
        // FIX: the original appended ".m4a" alone, which creates a hidden
        // file with an empty name; give the exported file a real name.
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("extractedAudio.m4a")
        // Remove any previous export at the destination; export fails if
        // the output file already exists.
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }
        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetAppleM4A) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.m4a
            exportSession.shouldOptimizeForNetworkUse = true
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .completed: // FIX: removed stray space in `case . completed`
                    DispatchQueue.main.async {
                        print("audio url :---- \(outputURL)")
                        // -------- play output audio URL in player ------
                    }
                case .failed:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                case .cancelled:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                default:
                    print("")
                }
            })
        }
    }
}
Playing an AVMutableComposition directly:
You can play an AVMutableComposition directly, without exporting the audio track first.
The benefit of playing an AVMutableComposition is that the audio can start playing in the player instantly.
var avplayer = AVPlayer()
var playerController : AVPlayerViewController?

/// Builds an audio-only composition from a video URL and plays it
/// immediately in an AVPlayerViewController — no export step required.
// FIX: the posted source had `#IBAction`; the Swift attribute is `@IBAction`.
@IBAction func btnAudioPlay(sender:UIButton) {
    // NOTE(review): `URL(string: "")!` is a placeholder and would crash at
    // runtime (URL(string:"") is nil) — substitute a real video URL.
    self.playAudioCompositionFromVideo(fromVideoURL: URL(string: "")!) { Composition in
        let playerItem = AVPlayerItem(asset: Composition)
        self.playerController = AVPlayerViewController()
        self.avplayer = AVPlayer(playerItem: playerItem)
        self.playerController?.player = self.avplayer
        self.playerController?.player?.play()
    } failure: { errore in
        print(errore as Any)
    }
}
/// Copies the first audio track of the video at `url` into an
/// AVMutableComposition that can be handed straight to a player.
/// - Parameters:
///   - url: the video to read audio from.
///   - success: called with the composition when the audio track was inserted.
///   - failure: called with a message when no audio track exists or insertion fails.
// FIX: the posted source had `#escaping`; the Swift attribute is `@escaping`.
func playAudioCompositionFromVideo(fromVideoURL url: URL, success: @escaping ((AVMutableComposition) -> Void), failure: @escaping ((String?) -> Void)) {
    let asset = AVPlayerItem(url: url).asset
    let mixComposition = AVMutableComposition()
    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
    //------------ Get Audio Tracks From Asset ---------
    let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
    if audioTracks.count > 0 {
        // ---- Use audio if video contains the audio track ---
        let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        // -------- Get First Audio track --------
        guard let audioTrack = audioTracks.first else {
            // FIX: the original returned silently here, leaving the caller
            // waiting forever; report the failure instead.
            failure("audio track is not available!")
            return
        }
        do {
            try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
            compositionAudioTrack?.preferredTransform = audioTrack.preferredTransform
            success(mixComposition)
        } catch _ {
            failure("audio track insert failed!")
        }
    } else {
        failure("audio track is not available!")
    }
}

Merging audio with AVAssetExportSession

I'd like to merge two audio files without too much overhead time. The following code successfully merges the audio but takes way too long (>30seconds for more than a couple minutes of audio), and I'm wondering if there is any way to expedite that process. I read in a couple places to use AVAssetExportPresetPassthrough, but I can't seem to get that preset to work with any file type. The only settings I've been able to get to work are using AVAssetExportPresetAppleM4A and exporting as a .m4a
Code to create AVAssetExportSession
// Merge the two recordings into one composition and export it over the
// original recording's URL.
if (audioHasBeenRecorded) {
// Merge recordings
let composition = AVMutableComposition()
let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
guard let start = recorder.timeStart else { return }
compositionAudioTrack?.insert(originalRecording: FileManagerHelper.recordingLocalURL(secondRecording: false), insertedRecording: FileManagerHelper.recordingLocalURL(secondRecording: true), startTime: CMTime(seconds: start, preferredTimescale: 1000000))
if let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
print(assetExport.supportedFileTypes)
assetExport.outputFileType = AVFileType.m4a
// NOTE(review): the output URL is the same file as the first source
// recording; combined with the deletions below, the export reads from
// files that were just removed — looks racy, verify against the docs.
assetExport.outputURL = FileManagerHelper.recordingLocalURL(secondRecording: false)
do { // Delete old audio
try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: false))
try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: true))
} catch { log(error.localizedDescription, msgType: .error) }
assetExport.exportAsynchronously(completionHandler: {
if let error = assetExport.error {
log(error, msgType: .error)
} else {
log("Successfully merged recordings!", msgType: .error)
self.idea.numberOfPoints = self.audioVisualizer.count
self.idea.save()
// NOTE(review): this completion handler runs on a background queue;
// only the visualize call below is dispatched to main — confirm
// setupPlayer()/seekTo() are safe off the main thread.
self.setupPlayer() // Prepare to play the recorded audio file
self.seekTo(TimeInterval((recorder.timeStart ?? 0) + (recorder.timeEnd ?? 0)))
DispatchQueue.main.async { [ weak self ] in
guard let self = self else { return }
self.audioVisualizer.visualize(self.idea)
}
}
})
}
}
Insert code:
/// Splices `insertedRecording` into the receiver: the head of the original
/// up to `startTime`, then the inserted file, then the remainder of the
/// original.
extension AVMutableCompositionTrack {
func insert(originalRecording: URL, insertedRecording: URL, startTime: CMTime) {
let originalAsset = AVURLAsset(url: originalRecording)
let insertedAsset = AVURLAsset(url: insertedRecording)
// Head of the original: [0, startTime).
let range1 = CMTimeRangeMake(start: CMTime.zero, duration: startTime)
// The entire inserted recording.
let range2 = CMTimeRangeMake(start: CMTime.zero, duration: insertedAsset.duration)
// NOTE(review): range3's start is in the SOURCE asset's timeline; if the
// intent is to resume the original where it left off, the source start
// should presumably be `startTime`, not startTime + insertedAsset.duration
// — confirm against the intended behavior.
let range3 = CMTimeRangeMake(start: startTime + insertedAsset.duration, duration: originalAsset.duration - startTime)
if let originalTrack = originalAsset.tracks(withMediaType: AVMediaType.audio).first,
let insertedTrack = insertedAsset.tracks(withMediaType: AVMediaType.audio).first {
// NOTE(review): `try?` swallows insertion failures silently; a do/catch
// that logs the error would make failures visible.
try? insertTimeRange(range1, of: originalTrack, at: CMTime.zero)
try? insertTimeRange(range2, of: insertedTrack, at: startTime)
try? insertTimeRange(range3, of: originalTrack, at: startTime + insertedAsset.duration)
}
}
}

AVFoundation -Videos merge but only last video plays

I have an array of [AVAsset](). Whenever I record different videos at different durations the below code merges all the durations into 1 video but it will only play the last video in a loop.
For example: video1 is 1 minute long and shows a dog walking, video2 is 1 minute long and shows a bird flying, video3 is 1 minute long and shows a horse running. The merged video plays for 3 minutes, but it only shows the horse running, for 1 minute, three consecutive times.
Where am I going wrong at?
var movieFileOutput = AVCaptureMovieFileOutput()
var arrayVideos = [AVAsset]()
var videoFileUrl: URL?

// button to record video
// FIX: the posted source had `#objc`; the Swift attribute is `@objc`.
/// Toggles movie recording: stops if currently recording, otherwise sets
/// orientation/max duration and starts recording to a new file URL.
@objc func recordButtonTapped() {
    // Stop recording
    if movieFileOutput.isRecording {
        movieFileOutput.stopRecording()
        print("Stop Recording")
    } else {
        // Start recording
        movieFileOutput.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation()
        movieFileOutput.maxRecordedDuration = maxRecordDuration()
        videoFileUrl = URL(fileURLWithPath: videoFileLocation())
        if let videoFileUrlFromCamera = videoFileUrl {
            movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
        }
    }
}
// NOTE(review): this returns the SAME path for every recording, so each new
// clip overwrites the previous file and every stored AVAsset ends up
// pointing at the last clip — this is the root cause of the "only the last
// video plays" symptom described in this post.
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
// button to save the merged video
// button to save the merged video
// FIX: the posted source had `#objc`; the Swift attribute is `@objc`.
@objc func saveButtonTapped() {
    mergeVids()
}
// function to merge and save videos
// function to merge and save videos
/// Concatenates every asset in `arrayVideos` into one .mp4 in the temp
/// directory, exports it, and saves the result to the photo library.
func mergeVids() {
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    // Recordings are portrait; rotate the composed track to match.
    compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
    let soundtrackTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                         preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    var insertTime = CMTime.zero
    for videoAsset in arrayVideos {
        do {
            // FIX: use `first` instead of `[0]` so a clip missing a video
            // or audio track cannot crash with an index-out-of-range.
            guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else { continue }
            try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                       duration: videoAsset.duration),
                                                       of: videoTrack,
                                                       at: insertTime)
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                     duration: videoAsset.duration),
                                                     of: audioTrack,
                                                     at: insertTime)
            }
            insertTime = CMTimeAdd(insertTime, videoAsset.duration)
        } catch let error as NSError {
            print("\(error.localizedDescription)")
        }
    }
    let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
    let path = outputFileURL.path
    if FileManager.default.fileExists(atPath: path) {
        // FIX: was `try!` — a removal failure should not crash the app.
        try? FileManager.default.removeItem(atPath: path)
    }
    // FIX: guard instead of repeatedly force-unwrapping `exporter!`.
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputFileURL
    exporter.outputFileType = AVFileType.mp4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.exportAsynchronously { [weak self] in
        guard let cameraVideoURL = exporter.outputURL else { return }
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: cameraVideoURL)
        }) { (saved, error) in
            // FIX: surface the error instead of silently discarding it.
            if let error = error { print(error.localizedDescription); return }
            if !saved { return }
            // url is saved
            self?.videoFileUrl = nil
            self?.arrayVideos.removeAll()
        }
    }
}
// AVCaptureFileOutputRecording Delegates
// Called when the capture output actually begins writing to `fileURL`.
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print("+++++++++++++++Started")
print("*****Started recording: \(fileURL)\n")
}
// Called when a recording finishes; wraps the written file in an AVAsset
// and stores it for later merging.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
arrayVideos.append(asset)
print(arrayVideos.count)
} else {
print("Error recording movie: \(error!.localizedDescription)")
}
// NOTE(review): `cleanUp()` is declared but never called, so the temp file
// is never actually removed by this delegate.
func cleanUp() {
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
do {
try FileManager.default.removeItem(atPath: path)
} catch {
print("Could not remove file at url: \(outputFileURL)")
}
}
}
}
// Logs dropped frames from the capture connection (debug aid only).
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("++++++Frame Drop: \(connection.description)")
}
Thanks to @alxlives for testing out the merge function and pointing out that, since it was fine on his machine, the problem must have been somewhere else.
The problem was here:
// The buggy version: the path is constant, so every recording overwrites
// "videoFile.mov" and all stored AVAssets end up referencing the final clip.
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
In the recordButtonTapped when it used the above code it kept using the same "videoFile.mov" extension:
// Each new recording re-derives the same path, so the previous clip's file
// is silently replaced:
videoFileUrl = URL(fileURLWithPath: videoFileLocation()) // <<< it gets called here every time a new video runs
if let videoFileUrlFromCamera = videoFileUrl {
movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
To fix it I needed to make each extension unique:
/// Builds a unique temp-directory path for each recording so a new clip
/// never overwrites a previous one.
func videoFileLocation() -> String {
    let uniqueName = "videoFile_\(UUID().uuidString).mov"
    return NSTemporaryDirectory().appending(uniqueName)
}

AVAssetTrack does not see AVMediaTypeAudio

I am new to Swift and programming. I tried to concatenate some recorded files, which I did successfully like this:
/// Concatenates the given audio files into one .m4a in Documents, records
/// the result in `concatinatedArray`, and calls `completion` on the main
/// queue with the file's URL (nil on failure).
// FIX: the posted source had `#escaping`; the Swift attribute is `@escaping`.
func concatenateFiles(audioFiles: [URL], completion: @escaping (_ concatenatedFile: NSURL?) -> ()) {
    // Result file
    var nextClipStartTime = kCMTimeZero
    let composition = AVMutableComposition()
    let track = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    // Append each file's first audio track at the running insertion point.
    for audio in audioFiles {
        let asset = AVURLAsset(url: NSURL(fileURLWithPath: audio.path) as URL, options: nil)
        if let assetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first {
            let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
            do {
                try track.insertTimeRange(timeRange, of: assetTrack, at: nextClipStartTime)
                nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
            } catch {
                print("Error concatenating file - \(error)")
                completion(nil)
                return
            }
        }
    }
    // Export the new file. (FIX: removed the unused `paths` local.)
    if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
        let format = DateFormatter()
        format.dateFormat = "yyyy:MM:dd-HH:mm:ss"
        // NOTE(review): colons in file names render as "/" in Finder on
        // macOS — consider "-" separators; TODO confirm acceptable here.
        let currentFileName = "REC:\(format.string(from: Date()))"
        let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let fileURL = documentsDirectory.appendingPathComponent("\(currentFileName).m4a")
        // Remove existing file; export fails if the destination exists.
        do {
            print(audioFiles.count)
            try FileManager.default.removeItem(atPath: fileURL.path)
            print("Removed \(fileURL)")
        } catch {
            print("Could not remove file - \(error)")
        }
        // Configure export session output
        exportSession.outputURL = fileURL as URL
        exportSession.outputFileType = AVFileTypeAppleM4A
        // Perform the export
        exportSession.exportAsynchronously() { () -> Void in
            switch exportSession.status
            {
            case AVAssetExportSessionStatus.completed:
                print("Export complete")
                DispatchQueue.main.async(execute: {
                    if self.concatinatedArray == nil
                    {
                        self.concatinatedArray = [URL]()
                    }
                    self.concatinatedArray?.append(exportSession.outputURL!)
                    completion(fileURL as NSURL?)
                })
                return print("success to Merge Video")
            case AVAssetExportSessionStatus.failed:
                // FIX: deliver failure on the main queue too, so every
                // completion path runs on the same queue as the success path.
                DispatchQueue.main.async { completion(nil) }
                return print("failed to MERGE )")
            case AVAssetExportSessionStatus.cancelled:
                DispatchQueue.main.async { completion(nil) }
                return print("cancelled merge)")
            default:
                print("complete")
            }
        }
    }
}
But now, when I want to merge it with a video, I get a crash at this line:
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
I use a standard method of merging; it works with the other sounds I have, it just doesn't work with the concatenated audio files. Please help me figure out how to make it work.
// Objective-C: only insert the audio track if the asset actually contains
// one (guards against the index-0 crash described above).
AVURLAsset* avAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:path2] options:nil];
if ([[avAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
{
AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
// NOTE(review): the NSError out-parameter is nil, so insertion failures
// are silently ignored here.
[firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}

Audio export fail iOS Swift

I'm trying to concatenate two audio files with the following code:
// Question code (Swift 2 era): concatenates audio files and exports them.
// The export fails with AVFoundation error -11838; see the notes below.
func concatenateFiles(audioFiles: [NSURL], completion: (concatenatedFile: NSURL?) -> ()) {
// Result file
var nextClipStartTime = kCMTimeZero
let composition = AVMutableComposition()
let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
// Add each track
for audio in audioFiles {
let asset = AVURLAsset(URL: NSURL(fileURLWithPath: audio.path!), options: nil)
if let assetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first {
let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
do {
try track.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: nextClipStartTime)
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
} catch {
print("Error concatenating file - \(error)")
completion(concatenatedFile: nil)
return
}
}
}
// Export the new file
if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) {
// NOTE(review): `documents` is built with NSURL(string:) from a plain
// path, so the derived `fileURL` has no file:// scheme.
let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
let documents = NSURL(string: paths.first!)
if let fileURL = documents?.URLByAppendingPathComponent("\(stringFromDate(NSDate())).m4a") {
// Remove existing file
do {
try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
print("Removed \(fileURL)")
} catch {
print("Could not remove file - \(error)")
}
// Configure export session output
// NOTE(review): assigning the non-file URL directly is what makes this
// export fail; rebuilding it with NSURL.fileURLWithPath(fileURL.path!)
// (the accepted resolution at the end of this post) fixes it.
exportSession.outputURL = fileURL
exportSession.outputFileType = AVFileTypeAppleM4A
// Perform the export
exportSession.exportAsynchronouslyWithCompletionHandler() { handler -> Void in
if exportSession.status == .Completed {
print("Export complete")
dispatch_async(dispatch_get_main_queue(), {
completion(concatenatedFile: fileURL)
})
return
} else if exportSession.status == .Failed {
print("Export failed - \(exportSession.error)")
}
completion(concatenatedFile: nil)
return
}
}
}
}
but i've receive this error exporting the file:
Export failed - Optional(Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.})
I've tried changing the format but it doesn't work, and I have no more ideas.
Can someone help me?
I really don't know why, but the problem was solved when I changed the assignment of the outputURL on the exportSession.
before:
exportSession.outputURL = fileURL
now:
exportSession.outputURL = NSURL.fileURLWithPath(fileURL.path!)

Resources