Merging audio with AVAssetExportSession - iOS

I'd like to merge two audio files without too much overhead time. The following code successfully merges the audio but takes far too long (over 30 seconds for more than a couple of minutes of audio), and I'm wondering if there is any way to speed that process up. I read in a couple of places to use AVAssetExportPresetPassthrough, but I can't get that preset to work with any file type. The only settings I've been able to get working are AVAssetExportPresetAppleM4A with an .m4a export.
Code to create the AVAssetExportSession:
if audioHasBeenRecorded {
    // Merge recordings
    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
    guard let start = recorder.timeStart else { return }
    compositionAudioTrack?.insert(originalRecording: FileManagerHelper.recordingLocalURL(secondRecording: false), insertedRecording: FileManagerHelper.recordingLocalURL(secondRecording: true), startTime: CMTime(seconds: start, preferredTimescale: 1000000))
    if let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
        print(assetExport.supportedFileTypes)
        assetExport.outputFileType = AVFileType.m4a
        assetExport.outputURL = FileManagerHelper.recordingLocalURL(secondRecording: false)
        do { // Delete old audio
            try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: false))
            try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: true))
        } catch { log(error.localizedDescription, msgType: .error) }
        assetExport.exportAsynchronously(completionHandler: {
            if let error = assetExport.error {
                log(error, msgType: .error)
            } else {
                log("Successfully merged recordings!", msgType: .error)
                self.idea.numberOfPoints = self.audioVisualizer.count
                self.idea.save()
                self.setupPlayer() // Prepare to play the recorded audio file
                self.seekTo(TimeInterval((recorder.timeStart ?? 0) + (recorder.timeEnd ?? 0)))
                DispatchQueue.main.async { [weak self] in
                    guard let self = self else { return }
                    self.audioVisualizer.visualize(self.idea)
                }
            }
        })
    }
}
Insert code:
extension AVMutableCompositionTrack {
    func insert(originalRecording: URL, insertedRecording: URL, startTime: CMTime) {
        let originalAsset = AVURLAsset(url: originalRecording)
        let insertedAsset = AVURLAsset(url: insertedRecording)
        let range1 = CMTimeRangeMake(start: CMTime.zero, duration: startTime)
        let range2 = CMTimeRangeMake(start: CMTime.zero, duration: insertedAsset.duration)
        let range3 = CMTimeRangeMake(start: startTime + insertedAsset.duration, duration: originalAsset.duration - startTime)
        if let originalTrack = originalAsset.tracks(withMediaType: AVMediaType.audio).first,
           let insertedTrack = insertedAsset.tracks(withMediaType: AVMediaType.audio).first {
            try? insertTimeRange(range1, of: originalTrack, at: CMTime.zero)
            try? insertTimeRange(range2, of: insertedTrack, at: startTime)
            try? insertTimeRange(range3, of: originalTrack, at: startTime + insertedAsset.duration)
        }
    }
}
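For reference: AVAssetExportPresetPassthrough copies samples without re-encoding, which is why it is fast, but it only works when the output container can hold the source samples as-is; .m4a is commonly rejected while QuickTime (.mov) is usually accepted. Below is a minimal sketch (not the poster's code; names are illustrative) that asks the session what it can actually write before exporting:
import AVFoundation

func exportPassthrough(composition: AVMutableComposition, to outputURL: URL) {
    guard let session = AVAssetExportSession(asset: composition,
                                             presetName: AVAssetExportPresetPassthrough) else { return }
    // Ask the session which containers can hold the source samples unchanged;
    // for AAC/PCM audio this list typically includes .mov but not .m4a.
    session.determineCompatibleFileTypes { types in
        guard let fileType = types.first else {
            print("passthrough not possible for this composition")
            return
        }
        session.outputFileType = fileType
        session.outputURL = outputURL // the file extension should match fileType, e.g. .mov
        session.exportAsynchronously {
            print(session.status == .completed ? "done" : "failed: \(String(describing: session.error))")
        }
    }
}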

Related

Swift - Workaround/Alternative to M3u8 to play mp4 segment or merge segments into mp4

I used AVAssetExportSession to download a session URL, but you can't download a live stream, so to get around that the live stream is split into 10-second MP4 segments whose URLs come from an m3u8. I then use AVAssetExportSession to merge those MP4 segments.
I can merge the clips one by one into a single MP4 file, which is what I want, but the bigger the file gets the longer each merge takes, and since I'm dealing with thousands of segments this becomes impractical.
I thought about using AVPlayerLooper, but then I cannot scrub, rewind, or fast-forward through the MP4 segments like a single video.
Is there a way to combine the MP4 clips so they play as one video, as the m3u8 does, without merging? Or is there a fast way to merge videos?
Note: the server uses FFmpeg, but I am not allowed to use FFmpeg or pods in the app.
Below is the function that merges the videos:
var mp4Array: [AVAsset] = []
var avAssetExportSession: AVAssetExportSession?
var firstAsset: AVAsset?
var secondAsset: AVAsset?
func mergeVideos() {
firstAsset = mp4Array.first
secondAsset = mp4Array[1]
guard let firstAsset = firstAsset, let secondAsset = secondAsset else { return }
let mixComposition = AVMutableComposition()
guard let firstTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {return}
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration),
of: firstAsset.tracks(withMediaType: .video)[0],
at: CMTime.zero)
} catch {
print("Couldn't load track 1")
return
}
guard let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {return}
do {
try secondTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: secondAsset.duration),
of: secondAsset.tracks(withMediaType: .video)[0],
at: firstAsset.duration)
} catch {
print("couldn't load track 2")
return
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTimeAdd(firstAsset.duration, secondAsset.duration))
let firstAssetInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
firstAssetInstruction.setOpacity(0.0, at: firstAsset.duration)
let secondAssetInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
mainInstruction.layerInstructions = [firstAssetInstruction, secondAssetInstruction]
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mainComposition.renderSize = firstTrack.naturalSize
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
let url = documentDirectory.appendingPathComponent("MergedVideos/mergeVideo\(videoInt).mp4")
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {return}
exporter.outputURL = url
exporter.outputFileType = AVFileType.mp4
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = mainComposition
exporter.exportAsynchronously {
if exporter.status == .completed {
let avasset = AVAsset(url:url)
self.mergeUrl = avasset
if self.mp4Array.count > 1{
print("This add the merged video to the front of the mp4array")
self.mp4Array.remove(at: 1)
self.mp4Array.removeFirst()
self.videoInt = self.videoInt + 1
self.mp4Array.append(self.mergeUrl!)
self.mp4Array.bringToFront(item: self.mp4Array.last!)
}
if (self.mp4Array.count > 1){
if self.mergeUrl != nil {
self.mergeVideos()
}
} else {
var numberofvideosdeleted = 0
while (numberofvideosdeleted < self.videoInt - 1){
do {
print("deleting")
let url = documentDirectory.appendingPathComponent("MergedVideos/mergeVideo\(numberofvideosdeleted).mp4")
try FileManager.default.removeItem(at: url)
numberofvideosdeleted = numberofvideosdeleted + 1
} catch {
print("Error removing videos")
}
}
self.deleteCurrentSegementsInFolder()
}
}
}
}
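One AVFoundation-side alternative worth noting (a sketch under assumptions, not the question's code): instead of merging pairwise and re-exporting the growing file on every pass, insert all segments into a single composition and export once. With the passthrough preset the samples are copied rather than re-encoded, which is much faster when all segments share the same codecs. Video track only, for brevity:
import AVFoundation

func mergeSegmentsOnce(_ segmentURLs: [URL], outputURL: URL, completion: @escaping (Error?) -> Void) {
    let composition = AVMutableComposition()
    guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    var cursor = CMTime.zero
    for url in segmentURLs {
        let asset = AVAsset(url: url)
        guard let source = asset.tracks(withMediaType: .video).first else { continue }
        // Append each segment end-to-end on the same composition track
        try? videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: source, at: cursor)
        cursor = CMTimeAdd(cursor, asset.duration)
    }
    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else { return }
    // Passthrough into .mp4 is not always supported; fall back to .mov if needed
    exporter.outputFileType = exporter.supportedFileTypes.contains(.mp4) ? .mp4 : .mov
    exporter.outputURL = outputURL
    exporter.exportAsynchronously { completion(exporter.error) }
}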
I ended up using FFmpeg Mobile to concatenate the videos and it works really well; it takes around a minute to concatenate a 3 GB movie file.
Link to the CocoaPod below:
https://github.com/tanersener/mobile-ffmpeg
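For completeness, the heavy lifting there is FFmpeg's concat demuxer with stream copy, which rewrites the container without re-encoding. A hypothetical wrapper, with API names taken from the mobile-ffmpeg README (an assumption; verify against the pod version you actually install):
import mobileffmpeg

// listFile contains one line per segment, in order, e.g.: file 'seg0.mp4'
func concatenateSegments(listFile: URL, outputURL: URL) -> Bool {
    let command = "-f concat -safe 0 -i \(listFile.path) -c copy \(outputURL.path)"
    // MobileFFmpeg.execute returns 0 on success (RETURN_CODE_SUCCESS)
    return MobileFFmpeg.execute(command) == 0
}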

Recorded voice can't be heard clearly

I'm making a karaoke application. If the headphones aren't plugged in, my app works fine: my voice and the background music record together, and that is successful. So when headphones are plugged in, I record the voice separately and merge it with the background music afterwards. If I merge two songs I hear both clearly, but when I merge my recorded voice with the music, the voice comes out very low.
func mixAudio()
{
let currentTime = CFAbsoluteTimeGetCurrent()
let composition = AVMutableComposition()
let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionAudioTrack.preferredVolume = 0.8
let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
print("\(avAsset)")
var tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
let clipAudioTrack = tracks[0]
do {
try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
}
catch _ {
}
let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionAudioTrack.preferredVolume = 0.8
let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
print(avAsset1)
var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
let clipAudioTrack1 = tracks1[0]
do {
try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
}
catch _ {
}
var paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
let CachesDirectory = paths[0]
let strOutputFilePath = CachesDirectory.stringByAppendingString("/newone.mov")
print(" strOutputFilePath is \n \(strOutputFilePath)")
let requiredOutputPath = CachesDirectory.stringByAppendingString("/newone.m4a")
print(" requiredOutputPath is \n \(requiredOutputPath)")
soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
print(" OUtput path is \n \(soundFile1)")
do {
try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
}
catch _ {
}
let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
exporter!.outputURL = soundFile1
exporter!.outputFileType = AVFileTypeAppleM4A
exporter!.exportAsynchronouslyWithCompletionHandler
{() -> Void in
print(" OUtput path is \n \(requiredOutputPath)")
print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")
do
{
print(self.soundFile1)
print(" OUtput path is \n \(requiredOutputPath)")
self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
self.wasteplayer.numberOfLoops = 0
self.wasteplayer.play()
}
catch _ {
}
}
}
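A note on the likely cause (a sketch in current Swift syntax, with illustrative track names): preferredVolume on a composition track is frequently ignored at export time. An AVAudioMix attached to the export session is what actually scales each track's level in the rendered file, so the voice can be kept at full volume while the backing music is ducked:
import AVFoundation

func makeAudioMix(voiceTrack: AVMutableCompositionTrack, musicTrack: AVMutableCompositionTrack) -> AVAudioMix {
    let voiceParams = AVMutableAudioMixInputParameters(track: voiceTrack)
    voiceParams.setVolume(1.0, at: .zero)   // keep the recorded voice at full level
    let musicParams = AVMutableAudioMixInputParameters(track: musicTrack)
    musicParams.setVolume(0.3, at: .zero)   // duck the backing music underneath it
    let mix = AVMutableAudioMix()
    mix.inputParameters = [voiceParams, musicParams]
    return mix                               // assign to exporter.audioMix before exporting
}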

Issue in merging/mixing two audio files using AVMutableCompositionTrack

It crashes when I try to play the merged audio.
I am trying to merge two audio files. They merge successfully, but when I play the merged audio with AVAudioPlayer it crashes. There is also a problem with formats: the merged audio is only stored correctly in the .m4a format; if I save it as .wav, it crashes.
func merge(audio1: NSURL, audio2: NSURL) {
let finalURL = getMergeFileURL()
let preferredTimeScale : Int32 = 100
//This object will be edited to include both audio files
let composition = AVMutableComposition()
let compositionAudioTrack1:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//let url1 = audio1
let avAsset1 = AVURLAsset(url: audio1 as URL, options: nil)
let tracks1 = avAsset1.tracks(withMediaType: AVMediaTypeAudio)
let assetTrack1:AVAssetTrack = tracks1[0]
let duration1: CMTime = CMTimeMakeWithSeconds(30.0, preferredTimeScale)
let startCMTime = CMTimeMakeWithSeconds(Double(30.0), preferredTimeScale)
let timeRange1 = CMTimeRangeMake(startCMTime, duration1)
let compositionAudioTrack2:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//let url2 = audio2
let avAsset2 = AVURLAsset(url: audio2 as URL, options: nil)
let tracks2 = avAsset2.tracks(withMediaType: AVMediaTypeAudio)
let assetTrack2:AVAssetTrack = tracks2[0]
let duration2: CMTime = CMTimeMakeWithSeconds(30.0, preferredTimeScale)
let startCMTime2 = CMTimeMakeWithSeconds(Double(30.0), preferredTimeScale)
let timeRange2 = CMTimeRangeMake(startCMTime, duration1)
//Insert the tracks into the composition
do {
try compositionAudioTrack1.insertTimeRange(timeRange1, of: assetTrack1, at: kCMTimeZero)
try compositionAudioTrack2.insertTimeRange(timeRange2, of: assetTrack2, at: duration1)
} catch {
print(error)
}
//Perform the merge
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport!.outputFileType = AVFileTypeAppleM4A
assetExport!.outputURL = finalURL as URL // final url is the url of that merged file
assetExport!.exportAsynchronously(completionHandler: {
switch assetExport!.status{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport!.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport!.error)")
default:
print("complete")
}
})
}
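On the .wav crash specifically: the AppleM4A preset can only write an .m4a container, and giving the output a .wav extension does not convert it, so AVAudioPlayer fails on the mislabeled file. A small sketch (illustrative names) that makes the constraint explicit before exporting:
import AVFoundation

func exportAsM4A(composition: AVMutableComposition, outputURL: URL) {
    guard let session = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else { return }
    // For this preset supportedFileTypes is effectively [.m4a]; writing a real
    // .wav would instead need AVAssetReader/AVAssetWriter with a Linear PCM setup.
    guard session.supportedFileTypes.contains(.m4a) else { return }
    session.outputFileType = .m4a
    session.outputURL = outputURL   // the URL should end in .m4a
    session.exportAsynchronously {
        print(session.status == .completed ? "complete" : "failed: \(String(describing: session.error))")
    }
}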

How to trim a video in Swift for a particular time

I am working on a task in which I have to trim a recorded video from a particular start point to a particular end point, as entered or selected by the user.
How am I supposed to do that? I used UIVideoEditorController before, but I don't want to use the default view; I want to trim the video directly.
let FinalUrlTosave = NSURL(string: "\(newURL)")
exportSession!.outputURL=FinalUrlTosave
exportSession!.shouldOptimizeForNetworkUse = true
// exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession!.outputFileType = AVFileTypeQuickTimeMovie;
let start:CMTime
let duration:CMTime
var st = starttime.doubleValue
var ed = endTime.doubleValue
start = CMTimeMakeWithSeconds(st, 600)
duration = CMTimeMakeWithSeconds(ed, 600)
// let timeRangeForCurrentSlice = CMTimeRangeMake(start, duration)
let range = CMTimeRangeMake(start, duration);
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({
switch exportSession!.status{
case AVAssetExportSessionStatus.Failed:
print("failed \(exportSession!.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(exportSession!.error)")
default:
print("complete....complete")
// self.SaveVideoToPhotoLibrary(destinationURL1!)
}
})
I am trying to achieve my goal using this code, but I am not succeeding.
Error message:
failed Optional(Error Domain=NSURLErrorDomain Code=-1100 "The
requested URL was not found on this server."
UserInfo={NSErrorFailingURLStringKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSErrorFailingURLKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSLocalizedDescription=The requested URL was not found on this
server., NSUnderlyingError=0x1553c220 {Error Domain=N
The error that occurred the second time:
failed Optional(Error Domain=NSURLErrorDomain Code=-3000 "Cannot
create file" UserInfo={NSUnderlyingError=0x14e00000 {Error
Domain=NSOSStatusErrorDomain Code=-12124 "(null)"},
NSLocalizedDescription=Cannot create file})
I found my solution using this method and it works like a charm:
func cropVideo(sourceURL1: NSURL, statTime:Float, endTime:Float)
{
let manager = NSFileManager.defaultManager()
guard let documentDirectory = try? manager.URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) else {return}
guard let mediaType = "mp4" as? String else {return}
guard let url = sourceURL1 as? NSURL else {return}
if mediaType == kUTTypeMovie as String || mediaType == "mp4" as String {
let asset = AVAsset(URL: url)
let length = Float(asset.duration.value) / Float(asset.duration.timescale)
print("video length: \(length) seconds")
let start = statTime
let end = endTime
var outputURL = documentDirectory.URLByAppendingPathComponent("output")
do {
try manager.createDirectoryAtURL(outputURL, withIntermediateDirectories: true, attributes: nil)
let name = Moment.newName()
outputURL = outputURL.URLByAppendingPathComponent("\(name).mp4")
}catch let error {
print(error)
}
//Remove existing file
_ = try? manager.removeItemAtURL(outputURL)
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeMPEG4
let startTime = CMTime(seconds: Double(start ?? 0), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ?? length), preferredTimescale: 1000)
let timeRange = CMTimeRange(start: startTime, end: endTime)
exportSession.timeRange = timeRange
exportSession.exportAsynchronouslyWithCompletionHandler{
switch exportSession.status {
case .Completed:
print("exported at \(outputURL)")
self.saveVideoTimeline(outputURL)
case .Failed:
print("failed \(exportSession.error)")
case .Cancelled:
print("cancelled \(exportSession.error)")
default: break
}
}
}
}
Swift 5
func cropVideo(sourceURL1: URL, statTime:Float, endTime:Float)
{
let manager = FileManager.default
guard let documentDirectory = try? manager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) else {return}
let mediaType = "mp4"
if mediaType == kUTTypeMovie as String || mediaType == "mp4" as String {
let asset = AVAsset(url: sourceURL1 as URL)
let length = Float(asset.duration.value) / Float(asset.duration.timescale)
print("video length: \(length) seconds")
let start = statTime
let end = endTime
var outputURL = documentDirectory.appendingPathComponent("output")
do {
try manager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
outputURL = outputURL.appendingPathComponent("\(UUID().uuidString).\(mediaType)")
}catch let error {
print(error)
}
//Remove existing file
_ = try? manager.removeItem(at: outputURL)
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
exportSession.outputURL = outputURL
exportSession.outputFileType = .mp4
let startTime = CMTime(seconds: Double(start ), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ), preferredTimescale: 1000)
let timeRange = CMTimeRange(start: startTime, end: endTime)
exportSession.timeRange = timeRange
exportSession.exportAsynchronously{
switch exportSession.status {
case .completed:
print("exported at \(outputURL)")
case .failed:
print("failed \(exportSession.error)")
case .cancelled:
print("cancelled \(exportSession.error)")
default: break
}
}
}
}
A Swift 4 version:
static func cropVideo(sourceURL: URL, startTime: Double, endTime: Double, completion: ((_ outputUrl: URL) -> Void)? = nil)
{
let fileManager = FileManager.default
let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
let asset = AVAsset(url: sourceURL)
let length = Float(asset.duration.value) / Float(asset.duration.timescale)
print("video length: \(length) seconds")
var outputURL = documentDirectory.appendingPathComponent("output")
do {
try fileManager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
outputURL = outputURL.appendingPathComponent("\(sourceURL.lastPathComponent).mp4")
}catch let error {
print(error)
}
//Remove existing file
try? fileManager.removeItem(at: outputURL)
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { return }
exportSession.outputURL = outputURL
exportSession.outputFileType = .mp4
let timeRange = CMTimeRange(start: CMTime(seconds: startTime, preferredTimescale: 1000),
end: CMTime(seconds: endTime, preferredTimescale: 1000))
exportSession.timeRange = timeRange
exportSession.exportAsynchronously {
switch exportSession.status {
case .completed:
print("exported at \(outputURL)")
completion?(outputURL)
case .failed:
print("failed \(exportSession.error.debugDescription)")
case .cancelled:
print("cancelled \(exportSession.error.debugDescription)")
default: break
}
}
}
This one does the job and it fixes the rotation problem.
extension AVAsset {
    func assetByTrimming(startTime: CMTime, endTime: CMTime) throws -> AVAsset {
        let duration = CMTimeSubtract(endTime, startTime)
        let timeRange = CMTimeRange(start: startTime, duration: duration)
        let composition = AVMutableComposition()
        do {
            for track in tracks {
                let compositionTrack = composition.addMutableTrack(withMediaType: track.mediaType, preferredTrackID: track.trackID)
                compositionTrack?.preferredTransform = track.preferredTransform
                try compositionTrack?.insertTimeRange(timeRange, of: track, at: CMTime.zero)
            }
        } catch let error {
            throw TrimError("error during composition", underlyingError: error)
        }
        return composition
    }

    struct TrimError: Error {
        let description: String
        let underlyingError: Error?

        init(_ description: String, underlyingError: Error? = nil) {
            self.description = "TrimVideo: " + description
            self.underlyingError = underlyingError
        }
    }
}
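The extension above only builds the trimmed composition; something still has to write it to disk. A companion sketch (names other than assetByTrimming are illustrative) that exports with the passthrough preset so the trim stays fast:
import AVFoundation

func exportTrimmed(asset: AVAsset, from startTime: CMTime, to endTime: CMTime, outputURL: URL, completion: @escaping (Error?) -> Void) {
    do {
        let trimmed = try asset.assetByTrimming(startTime: startTime, endTime: endTime)
        guard let session = AVAssetExportSession(asset: trimmed, presetName: AVAssetExportPresetPassthrough) else {
            completion(AVAsset.TrimError("could not create export session"))
            return
        }
        try? FileManager.default.removeItem(at: outputURL)   // export fails if the file already exists
        session.outputFileType = .mov                        // QuickTime reliably accepts passthrough output
        session.outputURL = outputURL
        session.exportAsynchronously { completion(session.error) }
    } catch {
        completion(error)
    }
}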
func cropVideo1(_ sourceURL1: URL, statTime:Float, endTime:Float){
let videoAsset: AVAsset = AVAsset(url: sourceURL1) as AVAsset
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: 1280, height: 768)
videoComposition.frameDuration = CMTimeMake(8, 15)
let instruction = AVMutableVideoCompositionInstruction()
let length = Float(videoAsset.duration.value)
print(length)
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
let start = statTime
let end = endTime
let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exportSession.outputFileType = AVFileTypeMPEG4
let startTime = CMTime(seconds: Double(start ), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ), preferredTimescale: 1000)
let timeRange = CMTimeRange(start: startTime, end: endTime)
exportSession.timeRange = timeRange
let formatter = DateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = Date()
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.string(from: date)).mp4"
let outputURL = URL(fileURLWithPath: outputPath)
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
print("sucess")
exportSession.exportAsynchronously(completionHandler: { () -> Void in
DispatchQueue.main.async(execute: {
self.exportDidFinish(exportSession)
print("sucess")
})
})
}
func exportDidFinish(_ session: AVAssetExportSession) {
if session.status == AVAssetExportSessionStatus.completed {
let outputURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: outputURL) {
library.writeVideoAtPath(toSavedPhotosAlbum: outputURL) { alAssetURL, error in
if error != nil {
DispatchQueue.main.async(execute: {
print("Failed to save video")
})
} else {
DispatchQueue.main.async(execute: {
Print("Sucessfully saved Video")
})
}
self.activityIndicator.stopAnimating()
}
}
}
}

Swift Merge audio and video files into one video

I wrote a program in Swift. I want to merge a video with an audio file, but got this error:
"failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}"
Code:
func mergeAudio(audioURL: NSURL, moviePathUrl: NSURL, savePathUrl: NSURL) {
var composition = AVMutableComposition()
let trackVideo:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
let option = NSDictionary(object: true, forKey: "AVURLAssetPreferPreciseDurationAndTimingKey")
let sourceAsset = AVURLAsset(URL: moviePathUrl, options: option as [NSObject : AnyObject])
let audioAsset = AVURLAsset(URL: audioURL, options: option as [NSObject : AnyObject])
let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
let audios = audioAsset.tracksWithMediaType(AVMediaTypeAudio)
if tracks.count > 0 {
let assetTrack:AVAssetTrack = tracks[0] as! AVAssetTrack
let assetTrackAudio:AVAssetTrack = audios[0] as! AVAssetTrack
let audioDuration:CMTime = assetTrackAudio.timeRange.duration
let audioSeconds:Float64 = CMTimeGetSeconds(assetTrackAudio.timeRange.duration)
trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrack, atTime: kCMTimeZero, error: nil)
trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrackAudio, atTime: kCMTimeZero, error: nil)
}
var assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
self.tmpMovieURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
let assetsLib = ALAssetsLibrary()
assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
println("success")
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
println("complete")
}
}
}
My guess is that a media type like MPEG-4 is wrong here.
Where is the problem? What am I missing?
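Error -11838 ("Operation Stopped") usually means the preset/output-file-type pair cannot handle the composition; with AVAssetExportPresetPassthrough, MPEG-4 output is often the culprit. A small sketch that checks the combination up front instead of finding out in the completion handler:
import AVFoundation

func canExportPassthroughToMP4(_ asset: AVAsset, completion: @escaping (Bool) -> Void) {
    AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetPassthrough,
                                                with: asset,
                                                outputFileType: .mp4,
                                                completionHandler: completion)
}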
Improved code (based on Govind's answer) with some additional features:
Merges the audio of the video with the external audio (the initial answer was dropping the video's own sound)
Flips the video horizontally if needed (I personally use it when the user captures with the front camera; by the way, Instagram flips it too)
Applies preferredTransform correctly, which solves the issue of videos saved rotated (external video: captured by another device or generated by another app)
Removed some unused VideoComposition code.
Added a completion handler to the method so that it can be called from a different class.
Updated to Swift 4.
Step 1.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2.
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded with the front camera, otherwise pass false
/// - completion: completion of saving: error or url with final video
func mergeVideoAndAudio(videoUrl: URL,
audioUrl: URL,
shouldFlipHorizontally: Bool = false,
completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {
let mixComposition = AVMutableComposition()
var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()
//start merge
let aVideoAsset = AVAsset(url: videoUrl)
let aAudioAsset = AVAsset(url: audioUrl)
let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
preferredTrackID: kCMPersistentTrackID_Invalid)
let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
preferredTrackID: kCMPersistentTrackID_Invalid)
let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
preferredTrackID: kCMPersistentTrackID_Invalid)
let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaTypeAudio).first
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
// Default must have transformation
compositionAddVideo.preferredTransform = aVideoAssetTrack.preferredTransform
if shouldFlipHorizontally {
// Flip video horizontally
var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
compositionAddVideo.preferredTransform = frontalTransform
}
mutableCompositionVideoTrack.append(compositionAddVideo)
mutableCompositionAudioTrack.append(compositionAddAudio)
mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)
do {
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aVideoAssetTrack,
at: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aAudioAssetTrack,
at: kCMTimeZero)
// adding audio (of the video if exists) asset to the final composition
if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aAudioOfVideoAssetTrack,
at: kCMTimeZero)
}
} catch {
print(error.localizedDescription)
}
// Exporting
let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl)
} catch { print(error.localizedDescription) }
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
print("success")
completion(nil, savePathUrl)
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
completion(assetExport.error, nil)
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
completion(assetExport.error, nil)
default:
print("complete")
completion(assetExport.error, nil)
}
}
}
Again thanks to @Govind's answer! It helped me a lot!
Hope this update helps someone too :)
In the question above, I ran into the same error due to a wrong savePathUrl; the destination URL should look like the code below, including the new video's name.
I was looking for code to merge audio and video files into one video but couldn't find it anywhere, so after spending hours reading the Apple docs I wrote this code.
NOTE: This is tested and 100% working code for me.
Step 1:
Import these modules in your view controller.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2:
Add this function to your code:
func mergeFilesWithUrl(videoUrl:NSURL, audioUrl:NSURL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(URL: videoUrl)
let aAudioAsset : AVAsset = AVAsset(URL: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aVideoAssetTrack, atTime: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
//Use this instead above line if your audiofile and video file's playing durations are same
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSizeMake(1280,720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video at this URL
let savePathUrl : NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
//Uncomment this if you want to store your video in the asset library
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Step 3:
Call the function where you need it, like this:
let videoUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4")!)
let audioUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3")!)
mergeFilesWithUrl(videoUrl, audioUrl: audioUrl)
Hope this will help you and save your time.
Swift 4.2 / 5
func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL, success: @escaping ((URL) -> Void), failure: @escaping ((Error?) -> Void)) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
mutableCompositionVideoTrack.append(videoTrack)
mutableCompositionAudioTrack.append(audioTrack)
if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
} catch{
print(error)
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,duration: aVideoAssetTrack.timeRange.duration)
}
}
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)
if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
/// try to export the file and handle the status cases
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let _error = exportSession.error {
failure(_error)
}
case .cancelled:
if let _error = exportSession.error {
failure(_error)
}
default:
print("finished")
success(outputURL)
}
})
} else {
failure(nil)
}
}
}
Swift 3 version with URL and the new syntax:
func mergeFilesWithUrl(videoUrl:URL, audioUrl:URL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(url: videoUrl)
let aAudioAsset : AVAsset = AVAsset(url: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
//Use this instead above line if your audiofile and video file's playing durations are same
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video at this URL
let savePathUrl : URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
//Uncomment this if you want to store your video in the asset library
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Swift 5 version (also repeats the audio if the video is longer than the audio): just pass the audio and video URLs. I have tried this with a local video and a remote audio URL.
func mergeVideoWithAudio(videoUrl: URL,
audioUrl: URL,
success: @escaping ((URL) -> Void),
failure: @escaping ((Error?) -> Void)) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
mutableCompositionVideoTrack.append( videoTrack )
mutableCompositionAudioTrack.append( audioTrack )
if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
let videoDuration = aVideoAsset.duration
if CMTimeCompare(videoDuration, aAudioAsset.duration) == -1 {
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
} else if CMTimeCompare(videoDuration, aAudioAsset.duration) == 1 {
var currentTime = CMTime.zero
while true {
var audioDuration = aAudioAsset.duration
let totalDuration = CMTimeAdd(currentTime, audioDuration)
if CMTimeCompare(totalDuration, videoDuration) == 1 {
// Clip the final repeat to the time remaining in the video
audioDuration = CMTimeSubtract(videoDuration, currentTime)
}
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioDuration), of: aAudioAssetTrack, at: currentTime)
currentTime = CMTimeAdd(currentTime, audioDuration)
if CMTimeCompare(currentTime, videoDuration) == 1 || CMTimeCompare(currentTime, videoDuration) == 0 {
break
}
}
}
videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
} catch {
print(error)
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
}
}
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)
if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
// try to export the file and handle the status cases
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let error = exportSession.error {
failure(error)
}
case .cancelled:
if let error = exportSession.error {
failure(error)
}
default:
print("finished")
success(outputURL)
}
})
} else {
failure(nil)
}
}
}
Updated for Swift Concurrency (Swift 5.7)
Some errors to throw:
enum VideoAudioMergeError: Error {
case compositionAddVideoFailed, compositionAddAudioFailed, compositionAddAudioOfVideoFailed, unknownError
}
And the method:
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded with the front camera, otherwise pass false
func mergeVideoAndAudio(videoUrl: URL,
audioUrl: URL,
shouldFlipHorizontally: Bool = false) async throws -> URL {
let mixComposition = AVMutableComposition()
var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()
//start merge
let aVideoAsset = AVAsset(url: videoUrl)
let aAudioAsset = AVAsset(url: audioUrl)
guard let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddVideoFailed
}
guard let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddAudioFailed
}
guard let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddAudioOfVideoFailed
}
do {
let aVideoAssetTrack: AVAssetTrack = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.video)[0]
let aAudioOfVideoAssetTrack: AVAssetTrack? = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.audio).first
let aAudioAssetTrack: AVAssetTrack = try await aAudioAsset.loadTracks(withMediaType: AVMediaType.audio)[0]
// Default must have transformation
compositionAddVideo.preferredTransform = try await aVideoAssetTrack.load(.preferredTransform)
if shouldFlipHorizontally {
// Flip video horizontally
var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
let naturalSize = try await aVideoAssetTrack.load(.naturalSize)
frontalTransform = frontalTransform.translatedBy(x: -naturalSize.width, y: 0.0)
frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -naturalSize.width)
compositionAddVideo.preferredTransform = frontalTransform
}
mutableCompositionVideoTrack.append(compositionAddVideo)
mutableCompositionAudioTrack.append(compositionAddAudio)
mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)
let videoTimeRange = try await aVideoAssetTrack.load(.timeRange)
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aVideoAssetTrack,
at: CMTime.zero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aAudioAssetTrack,
at: CMTime.zero)
// adding audio (of the video if exists) asset to the final composition
if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aAudioOfVideoAssetTrack,
at: CMTime.zero)
}
} catch {
throw error
}
// Exporting
let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl)
} catch { print(error.localizedDescription) }
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
await assetExport.export()
if assetExport.status == .completed {
return savePathUrl
}
if let error = assetExport.error {
throw error
} else {
throw VideoAudioMergeError.unknownError
}
}
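A usage sketch from an async context (file names are placeholders):
Task {
    do {
        let videoUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/input.mp4")
        let audioUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/voiceover.m4a")
        let mergedUrl = try await mergeVideoAndAudio(videoUrl: videoUrl, audioUrl: audioUrl)
        print("merged file at \(mergedUrl)")
    } catch {
        print("merge failed: \(error)")
    }
}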
