I want to merge multiple videos and keep each one's actual orientation in the final video. This is my code:
func mergeSelectedVideos() {
    let composition = AVMutableComposition()
    let videoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    var time: Double = 0.0
    for video in self.arrayOfSelectedVideoURL {
        let sourceAsset = AVAsset(url: video as URL)
        let videoAssetTrack = sourceAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        let audioAssetTrack = sourceAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let atTime = CMTime(seconds: time, preferredTimescale: 1)
        do {
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, sourceAsset.duration), of: videoAssetTrack, at: atTime)
            try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, sourceAsset.duration), of: audioAssetTrack, at: atTime)
        } catch {
            print("-----Something went wrong: \(error)")
        }
        time += sourceAsset.duration.seconds
    }
    let mergedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/mergedVideo.mp4")
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.outputURL = mergedVideoURL as URL
    removeFileAtURLIfExists(url: mergedVideoURL)
    exporter.exportAsynchronously(completionHandler: {
        switch exporter.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: exporter.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: exporter.error))")
        case AVAssetExportSessionStatus.unknown:
            print("unknown \(String(describing: exporter.error))")
        case AVAssetExportSessionStatus.waiting:
            print("waiting \(String(describing: exporter.error))")
        case AVAssetExportSessionStatus.exporting:
            print("exporting \(String(describing: exporter.error))")
        default:
            print("-----Merged video export complete: \(mergedVideoURL)")
        }
    })
}
With this code I am able to merge multiple videos, but portrait videos get rotated in the merged video. Is it possible to merge them so that a portrait video stays portrait and a landscape video stays landscape in the final video? I searched a lot but didn't find anything.
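For reference, a minimal sketch of one common approach, in the same Swift 3 syntax as the code above: a composition track has only one transform, so mixed orientations need an AVMutableVideoComposition with a layer instruction per clip that applies that clip's own preferredTransform for its time range. This is a sketch, not a drop-in solution: it assumes it runs inside mergeSelectedVideos() (reusing videoTrack, exporter, and arrayOfSelectedVideoURL from above), the renderSize is an arbitrary assumption, and clips may still need scaling/translation to fit that canvas.

    // Sketch: give each clip a layer instruction carrying its own orientation.
    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderSize = CGSize(width: 720, height: 1280) // assumption: portrait canvas

    var instructions: [AVMutableVideoCompositionInstruction] = []
    var cursor = kCMTimeZero
    for video in self.arrayOfSelectedVideoURL {
        let asset = AVAsset(url: video as URL)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(cursor, asset.duration)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        // Apply the source clip's own orientation transform for this time range.
        layerInstruction.setTransform(assetTrack.preferredTransform, at: cursor)
        instruction.layerInstructions = [layerInstruction]
        instructions.append(instruction)
        cursor = CMTimeAdd(cursor, asset.duration)
    }
    videoComposition.instructions = instructions
    exporter.videoComposition = videoComposition // set before calling exportAsynchronously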
I'm trying to merge N audio tracks into a video file.
The video is in MP4 format, and all the audios are M4A.
All the preparation works well, but the export always fails when it finishes.
Here's my code:
func mixAudioAndVideo() {
    self.player?.pause()
    let mixComposition = AVMutableComposition()
    let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
    let docsDirect = paths[0]
    for audioTrack in self.audioTracks {
        let musicFile = docsDirect.URLByAppendingPathComponent(audioTrack.audioName)
        let audioAsset = AVURLAsset(URL: musicFile!, options: nil)
        let audioTimeRange = CMTimeRangeMake(audioTrack.audioTime!, audioAsset.duration)
        let compositionAudioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            try compositionAudioTrack.insertTimeRange(audioTimeRange, ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio).first!, atTime: audioTrack.audioTime!)
        } catch let error {
            print(error)
        }
    }
    let videoAsset = AVURLAsset(URL: video!.movieURL, options: nil)
    let videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
    let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try compositionVideoTrack.insertTimeRange(videoTimeRange, ofTrack: videoAsset.tracksWithMediaType(AVMediaTypeVideo).first!, atTime: kCMTimeZero)
    } catch let error {
        print(error)
    }
    let videoName = "video\(self.audioTracks.count).mov"
    let outputFilePath = docsDirect.URLByAppendingPathComponent(videoName)
    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    assetExport!.outputFileType = AVFileTypeQuickTimeMovie
    assetExport!.outputURL = outputFilePath!
    assetExport?.exportAsynchronouslyWithCompletionHandler({
        dispatch_async(dispatch_get_main_queue()) {
            print("finished exporting \(outputFilePath)")
            print("status \(assetExport?.status)")
            print("error \(assetExport?.error)")
            SVProgressHUD.dismiss()
        }
    })
}
And the error I get:
error Optional(Error Domain=NSURLErrorDomain Code=-1 "unknown error" UserInfo={NSLocalizedDescription=unknown error, NSUnderlyingError=0x170056140 {Error Domain=NSOSStatusErrorDomain Code=-12935 "(null)"}})
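Two things worth checking, though I can't be sure of the cause from the code alone. First, the source range CMTimeRangeMake(audioTrack.audioTime!, audioAsset.duration) starts partway into the audio asset but keeps the full asset duration, so it can run past the end of the source track; the offset usually belongs only in the atTime: insertion point. Second, AVAssetExportSession fails if a file already exists at the output URL. A sketch of both fixes, in the same Swift 2 syntax as the question:

    // Inside the for loop: read each audio file from its beginning, and use
    // the track's offset only as the insertion point in the mix.
    let audioTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    do {
        try compositionAudioTrack.insertTimeRange(audioTimeRange,
            ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio).first!,
            atTime: audioTrack.audioTime!)
    } catch {
        print(error)
    }

    // Before exporting: make sure nothing is already at the output URL.
    let fileManager = NSFileManager.defaultManager()
    if let path = outputFilePath?.path where fileManager.fileExistsAtPath(path) {
        do {
            try fileManager.removeItemAtPath(path)
        } catch {
            print("could not remove existing file: \(error)")
        }
    }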
Swift 3
First, merge the N audio tracks:
var mergeAudioURL = NSURL()

func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()
    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
    }
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("Merge Audio.m4a")! as URL as NSURL
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = mergeAudioURL as URL
    removeFileAtURLIfExists(url: mergeAudioURL)
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown \(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting \(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting \(assetExport?.error)")
        default:
            print("-----Merge audio export complete: \(self.mergeAudioURL)")
        }
    })
}
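For reference, a hypothetical call site (the file names below are placeholders for your own recordings):

    // Hypothetical usage: clip1.m4a / clip2.m4a are placeholders.
    let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let urls: NSArray = [docs.appendingPathComponent("clip1.m4a") as NSURL,
                         docs.appendingPathComponent("clip2.m4a") as NSURL]
    mergeAudioFiles(audioFileUrls: urls)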
Then merge the audio with the video:
var mergedAudioVideoURl = NSURL()

func mergeMutableVideoWithAudio(videoUrl: NSURL, audioUrl: NSURL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    //start merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl as URL)
    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
        print(error)
    }
    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)
    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
    mergedAudioVideoURl = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/FinalVideo.mp4")
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = mergedAudioVideoURl as URL
    removeFileAtURLIfExists(url: mergedAudioVideoURl)
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("-----Merge mutable video with trimmed audio export complete: \(self.mergedAudioVideoURl)")
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
        }
    }
}

func removeFileAtURLIfExists(url: NSURL) {
    if let filePath = url.path {
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("-----Couldn't remove existing destination file: \(error)")
            }
        }
    }
}
It crashes when I try to play the merged audio.
I am trying to merge two audio files. They merge successfully, but when I play the merged audio with AVAudioPlayer it crashes. There is also a problem with formats: the merged audio only works when stored in the .m4a format; if I save it with a .wav extension, it crashes.
func merge(audio1: NSURL, audio2: NSURL) {
    let finalURL = getMergeFileURL()
    let preferredTimeScale: Int32 = 100
    //This object will be edited to include both audio files
    let composition = AVMutableComposition()
    let compositionAudioTrack1: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    //let url1 = audio1
    let avAsset1 = AVURLAsset(url: audio1 as URL, options: nil)
    let tracks1 = avAsset1.tracks(withMediaType: AVMediaTypeAudio)
    let assetTrack1: AVAssetTrack = tracks1[0]
    let duration1: CMTime = CMTimeMakeWithSeconds(30.0, preferredTimeScale)
    let startCMTime = CMTimeMakeWithSeconds(Double(30.0), preferredTimeScale)
    let timeRange1 = CMTimeRangeMake(startCMTime, duration1)
    let compositionAudioTrack2: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    //let url2 = audio2
    let avAsset2 = AVURLAsset(url: audio2 as URL, options: nil)
    let tracks2 = avAsset2.tracks(withMediaType: AVMediaTypeAudio)
    let assetTrack2: AVAssetTrack = tracks2[0]
    let duration2: CMTime = CMTimeMakeWithSeconds(30.0, preferredTimeScale)
    let startCMTime2 = CMTimeMakeWithSeconds(Double(30.0), preferredTimeScale)
    let timeRange2 = CMTimeRangeMake(startCMTime2, duration2)
    //Insert the tracks into the composition
    do {
        try compositionAudioTrack1.insertTimeRange(timeRange1, of: assetTrack1, at: kCMTimeZero)
        try compositionAudioTrack2.insertTimeRange(timeRange2, of: assetTrack2, at: duration1)
    } catch {
        print(error)
    }
    //Perform the merge
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport!.outputFileType = AVFileTypeAppleM4A
    assetExport!.outputURL = finalURL as URL // finalURL is the URL of the merged file
    assetExport!.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport!.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport!.error)")
        default:
            print("complete")
        }
    })
}
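I can't reproduce the crash, but two things stand out. AVAssetExportPresetAppleM4A can only write an .m4a (AAC) container — its supportedFileTypes is just AVFileTypeAppleM4A — so saving the output with a .wav extension produces a file whose bytes don't match its extension, and AVAudioPlayer fails on it. Also, the player should only be created after the export reports completed, and it must be held in a strong property, or it is deallocated mid-playback. A minimal sketch in the same Swift 3 syntax, where mergedPlayer is a hypothetical property (assumes AVFoundation is imported):

    var mergedPlayer: AVAudioPlayer? // strong reference so playback isn't cut off

    // Call this from the export completion handler once status == .completed,
    // passing finalURL. Keep the .m4a container the preset actually produces.
    func playMergedAudio(at url: URL) {
        do {
            mergedPlayer = try AVAudioPlayer(contentsOf: url)
            mergedPlayer?.play()
        } catch {
            print("could not play merged audio: \(error)")
        }
    }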
I wrote a program in Swift. I want to merge a video with an audio file, but got this error:
"failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}"
Code:
func mergeAudio(audioURL: NSURL, moviePathUrl: NSURL, savePathUrl: NSURL) {
    var composition = AVMutableComposition()
    let trackVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let trackAudio: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let option = NSDictionary(object: true, forKey: "AVURLAssetPreferPreciseDurationAndTimingKey")
    let sourceAsset = AVURLAsset(URL: moviePathUrl, options: option as [NSObject : AnyObject])
    let audioAsset = AVURLAsset(URL: audioURL, options: option as [NSObject : AnyObject])
    let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
    let audios = audioAsset.tracksWithMediaType(AVMediaTypeAudio)
    if tracks.count > 0 {
        let assetTrack: AVAssetTrack = tracks[0] as! AVAssetTrack
        let assetTrackAudio: AVAssetTrack = audios[0] as! AVAssetTrack
        let audioDuration: CMTime = assetTrackAudio.timeRange.duration
        let audioSeconds: Float64 = CMTimeGetSeconds(assetTrackAudio.timeRange.duration)
        trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), ofTrack: assetTrack, atTime: kCMTimeZero, error: nil)
        trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), ofTrack: assetTrackAudio, atTime: kCMTimeZero, error: nil)
    }
    var assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    self.tmpMovieURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.Completed:
            let assetsLib = ALAssetsLibrary()
            assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
            println("success")
        case AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("complete")
        }
    }
}
My idea is that a media type like MPEG-4 is wrong here. Where is the problem? What am I missing?
Improved code (based on Govind's answer) with some additional features:
Merges the audio of the video with the external audio (the initial answer dropped the video's own sound)
Flips the video horizontally if needed (I personally use it when the user captures with the front camera; by the way, Instagram flips it too)
Applies preferredTransform correctly, which solves the issue of the video being saved rotated (when the video is external: captured by another device or generated by another app)
Removed some unused VideoComposition code.
Added a completion handler to the method so that it can be called from a different class.
Updated to Swift 4.
Step 1.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2.
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded using the front camera, otherwise pass false
/// - completion: completion handler with an error or the URL of the final video
func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL,
                        shouldFlipHorizontally: Bool = false,
                        completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    //start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaTypeAudio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    // Default: keep the source video's transformation
    compositionAddVideo.preferredTransform = aVideoAssetTrack.preferredTransform

    if shouldFlipHorizontally {
        // Flip video horizontally
        var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
        frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
        compositionAddVideo.preferredTransform = frontalTransform
    }

    mutableCompositionVideoTrack.append(compositionAddVideo)
    mutableCompositionAudioTrack.append(compositionAddAudio)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: kCMTimeZero)

        //In my case my audio file is longer than the video file, so I took the videoAsset duration
        //instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: kCMTimeZero)

        // adding the video's own audio track (if it exists) to the final composition
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                                       aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: kCMTimeZero)
        }
    } catch {
        print(error.localizedDescription)
    }

    // Exporting
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete old video
        try FileManager.default.removeItem(at: savePathUrl)
    } catch { print(error.localizedDescription) }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("success")
            completion(nil, savePathUrl)
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        default:
            print("complete")
            completion(assetExport.error, nil)
        }
    }
}
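A hypothetical call site for reference (the file paths are placeholders for your own files):

    let videoUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/capture.mp4")
    let audioUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/music.m4a")
    mergeVideoAndAudio(videoUrl: videoUrl, audioUrl: audioUrl, shouldFlipHorizontally: true) { error, url in
        if let error = error {
            print("merge failed: \(error.localizedDescription)")
        } else if let url = url {
            print("merged video saved at \(url)")
        }
    }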
Again thanks to @Govind's answer! It helped me a lot!
Hope this update helps someone too :)
In the question above, I found the same error occurred because of a wrong savePathUrl: the destination URL should be like in the code below, including the new video's name.
I was looking for code to merge audio and video files into one video but couldn't find it anywhere, so after spending hours reading Apple docs I wrote this code.
NOTE: This is tested and 100% working code for me.
Step 1:
Import these modules in your view controller:
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2:
Add this function to your code:
func mergeFilesWithUrl(videoUrl: NSURL, audioUrl: NSURL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    //start merge
    let aVideoAsset: AVAsset = AVAsset(URL: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(URL: audioUrl)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aVideoAssetTrack, atTime: kCMTimeZero)

        //In my case my audio file is longer than the video file, so I took the videoAsset duration
        //instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)

        //Use this instead of the line above if your audio file's and video file's playing durations are the same
        // try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aAudioAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
    } catch {
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSizeMake(1280, 720)

    // playerItem = AVPlayerItem(asset: mixComposition)
    // player = AVPlayer(playerItem: playerItem!)
    // AVPlayerVC.player = player

    //find your video at this URL
    let savePathUrl: NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.Completed:
            //Uncomment this if you want to store your video in the asset library
            //let assetsLib = ALAssetsLibrary()
            //assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
            print("success")
        case AVAssetExportSessionStatus.Failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
        }
    }
}
Step 3:
Call the function wherever you want, like this:
let videoUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4")!)
let audioUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3")!)
mergeFilesWithUrl(videoUrl, audioUrl: audioUrl)
Hope this helps you and saves your time.
Swift 4.2 / 5
func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL, success: @escaping ((URL) -> Void), failure: @escaping ((Error?) -> Void)) {

    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)

        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
                try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
                videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
            } catch {
                print(error)
            }

            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
        }
    }

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")

        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true

            /// try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .failed:
                    if let _error = exportSession.error {
                        failure(_error)
                    }
                case .cancelled:
                    if let _error = exportSession.error {
                        failure(_error)
                    }
                default:
                    print("finished")
                    success(outputURL)
                }
            })
        } else {
            failure(nil)
        }
    }
}
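A hypothetical call site (the bundle resource names are placeholders):

    if let videoUrl = Bundle.main.url(forResource: "SampleVideo", withExtension: "mp4"),
       let audioUrl = Bundle.main.url(forResource: "SampleAudio", withExtension: "mp3") {
        mergeVideoWithAudio(videoUrl: videoUrl, audioUrl: audioUrl, success: { url in
            print("exported to \(url)")
        }, failure: { error in
            print("export failed: \(String(describing: error))")
        })
    }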
Swift 3 version, with URL and the new syntax.
func mergeFilesWithUrl(videoUrl: URL, audioUrl: URL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    //start merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)

        //In my case my audio file is longer than the video file, so I took the videoAsset duration
        //instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)

        //Use this instead of the line above if your audio file's and video file's playing durations are the same
        // try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aAudioAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

    // playerItem = AVPlayerItem(asset: mixComposition)
    // player = AVPlayer(playerItem: playerItem!)
    // AVPlayerVC.player = player

    //find your video at this URL
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            //Uncomment this if you want to store your video in the asset library
            //let assetsLib = ALAssetsLibrary()
            //assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
            print("success")
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
        }
    }
}
Swift 5 version (also repeats the audio if the video is longer than the audio): just pass the audio and video URLs. I have tried this with a local video and a remote audio URL.
func mergeVideoWithAudio(videoUrl: URL,
                         audioUrl: URL,
                         success: @escaping ((URL) -> Void),
                         failure: @escaping ((Error?) -> Void)) {

    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)

        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)

                let videoDuration = aVideoAsset.duration
                if CMTimeCompare(videoDuration, aAudioAsset.duration) == -1 {
                    try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
                } else if CMTimeCompare(videoDuration, aAudioAsset.duration) == 1 {
                    var currentTime = CMTime.zero
                    while true {
                        var audioDuration = aAudioAsset.duration
                        let totalDuration = CMTimeAdd(currentTime, audioDuration)
                        if CMTimeCompare(totalDuration, videoDuration) == 1 {
                            // clamp the last repetition to whatever video time is left
                            audioDuration = CMTimeSubtract(videoDuration, currentTime)
                        }
                        try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioDuration), of: aAudioAssetTrack, at: currentTime)
                        currentTime = CMTimeAdd(currentTime, audioDuration)
                        if CMTimeCompare(currentTime, videoDuration) == 1 || CMTimeCompare(currentTime, videoDuration) == 0 {
                            break
                        }
                    }
                }
                videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
            } catch {
                print(error)
            }

            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
        }
    }

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")

        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true

            // try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .failed:
                    if let error = exportSession.error {
                        failure(error)
                    }
                case .cancelled:
                    if let error = exportSession.error {
                        failure(error)
                    }
                default:
                    print("finished")
                    success(outputURL)
                }
            })
        } else {
            failure(nil)
        }
    }
}
Updated for Swift Concurrency (Swift 5.7)
Some errors to throw:
enum VideoAudioMergeError: Error {
    case compositionAddVideoFailed, compositionAddAudioFailed, compositionAddAudioOfVideoFailed, unknownError
}
And the method:
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded using the front camera, otherwise pass false
func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL,
                        shouldFlipHorizontally: Bool = false) async throws -> URL {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    //start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    guard let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                   preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddVideoFailed
    }

    guard let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                   preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddAudioFailed
    }

    guard let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                          preferredTrackID: kCMPersistentTrackID_Invalid) else {
        throw VideoAudioMergeError.compositionAddAudioOfVideoFailed
    }

    do {
        let aVideoAssetTrack: AVAssetTrack = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.video)[0]
        let aAudioOfVideoAssetTrack: AVAssetTrack? = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.audio).first
        let aAudioAssetTrack: AVAssetTrack = try await aAudioAsset.loadTracks(withMediaType: AVMediaType.audio)[0]

        // Default: keep the source video's transformation
        compositionAddVideo.preferredTransform = try await aVideoAssetTrack.load(.preferredTransform)

        if shouldFlipHorizontally {
            // Flip video horizontally
            var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
            let naturalSize = try await aVideoAssetTrack.load(.naturalSize)
            frontalTransform = frontalTransform.translatedBy(x: -naturalSize.width, y: 0.0)
            frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -naturalSize.width)
            compositionAddVideo.preferredTransform = frontalTransform
        }

        mutableCompositionVideoTrack.append(compositionAddVideo)
        mutableCompositionAudioTrack.append(compositionAddAudio)
        mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)

        let videoTimeRange = try await aVideoAssetTrack.load(.timeRange)

        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: videoTimeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: CMTime.zero)

        //In my case my audio file is longer than the video file, so I took the videoAsset duration
        //instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: videoTimeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: CMTime.zero)

        // adding the video's own audio track (if it exists) to the final composition
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                                       duration: videoTimeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: CMTime.zero)
        }
    } catch {
        throw error
    }

    // Exporting
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete old video
        try FileManager.default.removeItem(at: savePathUrl)
    } catch { print(error.localizedDescription) }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    await assetExport.export()

    if assetExport.status == .completed {
        return savePathUrl
    }

    if let error = assetExport.error {
        throw error
    } else {
        throw VideoAudioMergeError.unknownError
    }
}
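A hypothetical call site from synchronous code, e.g. a button handler (the file paths are placeholders):

    Task {
        do {
            // assumption: these URLs point at files you already have on disk
            let videoUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/capture.mp4")
            let audioUrl = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/music.m4a")
            let mergedUrl = try await mergeVideoAndAudio(videoUrl: videoUrl, audioUrl: audioUrl)
            print("merged video saved at \(mergedUrl)")
        } catch {
            print("merge failed: \(error)")
        }
    }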
I am trying to merge two recorded videos with this code:
func exportVideo3() {
    transprentView.hidden = false
    activityIndicator.startAnimating()
    let composition = AVMutableComposition()
    let trackVideo = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let trackAudio = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var insertTime = kCMTimeZero
    let path = NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: false, error: nil)!.path!
    for index in 1...2 {
        let movieUrl = index == 1 ? video1URL : video2URL
        let sourceAsset = AVURLAsset(URL: movieUrl, options: nil)
        let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
        let audios = sourceAsset.tracksWithMediaType(AVMediaTypeAudio)
        if tracks.count > 0 {
            let assetTrack: AVAssetTrack = tracks[0] as! AVAssetTrack
            trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, sourceAsset.duration), ofTrack: assetTrack, atTime: insertTime, error: nil)
            let assetTrackAudio: AVAssetTrack = audios[0] as! AVAssetTrack
            trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, sourceAsset.duration), ofTrack: assetTrackAudio, atTime: insertTime, error: nil)
            insertTime = CMTimeAdd(insertTime, sourceAsset.duration)
        }
    }
    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    exporter.outputURL = documentsDirectoryURL.URLByAppendingPathComponent("mergedMovie.mp4")
    if NSFileManager.defaultManager().fileExistsAtPath(exporter.outputURL.path!) {
        NSFileManager.defaultManager().removeItemAtURL(exporter.outputURL, error: nil)
    }
    mergedVideoURL = exporter.outputURL
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.exportAsynchronouslyWithCompletionHandler({
        switch exporter.status {
        case AVAssetExportSessionStatus.Failed:
            println("failed \(exporter.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(exporter.error)")
        default:
            self.transprentView.hidden = true
            self.activityIndicator.stopAnimating()
            println("complete")
        }
    })
}
It works fine, but the new video has the opposite orientation: when I record both videos in portrait and merge them, the new video plays in landscape, and when I record both in landscape and merge them, the new video plays in portrait.
How can I solve this, so that when I record in landscape mode the output is landscape, and the same when I record in portrait?
HERE is my sample project for more reference.
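A minimal sketch of one fix, under the assumption that both recordings share the same orientation: composition tracks default to the identity transform, so carry the first source track's preferredTransform over before exporting (inside exportVideo3, in the same pre-Swift-2 syntax as the code above). Clips with mixed orientations would instead need an AVMutableVideoComposition with a layer instruction per clip.

    // Sketch: copy the first clip's orientation onto the composition track.
    let firstAsset = AVURLAsset(URL: video1URL, options: nil)
    let firstTracks = firstAsset.tracksWithMediaType(AVMediaTypeVideo)
    if firstTracks.count > 0 {
        let firstTrack = firstTracks[0] as! AVAssetTrack
        trackVideo.preferredTransform = firstTrack.preferredTransform
    }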