How to change video resolution in Swift (iOS)

Is there a way to export a video with a resolution of 480 x 960? I know there are libraries for this, but I'd rather do it without adding more pods to my project if possible.
I am converting a captured video from .MOV to .MP4. I used the method suggested in this thread.
The available presets from AVAssetExportSession are these:
AVAssetExportPresetLowQuality
AVAssetExportPresetMediumQuality
AVAssetExportPresetHighestQuality
AVAssetExportPresetHEVCHighestQuality
AVAssetExportPreset640x480
AVAssetExportPreset960x540
AVAssetExportPreset1280x720
AVAssetExportPreset1920x1080
AVAssetExportPreset3840x2160
Is this the correct approach if the exported video is MP4? The documentation for AVAssetExportSession says it is for QuickTime movies, so I am a bit confused about this.
func exportVideo(inputURL: URL,
                 presetName: String = AVAssetExportPresetHighestQuality,
                 outputFileType: AVFileType = .mp4,
                 fileExtension: String = "mp4",
                 then completion: @escaping (URL?) -> Void) {
    let asset = AVAsset(url: inputURL)
    // Derive the output name from the input, swapping the extension.
    let filename = inputURL.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
    let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
    if let session = AVAssetExportSession(asset: asset, presetName: presetName) {
        session.outputURL = outputURL
        session.outputFileType = outputFileType
        session.shouldOptimizeForNetworkUse = true
        session.exportAsynchronously {
            switch session.status {
            case .completed:
                completion(outputURL)
            case .cancelled:
                debugPrint("Video export cancelled.")
                completion(nil)
            case .failed:
                let errorMessage = session.error?.localizedDescription ?? "n/a"
                debugPrint("Video export failed with error: \(errorMessage)")
                completion(nil)
            default:
                break
            }
        }
    } else {
        completion(nil)
    }
}
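As for the MP4-versus-QuickTime confusion: you can ask AVFoundation directly whether a given preset, asset, and file type combination is exportable before creating the session. A minimal sketch using determineCompatibility (here asset stands for the AVAsset you intend to export):
AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPreset1280x720,
                                            with: asset,
                                            outputFileType: .mp4) { isCompatible in
    guard isCompatible else {
        debugPrint("This preset cannot export the asset as MP4.")
        return
    }
    // Safe to create the session with this preset and .mp4 output.
}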

You must apply a transform to your video track and set a video composition on your export session...
You have to do something like this:
//transform video
let rotationTransform = CGAffineTransform(rotationAngle: .pi)
videoTrack.preferredTransform = rotationTransform
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
videoCompositionInstruction.layerInstructions = [layerInstruction]
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [videoCompositionInstruction]
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
videoComposition.renderSize = videoSize
//saving...
guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality) else { return }
//exportSession.outputURL = your output url
exportSession.videoComposition = videoComposition
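To come back to the original 480 x 960 question: none of the built-in presets matches that size, but you can pick a larger preset and let the video composition's renderSize do the resizing, scaling the track with a layer-instruction transform. A sketch, assuming asset has a single video track (the stretch-to-fill math is illustrative and assumes an unrotated track; factor in preferredTransform for rotated camera footage):
let videoTrack = asset.tracks(withMediaType: .video)[0]
let targetSize = CGSize(width: 480, height: 960)
// Scale the source track into the target render size (stretch-to-fill).
let scale = CGAffineTransform(scaleX: targetSize.width / videoTrack.naturalSize.width,
                              y: targetSize.height / videoTrack.naturalSize.height)
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInstruction.setTransform(videoTrack.preferredTransform.concatenating(scale), at: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
instruction.layerInstructions = [layerInstruction]
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [instruction]
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
videoComposition.renderSize = targetSize
if let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset1280x720) {
    session.videoComposition = videoComposition
    session.outputFileType = .mp4
    // Set outputURL and call exportAsynchronously as in the function above.
}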

Related

Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" {Error Domain=NSOSStatusErrorDomain Code=-16976 "(null)"}

I am working on a video application in Swift 3 on iOS. Basically I have to merge video assets and audios into one with a fade effect and save it to the iPhone gallery. To achieve this, I am using the method below:
private func doMerge(arrayVideos:[AVAsset], arrayAudios:[AVAsset], animation:Bool, completion:@escaping Completion) -> Void {
var insertTime = kCMTimeZero
var audioInsertTime = kCMTimeZero
var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = []
var outputSize = CGSize.init(width: 0, height: 0)
// Determine video output size
for videoAsset in arrayVideos {
let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)
var videoSize = videoTrack.naturalSize
if assetInfo.isPortrait == true {
videoSize.width = videoTrack.naturalSize.height
videoSize.height = videoTrack.naturalSize.width
}
outputSize = videoSize
}
// Init composition
let mixComposition = AVMutableComposition.init()
for index in 0..<arrayVideos.count {
// Get video track
guard let videoTrack = arrayVideos[index].tracks(withMediaType: AVMediaTypeVideo).first else { continue }
// Get audio track
var audioTrack:AVAssetTrack?
if index < arrayAudios.count {
if arrayAudios[index].tracks(withMediaType: AVMediaTypeAudio).count > 0 {
audioTrack = arrayAudios[index].tracks(withMediaType: AVMediaTypeAudio).first
}
}
// Init video & audio composition track
let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
let startTime = kCMTimeZero
let duration = arrayVideos[index].duration
// Add video track to video composition at specific time
try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration), of: videoTrack, at: insertTime)
// Add audio track to audio composition at specific time
var audioDuration = kCMTimeZero
if index < arrayAudios.count {
audioDuration = arrayAudios[index].duration
}
if let audioTrack = audioTrack {
do {
try audioCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, audioDuration), of: audioTrack, at: audioInsertTime)
}
catch {
print(error.localizedDescription)
}
}
// Add instruction for video track
let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: arrayVideos[index], standardSize: outputSize, atTime: insertTime)
// Hide video track before changing to new track
let endTime = CMTimeAdd(insertTime, duration)
if animation {
let timeScale = arrayVideos[index].duration.timescale
let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)
layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation))
}
else {
layerInstruction.setOpacity(0, at: endTime)
}
arrayLayerInstructions.append(layerInstruction)
// Increase the insert time
audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
insertTime = CMTimeAdd(insertTime, duration)
}
catch {
print("Load track error")
}
}
// Main video composition instruction
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
mainInstruction.layerInstructions = arrayLayerInstructions
// Main video composition
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = outputSize
// Export to file
let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
let exportURL = URL.init(fileURLWithPath: path)
// Remove file if existed
FileManager.default.removeItemIfExisted(exportURL)
// Init exporter
let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = exportURL
exporter?.outputFileType = AVFileTypeQuickTimeMovie//AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = false //true
exporter?.videoComposition = mainComposition
// Do export
exporter?.exportAsynchronously(completionHandler: {
DispatchQueue.main.async {
self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion)
}
})
}
fileprivate func exportDidFinish(exporter:AVAssetExportSession?, videoURL:URL, completion:@escaping Completion) -> Void {
if exporter?.status == AVAssetExportSessionStatus.completed {
print("Exported file: \(videoURL.absoluteString)")
completion(videoURL,nil)
}
else if exporter?.status == AVAssetExportSessionStatus.failed {
completion(videoURL,exporter?.error)
print(exporter?.error as Any)
}
}
Problem: In my exportDidFinish method, the AVAssetExportSessionStatus is .failed with the error message below:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could
not be completed" UserInfo={NSLocalizedFailureReason=An unknown error
occurred (-16976), NSLocalizedDescription=The operation could not be
completed, NSUnderlyingError=0x1c065fb30 {Error
Domain=NSOSStatusErrorDomain Code=-16976 "(null)"}}
Can anyone advise me on this?
I had the exact same error, and only on the iPhone 5S simulator running iOS 11. I fixed it by changing the quality setting on the export operation from "Highest" (AVAssetExportPresetHighestQuality) to "Passthrough" (AVAssetExportPresetPassthrough), keeping the original quality:
/// try to start an export session and set the path and file type
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough) { /* AVAssetExportPresetHighestQuality */
exportSession.outputURL = videoOutputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let _error = exportSession.error {
// !!!used to fail over here with 11800, -16976 codes, if using AVAssetExportPresetHighestQuality. But works fine when using: AVAssetExportPresetPassthrough
failure(_error)
}
....
Hope this helps someone, because that error code and message don't provide any information; it's just an "unknown error". Besides changing the quality setting, I would try changing other settings and stripping down the export operation to identify the specific component that may be failing (some specific image, audio, or video asset). When you have such a general error message, it's good to use the process of elimination, cutting the code in half each time, to get to the problem in logarithmic time.
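For reference, the question's code calls an orientationFromTransform(transform:) helper that isn't shown. A common implementation, not necessarily the asker's exact version, infers orientation from the track's preferred transform:
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        // Rotated 90 degrees clockwise: portrait
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        // Rotated 90 degrees counterclockwise: portrait
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        // Rotated 180 degrees: upside-down landscape
        assetOrientation = .down
    }
    // Identity transform falls through as .up (landscape).
    return (assetOrientation, isPortrait)
}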

Overlay Two Videos with AVFoundation

I am trying to overlay two videos, with the foreground video being somewhat alpha-transparent. I have been following the Apple docs as well as this tutorial.
Whenever I try putting two of the same video through my code it doesn't crash; however, when I try feeding it two different videos I receive this error:
VideoMaskingUtils.exportVideo Error: Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.})
VideoMaskingUtils.exportVideo Description: <AVAssetExportSession: 0x1556be30, asset = <AVMutableComposition: 0x15567f10 tracks = (
"<AVMutableCompositionTrack: 0x15658030 trackID = 1, mediaType = vide, editCount = 1>",
"<AVMutableCompositionTrack: 0x1556e250 trackID = 2, mediaType = vide, editCount = 1>"
)>, presetName = AVAssetExportPresetHighestQuality, outputFileType = public.mpeg-4
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}
I understand that you can't save a video with an alpha channel on iOS -- I want to flatten the two videos into one opaque video.
When I try to overlap the two videos and apply a PiP style using CATransforms, it crashes; simply overlapping them (without alpha or any other effects applied) works.
Any help is appreciated.
Here's my code (with both approaches in it):
class func overlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset, andAlpha alpha: Float) {
let mixComposition = AVMutableComposition()
let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
guard let firstMediaTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
guard let secondMediaTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstMediaTrack, atTime: kCMTimeZero)
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondMediaTrack, atTime: kCMTimeZero)
} catch (let error) {
print(error)
}
let width = max(firstMediaTrack.naturalSize.width, secondMediaTrack.naturalSize.width)
let height = max(firstMediaTrack.naturalSize.height, secondMediaTrack.naturalSize.height)
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(width, height)
videoComposition.frameDuration = firstMediaTrack.minFrameDuration
let firstApproach = false
if firstApproach {
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
mainInstruction.backgroundColor = UIColor.redColor().CGColor
let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
firstlayerInstruction.setTransform(firstAsset.preferredTransform, atTime: kCMTimeZero)
let secondInstruction = AVMutableVideoCompositionInstruction()
secondInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
let backgroundColor = UIColor(colorLiteralRed: 1.0, green: 1.0, blue: 1.0, alpha: alpha)
secondInstruction.backgroundColor = backgroundColor.CGColor
let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
secondlayerInstruction.setTransform(secondAsset.preferredTransform, atTime: kCMTimeZero)
secondInstruction.layerInstructions = [secondlayerInstruction]
mainInstruction.layerInstructions = [firstlayerInstruction]//, secondlayerInstruction]
videoComposition.instructions = [mainInstruction, secondInstruction]
} else {
let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstMediaTrack)
firstLayerInstruction.setTransform(firstMediaTrack.preferredTransform, atTime: kCMTimeZero)
firstLayerInstruction.setOpacity(1.0, atTime: kCMTimeZero)
let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondMediaTrack)
secondlayerInstruction.setTransform(secondMediaTrack.preferredTransform, atTime: kCMTimeZero)
secondlayerInstruction.setOpacity(alpha, atTime: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration))
instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction]
videoComposition.instructions = [instruction]
}
let outputUrl = VideoMaskingUtils.getPathForTempFileNamed("output.mov")
VideoMaskingUtils.exportCompositedVideo(mixComposition, toURL: outputUrl, withVideoComposition: videoComposition)
VideoMaskingUtils.removeTempFileAtPath(outputUrl.absoluteString)
}
Here is my exportCompositedVideo function.
private class func exportCompositedVideo(compiledVideo: AVMutableComposition, toURL outputUrl: NSURL, withVideoComposition videoComposition: AVMutableVideoComposition) {
guard let exporter = AVAssetExportSession(asset: compiledVideo, presetName: AVAssetExportPresetHighestQuality) else { return }
exporter.outputURL = outputUrl
exporter.videoComposition = videoComposition
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case .Completed:
// we can be confident that there is a URL because
// we got this far. Otherwise it would've failed.
UISaveVideoAtPathToSavedPhotosAlbum(exporter.outputURL!.path!, nil, nil, nil)
print("VideoMaskingUtils.exportVideo SUCCESS!")
if exporter.error != nil {
print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")
}
NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
break
case .Exporting:
let progress = exporter.progress
print("VideoMaskingUtils.exportVideo \(progress)")
NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: progress)
break
case .Failed:
print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")
NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
break
default: break
}
})
}
Your min should be max. A video composition's instructions must cover the full duration of the composition without gaps; because both tracks were inserted at their full durations, an instruction that spans only the shorter asset leaves a gap at the end, which produces the -11841 "video could not be composed" error.
Replace this line
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration))
With this line, and it will work:
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration))
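For the PiP approach the question mentions, note that the layer instruction takes a plain CGAffineTransform, not a CATransform3D. A sketch in the question's Swift 2 syntax that scales the foreground track to half size and pins it to the bottom-right of the render area (width and height are the values computed above; the 0.5 factor is illustrative):
let pipScale: CGFloat = 0.5 // foreground at half size; pick any factor
var pipTransform = CGAffineTransformMakeScale(pipScale, pipScale)
// Move the scaled layer into the bottom-right corner of the render size.
pipTransform = CGAffineTransformConcat(pipTransform, CGAffineTransformMakeTranslation(width * (1 - pipScale), height * (1 - pipScale)))
secondlayerInstruction.setTransform(pipTransform, atTime: kCMTimeZero)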

How to flip a video using AVFoundation

I've recorded a video with the front-facing camera and the output is mirrored...
I've tried using AVMutableComposition and layer instructions to flip the video, but no luck.
Googling and searching Stack Overflow has been fruitless, so I bet a simple, straightforward example of how to do this is something that would benefit many.
There's no indication of what you are using to record the video; I'll assume AVCaptureSession + AVCaptureVideoDataOutput:
lazy var videoFileOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
let v = videoFileOutput.connectionWithMediaType(AVMediaTypeVideo)
v.videoOrientation = .Portrait
v.videoMirrored = true
You can use -[AVMutableVideoCompositionLayerInstruction setTransform:atTime:]
CGAffineTransform transform = CGAffineTransformMakeTranslation(self.config.videoSize, 0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
[videoCompositionLayerInstruction setTransform:transform atTime:videoTime];
// then append video tracks
// [compositionTrack insertTimeRange:timeRange ofTrack:track atTime:atTime error:&error];
// apply instructions
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(self.config.videoSize, self.config.videoSize);
videoComposition.frameDuration = CMTimeMake(1, self.config.videoFrameRate);
videoComposition.instructions = @[videoCompositionInstruction];
https://github.com/ElfSundae/AVDemo/tree/ef2ca437d0d8dcb3dd41c5a272c8754a29d8a936/AVSimpleEditoriOS
Export composition:
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:presetName];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = outputURL;
exportSession.shouldOptimizeForNetworkUse = YES;
// videoComposition contains transform instructions for video tracks
exportSession.videoComposition = videoComposition;
// audioMix contains background music for audio tracks
exportSession.audioMix = audioMix;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
AVAssetExportSessionStatus status = exportSession.status;
if (status != AVAssetExportSessionStatusCompleted) {
// exportSession.error
} else {
// exportSession.outputURL
}
}];
After you get your output, transform your video:
func mirrorVideo(inputURL: URL, completion: @escaping (_ outputURL : URL?) -> ())
{
let videoAsset: AVAsset = AVAsset( url: inputURL )
let clipVideoTrack = videoAsset.tracks( withMediaType: AVMediaType.video ).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)
let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
var transform:CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
transform = transform.translatedBy(x: -clipVideoTrack.naturalSize.width, y: 0.0)
transform = transform.rotated(by: CGFloat(Double.pi/2))
transform = transform.translatedBy(x: 0.0, y: -clipVideoTrack.naturalSize.width)
transformer.setTransform(transform, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
// Export
let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset640x480)!
let fileName = UniqueIDGenerator.generate().appending(".mp4")
let filePath = documentsURL.appendingPathComponent(fileName)
let croppedOutputFileUrl = filePath
exportSession.outputURL = croppedOutputFileUrl
exportSession.outputFileType = AVFileType.mp4
exportSession.videoComposition = videoComposition
exportSession.exportAsynchronously {
if exportSession.status == .completed {
DispatchQueue.main.async(execute: {
completion(croppedOutputFileUrl)
})
return
} else if exportSession.status == .failed {
print("Export failed - \(String(describing: exportSession.error))")
}
completion(nil)
return
}
}
Swift 5. AVCaptureSession:
let movieFileOutput = AVCaptureMovieFileOutput()
let connection = movieFileOutput.connection(with: .video)
if connection?.isVideoMirroringSupported ?? false {
connection?.isVideoMirrored = true
}
Same for PhotoOutput.
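The same connection-level flag works for stills; a minimal sketch with AVCapturePhotoOutput (assuming the output is already attached to a configured capture session):
let photoOutput = AVCapturePhotoOutput()
// Mirror captured photos the same way the preview shows them.
if let connection = photoOutput.connection(with: .video),
   connection.isVideoMirroringSupported {
    connection.isVideoMirrored = true
}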

Swift Merge audio and video files into one video

I wrote a program in Swift. I want to merge a video with an audio file, but got this error:
"failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}"
Code:
func mergeAudio(audioURL: NSURL, moviePathUrl: NSURL, savePathUrl: NSURL) {
var composition = AVMutableComposition()
let trackVideo:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
let option = NSDictionary(object: true, forKey: "AVURLAssetPreferPreciseDurationAndTimingKey")
let sourceAsset = AVURLAsset(URL: moviePathUrl, options: option as [NSObject : AnyObject])
let audioAsset = AVURLAsset(URL: audioURL, options: option as [NSObject : AnyObject])
let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
let audios = audioAsset.tracksWithMediaType(AVMediaTypeAudio)
if tracks.count > 0 {
let assetTrack:AVAssetTrack = tracks[0] as! AVAssetTrack
let assetTrackAudio:AVAssetTrack = audios[0] as! AVAssetTrack
let audioDuration:CMTime = assetTrackAudio.timeRange.duration
let audioSeconds:Float64 = CMTimeGetSeconds(assetTrackAudio.timeRange.duration)
trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrack, atTime: kCMTimeZero, error: nil)
trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero,audioDuration), ofTrack: assetTrackAudio, atTime: kCMTimeZero, error: nil)
}
var assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
self.tmpMovieURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
let assetsLib = ALAssetsLibrary()
assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
println("success")
case AVAssetExportSessionStatus.Failed:
println("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(assetExport.error)")
default:
println("complete")
}
}
}
My guess is that a media type like MPEG-4 is wrong here. Where is the problem? What am I missing?
Improved code (of Govind's answer) with some additional features:
Merges the audio of the video + the external audio (the initial answer was dropping the sound of the video)
Flips the video horizontally if needed (I personally use it when the user captures with the front camera; by the way, Instagram flips it too)
Applies preferredTransform correctly, which solves the issue when the video was saved rotated (external video: captured by another device/generated by another app)
Removed some unused VideoComposition code.
Added a completion handler to the method so that it can be called from a different class.
Updated to Swift 4.
Step 1.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2.
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded with the front camera, otherwise pass false
/// - completion: completion of saving: error or url with final video
func mergeVideoAndAudio(videoUrl: URL,
audioUrl: URL,
shouldFlipHorizontally: Bool = false,
completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {
let mixComposition = AVMutableComposition()
var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()
//start merge
let aVideoAsset = AVAsset(url: videoUrl)
let aAudioAsset = AVAsset(url: audioUrl)
let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
preferredTrackID: kCMPersistentTrackID_Invalid)
let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
preferredTrackID: kCMPersistentTrackID_Invalid)
let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
preferredTrackID: kCMPersistentTrackID_Invalid)
let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaTypeAudio).first
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
// Default: apply the source track's transformation
compositionAddVideo.preferredTransform = aVideoAssetTrack.preferredTransform
if shouldFlipHorizontally {
// Flip video horizontally
var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
compositionAddVideo.preferredTransform = frontalTransform
}
mutableCompositionVideoTrack.append(compositionAddVideo)
mutableCompositionAudioTrack.append(compositionAddAudio)
mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)
do {
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aVideoAssetTrack,
at: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aAudioAssetTrack,
at: kCMTimeZero)
// add the audio of the video (if it exists) to the final composition
if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
aVideoAssetTrack.timeRange.duration),
of: aAudioOfVideoAssetTrack,
at: kCMTimeZero)
}
} catch {
print(error.localizedDescription)
}
// Exporting
let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl)
} catch { print(error.localizedDescription) }
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
print("success")
completion(nil, savePathUrl)
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
completion(assetExport.error, nil)
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
completion(assetExport.error, nil)
default:
print("complete")
completion(assetExport.error, nil)
}
}
}
Again thanks to @Govind's answer! It helped me a lot!
Hope this update helps someone too :)
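A call site for this version might look like the following (the file URLs are placeholders):
let videoUrl = URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo", ofType: "mp4")!)
let audioUrl = URL(fileURLWithPath: Bundle.main.path(forResource: "SampleAudio", ofType: "mp3")!)
mergeVideoAndAudio(videoUrl: videoUrl, audioUrl: audioUrl, shouldFlipHorizontally: true) { error, url in
    if let error = error {
        print("Merge failed: \(error.localizedDescription)")
    } else if let url = url {
        print("Merged video saved to \(url)")
    }
}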
In the question above, I found the same error was caused by a wrong savePathUrl; the destination URL should be like the code below, including a name for the new video.
I was looking for code to merge audio and video files into one video but couldn't find it anywhere, so after spending hours reading the Apple docs I wrote this code.
NOTE: This is tested and 100% working code for me.
Step 1:
Import these modules in your view controller.
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
Step 2:
Add this function to your code:
func mergeFilesWithUrl(videoUrl:NSURL, audioUrl:NSURL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(URL: videoUrl)
let aAudioAsset : AVAsset = AVAsset(URL: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aVideoAssetTrack, atTime: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
//Use this instead of the above line if your audio file and video file have the same playing duration
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSizeMake(1280,720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video at this URL
let savePathUrl : NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.Completed:
//Uncomment this if you want to store your video in the assets library
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Step 3:
Call the function wherever you want, like this:
let videoUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleVideo", ofType: "mp4")!)
let audioUrl : NSURL = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("SampleAudio", ofType: "mp3")!)
mergeFilesWithUrl(videoUrl, audioUrl: audioUrl)
Hope this helps you and saves you time.
Swift 4.2 / 5
func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL, success: @escaping ((URL) -> Void), failure: @escaping ((Error?) -> Void)) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
mutableCompositionVideoTrack.append(videoTrack)
mutableCompositionAudioTrack.append(audioTrack)
if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
} catch{
print(error)
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,duration: aVideoAssetTrack.timeRange.duration)
}
}
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)
if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
/// try to export the file and handle the status cases
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let _error = exportSession.error {
failure(_error)
}
case .cancelled:
if let _error = exportSession.error {
failure(_error)
}
default:
print("finished")
success(outputURL)
}
})
} else {
failure(nil)
}
}
}
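To preview the result, you could hand the output URL straight to an AVPlayerViewController; note that the export completion handler runs on a background queue, so hop to the main queue before touching UI. A sketch (videoUrl and audioUrl assumed to be valid local file URLs):
mergeVideoWithAudio(videoUrl: videoUrl, audioUrl: audioUrl, success: { outputURL in
    DispatchQueue.main.async {
        let playerViewController = AVPlayerViewController()
        playerViewController.player = AVPlayer(url: outputURL)
        // Present playerViewController from your view controller, then:
        playerViewController.player?.play()
    }
}, failure: { error in
    print("Merge failed: \(String(describing: error))")
})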
Swift 3 version, with URL and the new syntax.
func mergeFilesWithUrl(videoUrl:URL, audioUrl:URL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(url: videoUrl)
let aAudioAsset : AVAsset = AVAsset(url: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
//Use this instead of the above line if your audio file and video file have the same playing duration
// try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), ofTrack: aAudioAssetTrack, atTime: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
// playerItem = AVPlayerItem(asset: mixComposition)
// player = AVPlayer(playerItem: playerItem!)
//
//
// AVPlayerVC.player = player
//find your video at this URL
let savePathUrl : URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
//Uncomment this if you want to store your video in the assets library
//let assetsLib = ALAssetsLibrary()
//assetsLib.writeVideoAtPathToSavedPhotosAlbum(savePathUrl, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
Swift 5 version (also loops the audio if the video is longer than the audio): just pass the audio and video URLs. I have tried this with a local video and a remote audio URL.
func mergeVideoWithAudio(videoUrl: URL,
audioUrl: URL,
success: @escaping ((URL) -> Void),
failure: @escaping ((Error?) -> Void)) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
mutableCompositionVideoTrack.append( videoTrack )
mutableCompositionAudioTrack.append( audioTrack )
if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
let videoDuration = aVideoAsset.duration
if CMTimeCompare(videoDuration, aAudioAsset.duration) == -1 {
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
} else if CMTimeCompare(videoDuration, aAudioAsset.duration) == 1 {
// Audio is shorter than the video: loop it until the video ends.
var currentTime = CMTime.zero
while true {
var audioDuration = aAudioAsset.duration
let totalDuration = CMTimeAdd(currentTime, audioDuration)
if CMTimeCompare(totalDuration, videoDuration) == 1 {
// Trim the final repetition so the audio ends exactly with the video.
audioDuration = CMTimeSubtract(videoDuration, currentTime)
}
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioDuration), of: aAudioAssetTrack, at: currentTime)
currentTime = CMTimeAdd(currentTime, audioDuration)
if CMTimeCompare(currentTime, videoDuration) == 1 || CMTimeCompare(currentTime, videoDuration) == 0 {
break
}
}
}
videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
} catch {
print(error)
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
}
}
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)
if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("\("fileName").m4v")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
// try to export the file and handle the status cases
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let error = exportSession.error {
failure(error)
}
case .cancelled:
if let error = exportSession.error {
failure(error)
}
default:
print("finished")
success(outputURL)
}
})
} else {
failure(nil)
}
}
}
Updated for Swift Concurrency (Swift 5.7)
Some errors to throw:
enum VideoAudioMergeError: Error {
case compositionAddVideoFailed, compositionAddAudioFailed, compositionAddAudioOfVideoFailed, unknownError
}
And the method:
/// Merges video and sound while keeping sound of the video too
///
/// - Parameters:
/// - videoUrl: URL to video file
/// - audioUrl: URL to audio file
/// - shouldFlipHorizontally: pass true if the video was recorded with the front camera, otherwise pass false
func mergeVideoAndAudio(videoUrl: URL,
audioUrl: URL,
shouldFlipHorizontally: Bool = false) async throws -> URL {
let mixComposition = AVMutableComposition()
var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()
//start merge
let aVideoAsset = AVAsset(url: videoUrl)
let aAudioAsset = AVAsset(url: audioUrl)
guard let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddVideoFailed
}
guard let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddAudioFailed
}
guard let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw VideoAudioMergeError.compositionAddAudioOfVideoFailed
}
do {
let aVideoAssetTrack: AVAssetTrack = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.video)[0]
let aAudioOfVideoAssetTrack: AVAssetTrack? = try await aVideoAsset.loadTracks(withMediaType: AVMediaType.audio).first
let aAudioAssetTrack: AVAssetTrack = try await aAudioAsset.loadTracks(withMediaType: AVMediaType.audio)[0]
// Default: apply the source track's transformation
compositionAddVideo.preferredTransform = try await aVideoAssetTrack.load(.preferredTransform)
if shouldFlipHorizontally {
// Flip video horizontally
var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
let naturalSize = try await aVideoAssetTrack.load(.naturalSize)
frontalTransform = frontalTransform.translatedBy(x: -naturalSize.width, y: 0.0)
frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -naturalSize.width)
compositionAddVideo.preferredTransform = frontalTransform
}
mutableCompositionVideoTrack.append(compositionAddVideo)
mutableCompositionAudioTrack.append(compositionAddAudio)
mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo)
let videoTimeRange = try await aVideoAssetTrack.load(.timeRange)
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aVideoAssetTrack,
at: CMTime.zero)
//In my case my audio file is longer than the video file, so I took the videoAsset duration
//instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aAudioAssetTrack,
at: CMTime.zero)
// add the audio of the video (if it exists) to the final composition
if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoTimeRange.duration),
of: aAudioOfVideoAssetTrack,
at: CMTime.zero)
}
} catch {
throw error
}
// Exporting
let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl)
} catch { print(error.localizedDescription) }
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
await assetExport.export()
if assetExport.status == .completed {
return savePathUrl
}
if let error = assetExport.error {
throw error
} else {
throw VideoAudioMergeError.unknownError
}
}
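Calling the async version from a UI context is then a short Task (videoUrl and audioUrl assumed to be valid URLs):
Task {
    do {
        let mergedURL = try await mergeVideoAndAudio(videoUrl: videoUrl, audioUrl: audioUrl)
        print("Merged video at \(mergedURL)")
    } catch {
        print("Merge failed: \(error)")
    }
}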

Merging two recorded videos gives opposite orientation output

I am trying to merge two recorded videos with this code:
func exportVideo3() {
transprentView.hidden = false
activityIndicator.startAnimating()
let composition = AVMutableComposition()
let trackVideo = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
var insertTime = kCMTimeZero
let path = NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: false, error: nil)!.path!
for index in 1...2 {
let movieUrl = index == 1 ? video1URL : video2URL
let sourceAsset = AVURLAsset(URL: movieUrl, options: nil)
let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
let audios = sourceAsset.tracksWithMediaType(AVMediaTypeAudio)
if tracks.count > 0{
let assetTrack:AVAssetTrack = tracks[0] as! AVAssetTrack
trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero,sourceAsset.duration), ofTrack: assetTrack, atTime: insertTime, error: nil)
let assetTrackAudio:AVAssetTrack = audios[0] as! AVAssetTrack
trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero,sourceAsset.duration), ofTrack: assetTrackAudio, atTime: insertTime, error: nil)
insertTime = CMTimeAdd(insertTime, sourceAsset.duration)
}
}
let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
exporter.outputURL = documentsDirectoryURL.URLByAppendingPathComponent("mergedMovie.mp4")
if NSFileManager.defaultManager().fileExistsAtPath(exporter.outputURL.path!) {
NSFileManager.defaultManager().removeItemAtURL(exporter.outputURL, error: nil)
}
mergedVideoURL = exporter.outputURL
exporter.outputFileType = AVFileTypeMPEG4
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status{
case AVAssetExportSessionStatus.Failed:
println("failed \(exporter.error)")
case AVAssetExportSessionStatus.Cancelled:
println("cancelled \(exporter.error)")
default:
self.transprentView.hidden = true
self.activityIndicator.stopAnimating()
println("complete")
}
})
}
It is working fine, but the new video has the opposite orientation: when I record both videos in portrait and merge them, the new video plays in landscape mode, and when I record both in landscape and merge them, the new video plays in portrait mode.
So how can I solve this issue, so that when I record in landscape mode I get landscape output, and the same for portrait?
HERE is my sample project for more reference.
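One commonly suggested fix, when both recordings share the same orientation, is to copy the source track's preferredTransform onto the composition track; the composition track's default identity transform is what makes portrait footage play back sideways. A sketch against the question's code (inside the loop, after assetTrack is obtained); for mixed orientations you would instead build per-clip layer instructions as in the merge answer further above:
// Carry the source orientation over to the composition track.
// Works when every inserted clip shares the same orientation.
trackVideo.preferredTransform = assetTrack.preferredTransform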
