I want to rotate my captured video from landscape to portrait, my code is below:
if self.isDeviceRotatedToLandscape
{
    // "%@" is the NSString format specifier for objects; "%#" is not a valid
    // specifier and produced a garbage path, so the source file was never found.
    let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
    let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
    let fileManager : NSFileManager = NSFileManager.defaultManager()
    if fileManager.fileExistsAtPath(outputPath as String)
    {
        let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
        if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
        {
            // Single layer instruction: scale (identity here), then translate down 160pt.
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
            let scaleFactor = CGAffineTransformMakeScale(1.0, 1.0)
            layerInstruction.setTransform(
                CGAffineTransformConcat(
                    CGAffineTransformConcat(clipVideoTrack.preferredTransform, scaleFactor),
                    CGAffineTransformMakeTranslation(0, 160)),
                atTime: kCMTimeZero)
            layerInstruction.setOpacity(0.0, atTime: asset.duration)

            let mainInstruction = AVMutableVideoCompositionInstruction()
            // Without a timeRange the instruction covers no frames and the export
            // fails with an invalid-video-composition error.
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
            mainInstruction.layerInstructions = [layerInstruction]

            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = CGSizeMake(612, 612)

            let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output1212.mov")
            let exportUrl : NSURL = NSURL.fileURLWithPath(exportPath as String)!
            if fileManager.fileExistsAtPath(exportPath as String)
            {
                var error: NSError? = nil
                // removeItemAtPath returns true on SUCCESS; the error branch
                // must be taken when it returns false.
                if !fileManager.removeItemAtPath(exportPath as String, error: &error)
                {
                    print("could not remove stale export: \(error)")
                }
            }
            let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
            exporter.videoComposition = mainComposition
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.outputURL = exportUrl
            exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
                // Only save when the export actually succeeded, and touch the
                // photo library from the main queue.
                if exporter.status == AVAssetExportSessionStatus.Completed
                {
                    dispatch_async(dispatch_get_main_queue()) {
                        UISaveVideoAtPathToSavedPhotosAlbum(exportPath as String, self, nil, nil)
                    }
                }
                else
                {
                    print("export failed: \(exporter.error)")
                }
            })
        }
    }
}
But the video is not even created or saved to the Photos app. I don't know where the mistake is. Please help me solve this issue; I am stuck and need to solve it as soon as possible.
Actually, the issue was in the CGAffineTransform: it needs to be set according to landscape left or right, and then it worked as expected.
code is below:
if isDeviceRotatedToLandscape
{
    // "%@" is the NSString format specifier for objects; "%#" is invalid and
    // yields a garbage path.
    let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
    let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
    let fileManager : NSFileManager = NSFileManager.defaultManager()
    if fileManager.fileExistsAtPath(outputPath as String)
    {
        let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
        if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
        {
            let videoComposition = AVMutableVideoComposition()
            videoComposition.frameDuration = CMTimeMake(1, 30)
            videoComposition.renderSize = CGSizeMake(612, 612)

            let instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

            // Landscape-left and landscape-right need opposite quarter-turns, and
            // a matching translation so the rotated frame stays inside the canvas.
            let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
            let t1 = CGAffineTransformMakeTranslation(isDeviceLandscapeLeft ? 0 : 612,
                                                      isDeviceLandscapeLeft ? 612 : 0)
            let t2 = CGAffineTransformRotate(t1, CGFloat(isDeviceLandscapeLeft ? -M_PI_2 : M_PI_2))
            transformer.setTransform(t2, atTime: kCMTimeZero)
            instruction.layerInstructions = [transformer]
            videoComposition.instructions = [instruction]

            let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output22.mov")
            // NSString bridges to String with a plain "as"; the forced "as!"
            // was unnecessary and crash-prone.
            let exportUrl : NSURL = NSURL.fileURLWithPath(exportPath as String)!
            if fileManager.fileExistsAtPath(exportPath as String)
            {
                var error: NSError? = nil
                // removeItemAtPath returns true on SUCCESS; report only failures.
                if !fileManager.removeItemAtPath(exportPath as String, error: &error)
                {
                    print("could not remove stale export: \(error)")
                }
            }
            let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
            exporter.videoComposition = videoComposition
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.outputURL = exportUrl
            exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
                dispatch_async(dispatch_get_main_queue()) {
                    // Save only a successfully exported file.
                    if exporter.status == AVAssetExportSessionStatus.Completed
                    {
                        UISaveVideoAtPathToSavedPhotosAlbum(exportPath as String, self, nil, nil)
                    }
                    else
                    {
                        print("export failed: \(exporter.error)")
                    }
                }
            })
        }
    }
}
Related
I want to merge several videos into one single video. I have already searched the internet for a possible solution. I implemented some code, but the problem is that it won't merge all the videos: the end result is only the first video, lasting a few seconds. I don't know what I'm doing wrong.
This is my code:
// Insert position for the next clip while building the merged composition.
var atTimeM: CMTime = CMTime.zero
// One layer instruction per clip, collected for the final video composition.
var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
// Total duration of everything inserted so far. The timescale must be
// non-zero: CMTimeMake(value: 0, timescale: 0) is an INVALID CMTime and
// arithmetic on it is undefined.
var completeTrackDuration: CMTime = CMTimeMake(value: 0, timescale: 1)
// Natural size of the last inserted track; used as the export render size.
var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)
// Running total mirror of completeTrackDuration (kept for compatibility).
var totalTime: CMTime = CMTimeMake(value: 0, timescale: 1)
// Source clips to merge, in playback order.
var videoArrayOfAssets = [AVAsset]()
/// Concatenates every asset in `videoArrayOfAssets` into a single composition,
/// exports it to the documents directory, and saves the result to the photo
/// library. Clips are placed back to back using the accumulated duration.
func mergeVideoArray(){
    print(videoArrayOfAssets)
    let mixComposition = AVMutableComposition()
    for videoAsset in videoArrayOfAssets {
        let videoTrack =
            mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                           preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            // BUG FIX: each clip after the first must start where the previous
            // one ended. `totalTime` was never advanced, so every clip landed
            // at time zero and only the first was visible.
            if videoAsset == videoArrayOfAssets.first {
                atTimeM = CMTime.zero
            } else {
                atTimeM = completeTrackDuration
            }
            try videoTrack!.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration),
                                            of: videoAsset.tracks(withMediaType: AVMediaType.video)[0],
                                            at: atTimeM)
            videoSize = videoTrack!.naturalSize
        } catch let error as NSError {
            print("error: \(error)")
        }
        completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
        totalTime = completeTrackDuration  // keep the shared counter in sync
        let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack!)
        if (videoAsset != videoArrayOfAssets.last) {
            // Hide this track once its clip has played, so it doesn't cover
            // the next clip (each clip lives on its own composition track).
            videoInstruction.setOpacity(0.0, at: completeTrackDuration)
        }
        layerInstructionsArray.append(videoInstruction)
    }
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: completeTrackDuration)
    mainInstruction.layerInstructions = layerInstructionsArray
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mainComposition.renderSize = CGSize(width: videoSize.width, height: videoSize.height)
    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: NSDate() as Date)
    let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
    let url = NSURL(fileURLWithPath: savePath)
    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url as URL
    exporter!.outputFileType = AVFileType.mov
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.videoComposition = mainComposition
    exporter!.exportAsynchronously {
        // Only touch the photo library when the export actually succeeded.
        guard exporter!.status == .completed else {
            print("export failed: \(String(describing: exporter!.error))")
            return
        }
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exporter!.outputURL!)
        }) { saved, error in
            if saved {
                print("video saved to camera roll")
            }
        }
    }
}
How can I get this to work?
You can use the class created by me for similar purpose:
import Foundation
import CoreMedia
import AVFoundation
/// A playable media item that can be materialised as a file on disk.
public protocol MediaItemProtocol {
    /// Total duration of the item.
    var duration: CMTime {get}
    /// Backing asset, suitable for playback or further composition.
    var asset: AVAsset {get}
    /// Writes the item to disk (or hands back an existing file) and calls
    /// back with the file URL on success, or an error on failure.
    /// Note: the closure outlives the call, so it must be `@escaping`
    /// ("#escaping" is not valid Swift).
    func createFileOnDisk(completionHandler: @escaping (_ fileURL: URL?, _ error: Error?) -> Void)
}
/// A media item backed by a single existing video file on disk.
final public class VideoMediaItem: MediaItemProtocol {
    public private (set) var duration: CMTime
    public private (set) var asset: AVAsset
    private var videoUrl: URL

    public init(url: URL) {
        videoUrl = url
        // Request precise timing so `duration` is exact rather than estimated.
        let options = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
        let asset = AVURLAsset(url: url, options: options)
        self.asset = asset
        duration = asset.duration
    }

    /// The backing file already exists, so simply hand back its URL.
    /// (The closure must be `@escaping` to satisfy `MediaItemProtocol`.)
    public func createFileOnDisk(completionHandler: @escaping (_ fileURL: URL?, _ error: Error?) -> Void) {
        completionHandler(videoUrl, nil)
    }
}
/// A media item assembled by concatenating other media items back to back
/// into a single `AVMutableComposition`.
final class VideoCompositionMediaItem: MediaItemProtocol {
    private (set) var duration: CMTime = kCMTimeZero
    private (set) var asset: AVAsset
    private var itemList: [MediaItemProtocol]
    // Non-nil while an export is in flight; used to reject concurrent exports.
    private var exporter: AVAssetExportSession?

    init(item: [MediaItemProtocol]) {
        self.itemList = item
        let composition = AVMutableComposition()
        for mediaItem in item {
            // Skip items whose duration never resolved; inserting an invalid
            // time range would fail.
            if CMTIME_IS_VALID(mediaItem.duration) {
                let itemDuration = mediaItem.duration
                let range = CMTimeRangeMake(kCMTimeZero, itemDuration)
                try? composition.insertTimeRange(range, of: mediaItem.asset, at: duration)
                duration = CMTimeAdd(duration, itemDuration)
            }
        }
        // Carry over the first clip's orientation so playback isn't rotated.
        if let track = item.first?.asset.tracks(withMediaType: .video).first {
            if let compositionTrack = composition.tracks(withMediaType: .video).first {
                compositionTrack.preferredTransform = track.preferredTransform
            }
        }
        asset = composition
    }

    /// Exports the composition to a uniquely named temporary .mov file.
    /// Only one export may be in flight at a time; a second call while one is
    /// running completes with `AlreadyExporting`. The closure must be
    /// `@escaping` — it is invoked after `exportAsynchronously` returns.
    func createFileOnDisk(completionHandler: @escaping (_ fileURL: URL?, _ error: Error?) -> Void) {
        if self.exporter != nil {
            completionHandler(nil, MediaItemError.AlreadyExporting)
            return
        }
        exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset1280x720)
        guard let exporter = exporter else {
            completionHandler(nil, MediaItemError.UnknownError)
            return
        }
        let fileURL = URL(fileURLWithPath: NSTemporaryDirectory())
            .appendingPathComponent("Composition" + NSUUID().uuidString)
            .appendingPathExtension("mov")
        exporter.outputURL = fileURL
        exporter.outputFileType = AVFileType.mov
        exporter.shouldOptimizeForNetworkUse = true
        exporter.timeRange = CMTimeRangeMake(kCMTimeZero, duration)
        exporter.exportAsynchronously {
            // Clear the in-flight marker before reporting either outcome.
            if self.exporter?.status == .completed {
                self.exporter = nil
                completionHandler(fileURL, nil)
            } else {
                self.exporter = nil
                completionHandler(nil, MediaItemError.ErrorExporting)
            }
        }
    }
}
Example
Hi,
Struggling to rotate this video to show in the proper orientation and fill the entire screen.
I am trying to export the AVAsset with an AVVideoComposition, but I cannot get it to work correctly.
let videoAsset: AVAsset = AVAsset(URL: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack

let videoComposition = AVMutableVideoComposition()
// The clip is rotated a quarter turn below, so the render canvas must use the
// SWAPPED dimensions; using naturalSize directly leaves the portrait frame
// letterboxed/cropped inside a landscape canvas.
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

// Rotate to portrait: translate by the (pre-rotation) height first so the
// rotated frame lands back inside the canvas — rotating around the origin
// alone spins the picture out of view.
let transformer: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, 0)
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
transformer.setTransform(t2, atTime: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]

let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = NSDate()
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.stringFromDate(date)).mp4"
let outputURL = NSURL(fileURLWithPath: outputPath)

let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    dispatch_async(dispatch_get_main_queue(), {
        self.handleExportCompletion(exporter)
    })
})
Solved the rotation converting from the code below:
AVMutableVideoComposition rotated video captured in portrait mode
Now having issues with exporting in question below if anyone knows:
https://stackoverflow.com/questions/35233766/avasset-failing-to-export
I've created this function, which gets a video captured in portrait mode. However, when I save the AVAssetExportSession's output and display it, it seems to be identified as landscape. How can I make sure this is created as a portrait video?
/// Builds (but does not start) an AVAssetExportSession that composites
/// Documents/pre.mov with a snapshot of `containerView` and a logo layer
/// rendered on top of the video via Core Animation.
func createVideo() -> AVAssetExportSession {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/pre.mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
// Copies the capture orientation onto the composition track. NOTE(review):
// when a videoComposition is attached to the export (see below), the layer
// instruction's transform is what gets rendered, and no transform is set on
// `layerinstruction` — this is the likely reason the output appears
// landscape. Consider layerinstruction.setTransform(videoTrack.preferredTransform,
// atTime: kCMTimeZero) — confirm.
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
print(fullSizeImage.naturalSize)
// Snapshot the container view, scaled so its width matches the video's width.
// NOTE(review): naturalSize is the PRE-rotation size; for a portrait capture
// the width/height are effectively swapped here — verify against the track's
// preferredTransform before relying on these dimensions.
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
// Pin the overlay to the bottom edge of the video frame.
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
// Parent layer hosts the video layer plus the overlay for the animation tool.
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
// NOTE(review): renderSize is the unrotated naturalSize; for a portrait
// result the width/height would need to be swapped — confirm.
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
// No setTransform call here — see the NOTE above about lost orientation.
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// create new file to receive data
let docsDir: AnyObject = documentsPath
let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
// NOTE(review): this deletes pre.mov — the very file the composition reads —
// BEFORE the caller has run the returned session; the export will then read
// from a deleted path and likely fail. Confirm this removal is intentional
// and, if so, move it into the export completion.
let preFilePath = docsDir.stringByAppendingPathComponent("pre.mov")
let preDestinationUrl = NSURL(fileURLWithPath: preFilePath)
_ = try? NSFileManager().removeItemAtURL(preDestinationUrl)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.videoComposition = layercomposition
self.movieUrl = movieFilePath
return assetExport!
}
I have followed this link for cropping and resizing video:
Swift: Crop and Export Video
I want to crop and resize video to 612*612.
My code is given below:
// "%@" is the NSString format specifier for objects; "%#" is invalid and
// produced a garbage path, so the source file was never found.
let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output.mov")
let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
let fileManager : NSFileManager = NSFileManager.defaultManager()
if fileManager.fileExistsAtPath(outputPath as String)
{
    let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
    if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
    {
        let videoComposition = AVMutableVideoComposition()
        videoComposition.frameDuration = CMTimeMake(1, 60)
        print(clipVideoTrack.naturalSize.height)
        videoComposition.renderSize = CGSizeMake(612, 612)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

        // Rotate a quarter turn and shift right by the (pre-rotation) height
        // so the rotated frame stays inside the 612x612 canvas.
        let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
        let t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, 0)
        let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
        transformer.setTransform(t2, atTime: kCMTimeZero)
        instruction.layerInstructions = [transformer]
        videoComposition.instructions = [instruction]

        let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
        // NSString bridges with a plain "as"; the forced "as!" was unnecessary.
        let exportUrl : NSURL = NSURL.fileURLWithPath(exportPath as String)!
        if fileManager.fileExistsAtPath(exportPath as String)
        {
            var error: NSError? = nil
            // removeItemAtPath returns true on SUCCESS; report only failures.
            if !fileManager.removeItemAtPath(exportPath as String, error: &error)
            {
                print("could not remove stale export: \(error)")
            }
        }
        let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
        exporter.videoComposition = videoComposition
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.outputURL = exportUrl
        exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
            dispatch_async(dispatch_get_main_queue()) {
                // Publish the exported file's URL on the main queue.
                let outputURL: NSURL = exporter.outputURL
                self.videoURL = outputURL
            }
        })
    }
}
I get the video size as 612*612, but the content is weird. What could be the issue?
Got solution:
I have set value of AVCaptureSession to AVCaptureSessionPresetiFrame960x540
and I changed its value to AVCaptureSessionPreset1280x720 solve my issue.
I'm trying to merge some videos into a single output.mov. The exported file isn't playable and I don't know why. Can someone help me?
// Merges the video tracks of all movies found under `path` into one
// composition track and starts an export. Returns true when exactly `nMovie`
// clips were found (export started), false otherwise.
func exportVideo2(path:String, outputPath:String, nMovie:Int) -> Bool{
var composition = AVMutableComposition()
let track:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
var insertTime = kCMTimeZero
var movie = movieOfProject(path)
if movie.count == nMovie{
for (index,mov) in enumerate(movie){
let moviePath = path.stringByAppendingPathComponent(mov)
// moviePath = path to the .mov file
println(moviePath)
let moviePathUrl = NSURL(fileURLWithPath: moviePath)
let sourceAsset = AVURLAsset(URL: moviePathUrl, options: nil)
println(sourceAsset)
let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
println(sourceAsset.playable) // print true
println(sourceAsset.exportable) // print true
println(sourceAsset.readable) // print true
if tracks.count > 0{
let assetTrack:AVAssetTrack = tracks[0] as AVAssetTrack
// NOTE(review): only the video track is copied; audio tracks are dropped,
// so the merged movie is silent.
track.insertTimeRange(CMTimeRangeMake(kCMTimeZero,sourceAsset.duration), ofTrack: assetTrack, atTime: insertTime, error: nil)
insertTime = CMTimeAdd(insertTime, sourceAsset.duration)
}
}
let completeMovie = outputPath.stringByAppendingPathComponent("movie.mov")
let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
var exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
exporter.outputURL = completeMovieUrl
// NOTE(review): MPEG-4 container written to a ".mov" path — confirm the
// file-type/extension pairing; a mismatch can make the export fail.
exporter.outputFileType = AVFileTypeMPEG4
// NOTE(review): the export is ASYNCHRONOUS and the completion handler is
// nil, so any export failure is silently discarded.
exporter.exportAsynchronouslyWithCompletionHandler(nil)
// NOTE(review): these checks run immediately, BEFORE the export has written
// the file — that is why readable/exportable/playable print false. Inspect
// the output asset from the export's completion handler instead.
let ass = AVURLAsset(URL: completeMovieUrl, options: nil)
println(ass.readable) // print false
println(ass.exportable) // print false
println(ass.playable) // print false
return true
}else{
return false
}
}
The .mov files that I have to merge are all readable so I think the problem is in the last part, where the output video is exported.
Resolved.
// Merges the video AND audio tracks of all movies found under `path` into one
// composition and starts an asynchronous export to outputPath/movie.mov.
// Returns true when exactly `nMovie` clips were found (export started),
// false otherwise. Note: the export outcome is only logged; callers that
// need the file must wait for the completion handler.
func exportVideo3(path:String, outputPath:String, nMovie:Int) -> Bool{
    var composition = AVMutableComposition()
    let trackVideo:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let trackAudio:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var insertTime = kCMTimeZero
    var movie = movieOfProject(path)
    if movie.count == nMovie{
        for mov in movie{
            let moviePath = path.stringByAppendingPathComponent(mov)
            let moviePathUrl = NSURL(fileURLWithPath: moviePath)
            let sourceAsset = AVURLAsset(URL: moviePathUrl, options: nil)
            let tracks = sourceAsset.tracksWithMediaType(AVMediaTypeVideo)
            let audios = sourceAsset.tracksWithMediaType(AVMediaTypeAudio)
            if tracks.count > 0{
                let assetTrack:AVAssetTrack = tracks[0] as AVAssetTrack
                trackVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero,sourceAsset.duration), ofTrack: assetTrack, atTime: insertTime, error: nil)
                // Guard the audio access: a clip recorded without sound has no
                // audio track, and indexing audios[0] would crash.
                if audios.count > 0{
                    let assetTrackAudio:AVAssetTrack = audios[0] as AVAssetTrack
                    trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero,sourceAsset.duration), ofTrack: assetTrackAudio, atTime: insertTime, error: nil)
                }
                insertTime = CMTimeAdd(insertTime, sourceAsset.duration)
            }
        }
        let completeMovie = outputPath.stringByAppendingPathComponent("movie.mov")
        let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
        // The export session refuses to overwrite an existing file; remove any
        // previous result first (ignore the error if it doesn't exist).
        NSFileManager.defaultManager().removeItemAtPath(completeMovie, error: nil)
        var exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
        exporter.outputURL = completeMovieUrl
        // Use the QuickTime container to match the ".mov" extension; writing
        // MPEG-4 data to a .mov path can make the result unplayable.
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.exportAsynchronouslyWithCompletionHandler({
            switch exporter.status{
            case AVAssetExportSessionStatus.Failed:
                println("failed \(exporter.error)")
            case AVAssetExportSessionStatus.Cancelled:
                println("cancelled \(exporter.error)")
            default:
                println("complete")
            }
        })
        return true
    }else{
        return false
    }
}