I'm new to AVFoundation, and i'm trying to add a image to a video. however i keep getting an error : failed Optional(Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo={NSLocalizedDescription=Cannot Save, NSLocalizedRecoverySuggestion=Try saving again.})
. What am i doing wrong. Here is my code:
func createVideo() {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/\(self.randomVideoFileName).mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// create new file to receive data
let docsDir: AnyObject = documentsPath
let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.exportAsynchronouslyWithCompletionHandler({
switch assetExport!.status{
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport!.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport!.error)")
default:
print("Movie complete")
// save to photoalbum
NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
UISaveVideoAtPathToSavedPhotosAlbum(movieDestinationUrl.absoluteString, self, "image:didFinishSavingWithError:contextInfo:", nil)
})
}
})
}
As Matt commented, you've forgotten to delete the output file (AVFoundation refuses to overwrite files for some reason). So do that:
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
That fixes the error, but you won't yet see your watermark because you're not setting the AVAssetExportSession's videoComposition:
assetExport?.videoComposition = layercomposition // important!
assetExport!.exportAsynchronouslyWithCompletionHandler({...})
Hi i have done this in ObjectivC following is my code...
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition] ;
CGSize videoSize = CGSizeApplyAffineTransform(a_compositionVideoTrack.naturalSize, a_compositionVideoTrack.preferredTransform);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = #"lippieapp.com";
titleLayer.font = (__bridge CFTypeRef)(#"Helvetica-Bold");
titleLayer.fontSize = 32.0;
//titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(30, 0, 250, 60); //You may need to adjust this for proper display
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
Related
I have taken one video URL from backend and I want to marge with image. So I have added VideoLayer, ImageLayer on ParentLayer called AnimationLayer.
After Merge Video and images, it seems 1 black screen.
How can I resolve this bug?
func MergeVideo1(_ vidioUrlString: String?, with img: UIImage?, With VideoName : String)
{
guard let videoUrl = URL(string: vidioUrlString ?? "") else { return }
let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)
// Setup `mutableComposition` from the existing video
let mutableComposition = AVMutableComposition()
let videoAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaType.video).first!
let videoCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
videoCompositionTrack!.preferredTransform = videoAssetTrack.preferredTransform
try! videoCompositionTrack!.insertTimeRange(CMTimeRange(start:CMTime.zero, duration:videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: CMTime.zero)
let audioAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaType.audio).first!
let audioCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
try! audioCompositionTrack!.insertTimeRange(CMTimeRange(start: CMTime.zero, duration:audioAssetTrack.timeRange.duration), of: audioAssetTrack , at: CMTime.zero)
// Create a `videoComposition` to represent the `foregroundImage`
let videoSize: CGSize = videoCompositionTrack!.naturalSize
let frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
let imgLogoMix = UIImage(named: "icn_RandomDownload")
//Logo
let imageLayer_LOGO = CALayer()
imageLayer_LOGO.contents = imgLogoMix.cgImage
imageLayer_LOGO.frame = frame
//Frame
let imageLayer = CALayer()
imageLayer.contents = img?.cgImage
imageLayer.frame = frame
let videoLayer = CALayer()
videoLayer.frame = frame
let animationLayer = CALayer()
animationLayer.frame = frame
animationLayer.addSublayer(videoLayer)
animationLayer.addSublayer(imageLayer)
animationLayer.addSublayer(imageLayer_LOGO)
imageLayer.bringToFront()
imageLayer_LOGO.bringToFront()
let videoComposition = AVMutableVideoComposition(propertiesOf: (videoCompositionTrack?.asset!)!)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let DirPath = paths[0].appendingPathComponent("CREATE_IMAGE")
//finalPath = DirPath.path + "/myVideo.mp4"
finalPath = DirPath.path + "/\(VideoName).mp4"
if FileManager.default.fileExists(atPath: finalPath) {
do {
try FileManager.default.removeItem(atPath: finalPath)
} catch {
}
}
let exportSession = AVAssetExportSession( asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)!
exportSession.videoComposition = videoComposition
// exportSession.outputURL = destinationFilePath
exportSession.outputURL = URL(fileURLWithPath: finalPath)
exportSession.outputFileType = AVFileType.mp4
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case AVAssetExportSession.Status.failed:
print("failed")
SKActivityHUD.DismissHUD()
print(exportSession.error ?? "unknown error")
case AVAssetExportSession.Status.cancelled:
print("cancelled")
SKActivityHUD.DismissHUD()
print(exportSession.error ?? "unknown error")
default:
print("Movie complete")
// SKActivityHUD.DismissHUD()
}
})
}
Edited above code with
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(videoCompositionTrack?.nominalFrameRate ?? 300))
videoComposition.renderSize = videoSize
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mutableComposition.duration)
let videotrack = mutableComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject] as! [AVVideoCompositionLayerInstruction]
videoComposition.instructions = [instruction]
Above code just replaced with
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(videoCompositionTrack?.nominalFrameRate ?? 300))
videoComposition.renderSize = videoSize
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mutableComposition.duration)
let videotrack = mutableComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
let rgb = CGColorSpaceCreateDeviceRGB()
let myColor : [CGFloat] = [1.0, 1.0, 1.0, 1.0] //white
let ref = CGColor(colorSpace: rgb, components: myColor)
instruction.backgroundColor = ref
instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject] as! [AVVideoCompositionLayerInstruction]
videoComposition.instructions = [instruction]
This function is exporting the merged composition to a landscape orientation when the source video is in portrait. I save the original video in portrait orientation to my documents directory and then save it to camera roll and works fine. I then pass the saved video's url to this function and it somehow rotates it to landscape when it shouldn't. How do I fix this?
func makeVideoOverlay (url : URL) {
print("documents directory url: \(url)")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: url as URL, options: nil)
// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_duration = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
//var error: NSError?
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch {
// handle error
print("comp video track error: \(error.localizedDescription)")
}
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
let size = videoTrack.naturalSize
//this prints out to 1920x1080 landscape dimension. i don't know how
print("asset size: \(size)")
// Watermark Effect
let imglogo = UIImage(named: "logo-image")
let imglayer = CALayer()
imglayer.contents = imglogo?.cgImage
imglayer.frame = CGRect.init(x: 5, y: size.height-160, width: 150, height: 150)
imglayer.opacity = 1.0
let videolayer = CALayer()
videolayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = size
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject] as [AnyObject] as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as [AnyObject] as [AnyObject] as! [AVVideoCompositionInstructionProtocol]
// create new file to receive data
let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let docsDir: String = dirPaths[0] as String
let movieFilePath = docsDir.appending("/result.mov") as String
movieDestinationUrl = URL(fileURLWithPath: movieFilePath)
print("overlay destination url: \(movieDestinationUrl)")
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport?.outputFileType = AVFileTypeQuickTimeMovie
assetExport?.outputURL = movieDestinationUrl as URL
assetExport?.videoComposition = layercomposition
assetExport?.exportAsynchronously(completionHandler: {
if assetExport?.status == AVAssetExportSessionStatus.failed
{
print("failed: \(assetExport?.error)")
}
else if assetExport?.status == AVAssetExportSessionStatus.cancelled
{
print("cancelled: \(assetExport?.error)")
}
else
{
print("Movie complete")
OperationQueue.main.addOperation({ () -> Void in
//saves in landscape
self.saveAsset(url: self.movieDestinationUrl)
})
}
})
}
AVMutableVideoCompositionLayerInstruction has a method setTransform(_:at:)
As documentation say
Sets a fixed transform to apply from the specified time until the next
time at which a transform is set. [...]. Before the first specified time for which a
transform is set, the affine transform is held constant at the value
of identity ; after the last time for which a transform is set, the
affine transform is held constant at that last value.
You should set videoTrack's preferredTransform to layerInstruction instead.
EDIT
You need to create layerinstruction with the new created composition track instead.
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack) // NOT videoTrack.
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
I try in vain to watermark an existing video with the AVFoundation library. I followed the instructions and tried to rewrite the code in Swift from the third answer in this question: iPhone Watermark on recorded Video. – but this doesn't work for me.
Every time when run my code I only see a black video rendered with the length of my source video which needs to be watermarked. My goal is to fade in the watermark after 5 seconds.
Here is my code:
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: NSURL(fileURLWithPath: moviePath), options: nil)
// GET THE VIDEO TRACK
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
let animationImage: UIImage = self.artworkImage
let artWorkOverlayLayer: CALayer = CALayer()
artWorkOverlayLayer.contents = (animationImage.CGImage as! AnyObject)
artWorkOverlayLayer.frame = CGRectMake(0, 0, 512, 512)
artWorkOverlayLayer.opacity = 0
artWorkOverlayLayer.masksToBounds = true
let animation: CABasicAnimation = CABasicAnimation(keyPath: "opacity")
animation.duration = 10
animation.repeatCount = 0
animation.autoreverses = false
animation.fromValue = Int(0.0)
animation.toValue = Int(1.0)
animation.beginTime = 5.0
artWorkOverlayLayer.addAnimation(animation, forKey: "animateOpacity")
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
parentlayer.addSublayer(artWorkOverlayLayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = videoTrack.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// EXPORT
let filePath: NSURL = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingString("output-tmp.mp4"))
let assetExportSession: AVAssetExportSession! = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
assetExportSession.outputFileType = AVFileTypeMPEG4
assetExportSession.outputURL = filePath
assetExportSession.videoComposition = layercomposition
assetExportSession.exportAsynchronouslyWithCompletionHandler({() -> Void in
print(filePath)
})
So I am trying to add a watermark to a previously recorded video using the following code, but when I view the video, there is no watermark. Can anyone help? I tried following the post at: iPhone Watermark on recorded Video.
public func addWatermarkToVideo(url: NSURL, completion:(url: NSURL?) -> Void) {
let videoAsset = AVURLAsset(URL: url)
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let clipVideoTrack: AVAssetTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero)
} catch {
print(error)
}
compositionVideoTrack.preferredTransform = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].preferredTransform
//Add watermark
guard let myImage = UIImage(named: "Logo") else {
completion(url: nil)
return
}
let aLayer = CALayer()
aLayer.contents = myImage.CGImage
aLayer.frame = CGRectMake(5, 25, 100, 57)
aLayer.opacity = 0.65
let videoSize = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = videoSize
videoComp.frameDuration = CMTimeMake(1,30)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
let videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0]
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
let paths = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
let documentsDirectory: AnyObject = paths[0]
let dataPath = documentsDirectory.stringByAppendingPathComponent("VideoCache")
if (!NSFileManager.defaultManager().fileExistsAtPath(dataPath)) {
do {
try NSFileManager.defaultManager().createDirectoryAtPath(dataPath, withIntermediateDirectories: false, attributes: nil)
} catch {
print("Couldn't create path")
}
}
let tempURL = NSURL(fileURLWithPath: dataPath)
let completeMovieUrl = tempURL.URLByAppendingPathComponent("tether-\(NSDate()).mov")
if let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exporter.outputURL = completeMovieUrl
exporter.outputFileType = AVFileTypeMPEG4
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
switch exporter.status {
case .Failed:
print("failed \(exporter.error)")
completion(url: nil)
break
case .Cancelled:
print("cancelled \(exporter.error)")
completion(url: nil)
break
default:
print("complete")
completion(url: exporter.outputURL)
}
})
}
}
You've forgotten to add your videoComposition to the AVAssetExportSession:
exporter.outputFileType = AVFileTypeMPEG4 // You had this
exporter.videoComposition = videoComp // but had forgotten this
exporter.exportAsynchronouslyWithCompletionHandler({ // ...
I've created this function which get a video which is captured in portraitmode. however when i save the avassetexport and show it, it seems like it identify it as landscape, how can i make sure to create this as a portrait video??
func createVideo() -> AVAssetExportSession {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/pre.mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
print(fullSizeImage.naturalSize)
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// create new file to receive data
let docsDir: AnyObject = documentsPath
let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
let preFilePath = docsDir.stringByAppendingPathComponent("pre.mov")
let preDestinationUrl = NSURL(fileURLWithPath: preFilePath)
_ = try? NSFileManager().removeItemAtURL(preDestinationUrl)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.videoComposition = layercomposition
self.movieUrl = movieFilePath
return assetExport!
}