I've been trying, in vain, to watermark an existing video with AVFoundation. I followed the instructions and tried to rewrite the code in Swift from the third answer to this question: iPhone Watermark on recorded Video. But this doesn't work for me.
Every time I run my code, I only see a black video rendered with the length of the source video that needs to be watermarked. My goal is to fade the watermark in after 5 seconds.
Here is my code:
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: NSURL(fileURLWithPath: moviePath), options: nil)
// GET THE VIDEO TRACK
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
let animationImage: UIImage = self.artworkImage
let artWorkOverlayLayer: CALayer = CALayer()
artWorkOverlayLayer.contents = animationImage.CGImage // no force cast needed; CGImage bridges to AnyObject
artWorkOverlayLayer.frame = CGRectMake(0, 0, 512, 512)
artWorkOverlayLayer.opacity = 0
artWorkOverlayLayer.masksToBounds = true
let animation = CABasicAnimation(keyPath: "opacity")
animation.duration = 10
animation.fromValue = 0.0 // must be floating-point; Int(0.0) and Int(1.0) truncate to integers
animation.toValue = 1.0
// In a video composition, Core Animation treats a beginTime of 0 as "now",
// so offset from AVCoreAnimationBeginTimeAtZero to start 5 seconds into the video timeline.
animation.beginTime = AVCoreAnimationBeginTimeAtZero + 5.0
animation.removedOnCompletion = false // keep the final opacity once the fade completes
animation.fillMode = kCAFillModeForwards
artWorkOverlayLayer.addAnimation(animation, forKey: "animateOpacity")
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
parentlayer.addSublayer(videolayer) // the video layer must be in the hierarchy, or the export renders black
parentlayer.addSublayer(artWorkOverlayLayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = videoTrack.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// EXPORT
let filePath: NSURL = NSURL.fileURLWithPath((NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("output-tmp.mp4"))
_ = try? NSFileManager.defaultManager().removeItemAtURL(filePath) // the export fails with "Cannot Save" if the file already exists
let assetExportSession: AVAssetExportSession! = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
assetExportSession.outputFileType = AVFileTypeMPEG4
assetExportSession.outputURL = filePath
assetExportSession.videoComposition = layercomposition
assetExportSession.exportAsynchronouslyWithCompletionHandler({() -> Void in
print(filePath)
})
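Side note: the completion handler above prints the file path whether or not the export succeeded, so failures are silent. A minimal sketch of a status check, in the same Swift 2 era syntax as the question:
// Sketch: report the export result instead of printing the path unconditionally.
assetExportSession.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    switch assetExportSession.status {
    case .Completed:
        print("exported to \(filePath)")
    case .Failed, .Cancelled:
        print("export error: \(assetExportSession.error)")
    default:
        break
    }
})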
Related
I get a video URL from the backend and want to merge it with an image, so I added a VideoLayer and an ImageLayer onto a parent layer called AnimationLayer.
After merging the video and images, the result is a black screen.
How can I resolve this bug?
func mergeVideo1(_ videoUrlString: String?, with img: UIImage?, videoName: String)
{
guard let videoUrl = URL(string: videoUrlString ?? "") else { return }
let videoUrlAsset = AVURLAsset(url: videoUrl, options: nil)
// Setup `mutableComposition` from the existing video
let mutableComposition = AVMutableComposition()
let videoAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaType.video).first!
let videoCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
videoCompositionTrack!.preferredTransform = videoAssetTrack.preferredTransform
try! videoCompositionTrack!.insertTimeRange(CMTimeRange(start:CMTime.zero, duration:videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: CMTime.zero)
let audioAssetTrack = videoUrlAsset.tracks(withMediaType: AVMediaType.audio).first!
let audioCompositionTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
try! audioCompositionTrack!.insertTimeRange(CMTimeRange(start: CMTime.zero, duration:audioAssetTrack.timeRange.duration), of: audioAssetTrack , at: CMTime.zero)
// Create a `videoComposition` to represent the `foregroundImage`
let videoSize: CGSize = videoCompositionTrack!.naturalSize
let frame = CGRect(x: 0.0, y: 0.0, width: videoSize.width, height: videoSize.height)
let imgLogoMix = UIImage(named: "icn_RandomDownload")
//Logo
let imageLayer_LOGO = CALayer()
imageLayer_LOGO.contents = imgLogoMix?.cgImage // UIImage(named:) is optional
imageLayer_LOGO.frame = frame
//Frame
let imageLayer = CALayer()
imageLayer.contents = img?.cgImage
imageLayer.frame = frame
let videoLayer = CALayer()
videoLayer.frame = frame
let animationLayer = CALayer()
animationLayer.frame = frame
animationLayer.addSublayer(videoLayer)
animationLayer.addSublayer(imageLayer)
animationLayer.addSublayer(imageLayer_LOGO)
// Sublayers render in the order they were added, so the image and logo already sit above the video layer.
let videoComposition = AVMutableVideoComposition(propertiesOf: mutableComposition)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let dirPath = paths[0].appendingPathComponent("CREATE_IMAGE")
// The directory must exist before exporting, or the session fails with "Cannot Save".
try? FileManager.default.createDirectory(at: dirPath, withIntermediateDirectories: true)
finalPath = dirPath.path + "/\(videoName).mp4"
if FileManager.default.fileExists(atPath: finalPath) {
do {
try FileManager.default.removeItem(atPath: finalPath)
} catch {
}
}
let exportSession = AVAssetExportSession( asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)!
exportSession.videoComposition = videoComposition
// exportSession.outputURL = destinationFilePath
exportSession.outputURL = URL(fileURLWithPath: finalPath)
exportSession.outputFileType = AVFileType.mp4
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case AVAssetExportSession.Status.failed:
print("failed")
SKActivityHUD.DismissHUD()
print(exportSession.error ?? "unknown error")
case AVAssetExportSession.Status.cancelled:
print("cancelled")
SKActivityHUD.DismissHUD()
print(exportSession.error ?? "unknown error")
default:
print("Movie complete")
// SKActivityHUD.DismissHUD()
}
})
}
I edited the above code, replacing the video composition setup with:
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(videoCompositionTrack?.nominalFrameRate ?? 300))
videoComposition.renderSize = videoSize
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mutableComposition.duration)
let videotrack = mutableComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
videoComposition.instructions = [instruction]
and then replaced that with a version that also sets a background color:
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(videoCompositionTrack?.nominalFrameRate ?? 300))
videoComposition.renderSize = videoSize
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animationLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mutableComposition.duration)
let videotrack = mutableComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.backgroundColor = UIColor.white.cgColor // white
instruction.layerInstructions = [layerinstruction]
videoComposition.instructions = [instruction]
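For reference, a black render with AVVideoCompositionCoreAnimationTool usually means the video layer is missing from the layer hierarchy or the render size is zero. A minimal sketch of the hierarchy the tool expects (the size below is a placeholder, not taken from the code above):
// Sketch: parentLayer contains videoLayer (frames render here) plus overlays above it.
let renderSize = CGSize(width: 1080, height: 1920) // placeholder; use the real track size
let layerFrame = CGRect(origin: .zero, size: renderSize)
let videoLayer = CALayer()
let overlayLayer = CALayer()
let parentLayer = CALayer()
videoLayer.frame = layerFrame
overlayLayer.frame = layerFrame
parentLayer.frame = layerFrame
parentLayer.addSublayer(videoLayer)   // rendered video frames
parentLayer.addSublayer(overlayLayer) // watermark/overlay above the video
let tool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)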
This function exports the merged composition in landscape orientation even though the source video is portrait. I save the original video in portrait orientation to my documents directory, then save it to the camera roll, and it looks fine. I then pass the saved video's URL to this function, and it somehow rotates the result to landscape when it shouldn't. How do I fix this?
func makeVideoOverlay (url : URL) {
print("documents directory url: \(url)")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: url as URL, options: nil)
// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_duration = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
//var error: NSError?
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch {
// handle error
print("comp video track error: \(error.localizedDescription)")
}
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
let size = videoTrack.naturalSize
// this prints a 1920x1080 landscape size; I don't know how
print("asset size: \(size)")
// Watermark Effect
let imglogo = UIImage(named: "logo-image")
let imglayer = CALayer()
imglayer.contents = imglogo?.cgImage
imglayer.frame = CGRect.init(x: 5, y: size.height-160, width: 150, height: 150)
imglayer.opacity = 1.0
let videolayer = CALayer()
videolayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = size
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// create new file to receive data
let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let docsDir: String = dirPaths[0] as String
let movieFilePath = docsDir.appending("/result.mov") as String
movieDestinationUrl = URL(fileURLWithPath: movieFilePath)
print("overlay destination url: \(movieDestinationUrl)")
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport?.outputFileType = AVFileTypeQuickTimeMovie
assetExport?.outputURL = movieDestinationUrl as URL
assetExport?.videoComposition = layercomposition
assetExport?.exportAsynchronously(completionHandler: {
if assetExport?.status == AVAssetExportSessionStatus.failed
{
print("failed: \(assetExport?.error)")
}
else if assetExport?.status == AVAssetExportSessionStatus.cancelled
{
print("cancelled: \(assetExport?.error)")
}
else
{
print("Movie complete")
OperationQueue.main.addOperation({ () -> Void in
//saves in landscape
self.saveAsset(url: self.movieDestinationUrl)
})
}
})
}
AVMutableVideoCompositionLayerInstruction has a method setTransform(_:at:).
As the documentation says:
Sets a fixed transform to apply from the specified time until the next time at which a transform is set. [...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of identity; after the last time for which a transform is set, the affine transform is held constant at that last value.
You should set the videoTrack's preferredTransform on the layerInstruction instead.
EDIT
You need to create the layerinstruction with the newly created composition track instead:
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack) // NOT videoTrack.
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
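Putting the edit together, here is a sketch of how the transform and render size interact for a portrait source (the 90-degree check is a common heuristic, not an official API):
// Sketch: size the render target from the preferred transform, then apply the
// transform through the layer instruction rather than the composition track.
let t = videoTrack.preferredTransform
let natural = videoTrack.naturalSize
let isRotated90 = abs(t.b) == 1 && abs(t.c) == 1 // a 90° rotation swaps width and height
layercomposition.renderSize = isRotated90
    ? CGSize(width: natural.height, height: natural.width)
    : natural
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack)
layerinstruction.setTransform(t, at: kCMTimeZero)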
I've created this function, which takes a video captured in portrait mode. However, when I save the AVAssetExportSession output and play it back, it is identified as landscape. How can I make sure it is created as a portrait video?
func createVideo() -> AVAssetExportSession {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/pre.mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
print(fullSizeImage.naturalSize)
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// create new file to receive data
let movieFilePath = documentsPath.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
let preFilePath = documentsPath.stringByAppendingPathComponent("pre.mov")
let preDestinationUrl = NSURL(fileURLWithPath: preFilePath)
_ = try? NSFileManager().removeItemAtURL(preDestinationUrl)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.videoComposition = layercomposition
self.movieUrl = movieFilePath
return assetExport!
}
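For context, the returned session is driven by the caller, roughly like this (illustrative usage, same Swift 2 syntax):
// Illustrative caller: actually run the export and check its status.
let session = createVideo()
session.exportAsynchronouslyWithCompletionHandler({
    switch session.status {
    case .Completed:
        print("saved to \(session.outputURL)")
    case .Failed, .Cancelled:
        print("export error: \(session.error)")
    default:
        break
    }
})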
I'm new to AVFoundation, and I'm trying to add an image to a video. However, I keep getting an error: failed Optional(Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo={NSLocalizedDescription=Cannot Save, NSLocalizedRecoverySuggestion=Try saving again.}).
What am I doing wrong? Here is my code:
func createVideo() {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/\(self.randomVideoFileName).mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(videolayer) // the video layer itself must also be in the hierarchy
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// create new file to receive data
let docsDir: AnyObject = documentsPath
let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.exportAsynchronouslyWithCompletionHandler({
switch assetExport!.status{
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport!.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport!.error)")
default:
print("Movie complete")
// save to photoalbum
NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
// pass the plain file path; absoluteString would include the "file://" scheme and the save would fail
UISaveVideoAtPathToSavedPhotosAlbum(movieFilePath, self, "image:didFinishSavingWithError:contextInfo:", nil)
})
}
})
}
As Matt commented, you've forgotten to delete the output file (AVFoundation refuses to overwrite files for some reason). So do that:
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
That fixes the error, but you won't yet see your watermark because you're not setting the AVAssetExportSession's videoComposition:
assetExport?.videoComposition = layercomposition // important!
assetExport!.exportAsynchronouslyWithCompletionHandler({...})
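Combined, the tail end of createVideo() would look roughly like this (a sketch reusing the question's variable names):
// Sketch: delete any stale output, attach the video composition, then export.
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl) // AVFoundation won't overwrite
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeQuickTimeMovie
assetExport.outputURL = movieDestinationUrl
assetExport.videoComposition = layercomposition // without this the watermark never renders
assetExport.exportAsynchronouslyWithCompletionHandler({
    // check assetExport.status and assetExport.error here as before
})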
Hi, I have done this in Objective-C; the following is my code:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition] ;
CGSize videoSize = CGSizeApplyAffineTransform(a_compositionVideoTrack.naturalSize, a_compositionVideoTrack.preferredTransform);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"lippieapp.com";
titleLayer.font = (__bridge CFTypeRef)(@"Helvetica-Bold");
titleLayer.fontSize = 32.0;
//titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(30, 0, 250, 60); //You may need to adjust this for proper display
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
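For anyone working in Swift, a rough equivalent of the same text-layer setup (current Swift syntax; videoSize is assumed to be computed as above):
// Sketch: the same CATextLayer watermark in Swift.
let titleLayer = CATextLayer()
titleLayer.string = "lippieapp.com"
titleLayer.font = "Helvetica-Bold" as CFTypeRef // CATextLayer accepts a font name here
titleLayer.fontSize = 32.0
titleLayer.frame = CGRect(x: 30, y: 0, width: 250, height: 60)
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(origin: .zero, size: videoSize)
videoLayer.frame = CGRect(origin: .zero, size: videoSize)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(titleLayer) // text renders above the video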
I'm using AVMutableComposition and AVAssetExportSession to trim a video down. Randomly, and I mean randomly (I cannot reproduce it consistently), users' videos have a few black frames at the start of the trimmed video. The audio is unaffected. I can confirm 100% that the videos being trimmed have nothing to do with it, as this happens for a wide variety of videos from all different sources.
Any insight into why these videos are being exported with black frames in the start would be very very welcome. Thanks!
Some relevant code (sorry for the length):
// AVURLAssetPreferPreciseDurationAndTimingKey added in attempt to solve issue
let videoAsset = AVURLAsset(URL: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
var mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(
AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid)
)
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let videoSize = clipVideoTrack.naturalSize
// startTime and duration are NSTimeInterval types
let start = startTime == 0 ? kCMTimeZero : CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale)
var dur = CMTimeMakeWithSeconds(duration, videoAsset.duration.timescale)
if dur.value >= videoAsset.duration.value {
dur = videoAsset.duration
}
compositionVideoTrack.insertTimeRange(
CMTimeRange(start: start, duration: dur),
ofTrack:clipVideoTrack,
atTime: kCMTimeZero,
error:nil
)
compositionVideoTrack.preferredTransform = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].preferredTransform
let compositionAudioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
let clipAudioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack
compositionAudioTrack.insertTimeRange(
CMTimeRange(start: start, duration: dur),
ofTrack: clipAudioTrack,
atTime: kCMTimeZero,
error: nil
)
let parentLayer = CALayer()
parentLayer.backgroundColor = UIColor.blackColor().CGColor
let videoLayer = CALayer()
videoLayer.backgroundColor = UIColor.blackColor().CGColor
var parentFrame = CGRect(
x: 0,
y: 0,
width: videoSize.width,
height: videoSize.height
)
// H.264 requires even pixel dimensions, so round the crop frame down to even values
if parentFrame.size.width % 2 > 0 {
parentFrame.size.width = parentFrame.size.width - 1
}
if parentFrame.size.height % 2 > 0 {
parentFrame.size.height = parentFrame.size.height - 1
}
parentLayer.frame = parentFrame
videoLayer.frame = CGRect(
x: 0,
y: 0,
width: videoSize.width,
height: videoSize.height
)
parentLayer.addSublayer(videoLayer)
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = parentLayer.frame.size
videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: mixComposition.duration)
let videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInstruction.setTransform(CGAffineTransformMakeScale(parentLayer.frame.size.width / videoSize.width, parentLayer.frame.size.height / videoSize.height), atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
// Export
let exportSession = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality
)
exportSession.videoComposition = videoComp
let renderFileName = "video.mov" // use an extension that matches the QuickTime outputFileType below
let renderURL = NSURL(fileURLWithPath: NSTemporaryDirectory().stringByAppendingPathComponent(renderFileName))
exportSession.outputURL = renderURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronouslyWithCompletionHandler { ... }
The solution for us was to not try to crop and trim the video in the same operation. I still don't have an answer as to why this was happening, but we were able to resolve it by first trimming the video for time, and then, once we had a video of the proper duration, performing the crop operation on it.
Unfortunately, I believe this is just a bug in the framework, but at least in our case we were able to solve it by doing less in each operation and stringing the operations together.
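A rough sketch of that two-pass approach in current Swift syntax (the function and URL names are illustrative, not our production code):
// Sketch: pass 1 trims for time only (passthrough, no re-encode);
// pass 2 applies the crop's video composition to the already-trimmed file.
func trimThenCrop(source: URL, start: CMTime, duration: CMTime,
                  trimmedURL: URL, finalURL: URL,
                  cropComposition: @escaping (AVAsset) -> AVVideoComposition) {
    let asset = AVURLAsset(url: source, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
    guard let trim = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) else { return }
    trim.timeRange = CMTimeRange(start: start, duration: duration)
    trim.outputURL = trimmedURL
    trim.outputFileType = .mov
    trim.exportAsynchronously {
        guard trim.status == .completed else { return }
        let trimmedAsset = AVURLAsset(url: trimmedURL)
        guard let crop = AVAssetExportSession(asset: trimmedAsset, presetName: AVAssetExportPresetHighestQuality) else { return }
        crop.videoComposition = cropComposition(trimmedAsset)
        crop.outputURL = finalURL
        crop.outputFileType = .mov
        crop.exportAsynchronously {
            // inspect crop.status / crop.error here
        }
    }
}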