I'm using AVMutableComposition and AVAssetExportSession to trim a video down. Randomly, and I mean randomly (I cannot consistently reproduce it), users' videos end up with a few black frames at the start of the trimmed video. The audio is unaffected. I can confirm 100% that the source videos being trimmed have nothing to do with it, as this happens for a wide variety of videos from all different sources.
Any insight into why these videos are being exported with black frames at the start would be very welcome. Thanks!
Some relevant code (sorry for the length):
// AVURLAssetPreferPreciseDurationAndTimingKey added in attempt to solve issue
let videoAsset = AVURLAsset(URL: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
var mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(
AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid)
)
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let videoSize = clipVideoTrack.naturalSize
// startTime and duration are NSTimeInterval types
let start = startTime == 0 ? kCMTimeZero : CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale)
var dur = CMTimeMakeWithSeconds(duration, videoAsset.duration.timescale)
if dur.value >= videoAsset.duration.value {
dur = videoAsset.duration
}
compositionVideoTrack.insertTimeRange(
CMTimeRange(start: start, duration: dur),
ofTrack:clipVideoTrack,
atTime: kCMTimeZero,
error:nil
)
compositionVideoTrack.preferredTransform = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].preferredTransform
let compositionAudioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
let clipAudioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack
compositionAudioTrack.insertTimeRange(
CMTimeRange(start: start, duration: dur),
ofTrack: clipAudioTrack,
atTime: kCMTimeZero,
error: nil
)
let parentLayer = CALayer()
parentLayer.backgroundColor = UIColor.blackColor().CGColor
let videoLayer = CALayer()
videoLayer.backgroundColor = UIColor.blackColor().CGColor
var parentFrame = CGRect(
x: 0,
y: 0,
width: videoSize.width,
height: videoSize.height
)
if parentFrame.width % 2 > 0 {
parentFrame.size.width = parentFrame.size.width - 1
}
// Fix crop frame height
if parentFrame.size.height % 2 > 0 {
parentFrame.size.height = parentFrame.size.height - 1
}
parentLayer.frame = parentFrame
videoLayer.frame = CGRect(
x: 0,
y: 0,
width: videoSize.width,
height: videoSize.height
)
parentLayer.addSublayer(videoLayer)
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = parentLayer.frame.size
videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: mixComposition.duration)
let videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInstruction.setTransform(CGAffineTransformMakeScale(parentLayer.frame.size.width / videoSize.width, parentLayer.frame.size.height / videoSize.height), atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
// Export
let exportSession = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality
)
exportSession.videoComposition = videoComp
let renderFileName = "video.mp4"
let renderURL = NSURL(fileURLWithPath: NSTemporaryDirectory().stringByAppendingPathComponent(renderFileName))
exportSession.outputURL = renderURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronouslyWithCompletionHandler { ... }
The solution for us was to stop trying to crop and trim the video in the same operation. I still don't have an answer as to why this was happening, but we resolved it by first trimming the video for time, and then, once we had a video of the proper duration, performing the crop operation on it.
Unfortunately I believe this is just a bug in the framework, but at least in our case we were able to work around it by doing less in each operation and stringing the operations together.
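For anyone who wants a concrete starting point, here is a minimal sketch of how the two passes can be chained. It is written in current Swift syntax and is only an outline of the approach, not our production code; the second pass is left as a comment because the crop/composition export is whatever you already have.
import AVFoundation
// Pass 1: trim only — no AVVideoComposition and no CALayers involved.
// Pass 2 (the crop) then runs against a file whose duration is already correct.
func trimThenCrop(url: URL, start: CMTime, duration: CMTime,
                  completion: @escaping (URL?) -> Void) {
    let asset = AVURLAsset(url: url,
                           options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
    guard let trimSession = AVAssetExportSession(asset: asset,
                                                 presetName: AVAssetExportPresetPassthrough) else {
        completion(nil)
        return
    }
    let trimmedURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent("trimmed.mov")
    try? FileManager.default.removeItem(at: trimmedURL)
    trimSession.outputURL = trimmedURL
    trimSession.outputFileType = .mov
    trimSession.timeRange = CMTimeRange(start: start, duration: duration)
    trimSession.exportAsynchronously {
        guard trimSession.status == .completed else {
            completion(nil)
            return
        }
        // Second pass goes here: run the existing crop/video-composition export
        // on `trimmedURL`, with its time range starting at zero.
        completion(trimmedURL)
    }
}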
Related
I am recording three videos and merging them into one. I need text overlaid on all of the videos. I'm attempting that, but with no success: the video goes black while the audio comes through fine. When I do it without the CALayer, everything is good. I need help.
Is there something wrong with the code, or am I doing it the wrong way? Please guide me.
private func doMerge(arrayVideos: [AVAsset], animation: Bool, completion: @escaping Completion) -> Void {
var insertTime = CMTime.zero
var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = []
var outputSize = CGSize.init(width: 0, height: 0)
// Determine video output size
for videoAsset in arrayVideos {
let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)
var videoSize = videoTrack.naturalSize
if assetInfo.isPortrait == true {
videoSize.width = videoTrack.naturalSize.height
videoSize.height = videoTrack.naturalSize.width
}
if videoSize.height > outputSize.height {
outputSize = videoSize
}
}
if outputSize.width == 0 || outputSize.height == 0 {
outputSize = defaultSize
}
// Silence sound (in case the video has no sound track)
// let silenceURL = Bundle.main.url(forResource: "silence", withExtension: "mp3")
// let silenceAsset = AVAsset(url:silenceURL!)
// let silenceSoundTrack = silenceAsset.tracks(withMediaType: AVMediaType.audio).first
// Init composition
let mixComposition = AVMutableComposition.init()
for videoAsset in arrayVideos {
// Get video track
guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }
// Get audio track
var audioTrack:AVAssetTrack?
if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first
}
else {
// audioTrack = silenceSoundTrack
}
// Init video & audio composition track
let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
let startTime = CMTime.zero
let duration = videoAsset.duration
// Add video track to video composition at specific time
try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration),
of: videoTrack,
at: insertTime)
// Add audio track to audio composition at specific time
if let audioTrack = audioTrack {
try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration),
of: audioTrack,
at: insertTime)
}
// Add instruction for video track
let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack!,
asset: videoAsset,
standardSize: outputSize,
atTime: insertTime)
// Hide video track before changing to new track
let endTime = CMTimeAdd(insertTime, duration)
if animation {
let timeScale = videoAsset.duration.timescale
let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)
layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation))
}
else {
layerInstruction.setOpacity(0, at: endTime)
}
arrayLayerInstructions.append(layerInstruction)
// Increase the insert time
insertTime = CMTimeAdd(insertTime, duration)
}
catch {
print("Load track error")
}
// Watermark Effect
let size = videoTrack.naturalSize
// create text Layer
let titleLayer = CATextLayer()
titleLayer.backgroundColor = UIColor.clear.cgColor
titleLayer.contentsScale = UIScreen.main.scale
titleLayer.string = "Dummy text"
titleLayer.foregroundColor = UIColor.white.cgColor
titleLayer.font = UIFont(name: "Helvetica", size: 28)
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = CATextLayerAlignmentMode.center
titleLayer.frame = CGRect(x: 0, y: 50, width: size.width, height: size.height)
let videolayer = CALayer()
videolayer.backgroundColor = UIColor.clear.cgColor
// videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
// let layercomposition = AVMutableVideoComposition()
// layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
// layercomposition.renderSize = size
mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: titleLayer)
}
// Main video composition instruction
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime)
mainInstruction.layerInstructions = arrayLayerInstructions
// Main video composition
// mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mainComposition.renderSize = outputSize
// Export to file
let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
let exportURL = URL.init(fileURLWithPath: path)
// Remove file if existed
FileManager.default.removeItemIfExisted(exportURL)
// Init exporter
let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = exportURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.videoComposition = mainComposition
// Do export
exporter?.exportAsynchronously(completionHandler: {
DispatchQueue.main.async {
self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion)
}
})
}
Just Change This Part
let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)
var videoSize = videoTrack.naturalSize
if assetInfo.isPortrait == true {
videoSize.width = videoTrack.naturalSize.height
videoSize.height = videoTrack.naturalSize.width
}
// let size = videoTrack.naturalSize
// create text Layer
let titleLayer = CATextLayer()
titleLayer.backgroundColor = UIColor.clear.cgColor
titleLayer.contentsScale = UIScreen.main.scale
titleLayer.string = questions[counter]
counter = counter + 1
titleLayer.foregroundColor = UIColor.black.cgColor
titleLayer.font = UIFont(name: "Helvetica", size: 28)
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = CATextLayerAlignmentMode.center
titleLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
let videolayer = CALayer()
videolayer.backgroundColor = UIColor.clear.cgColor
videolayer.backgroundColor = UIColor.red.cgColor
videolayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(titleLayer)
// let layercomposition = AVMutableVideoComposition()
// layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
// layercomposition.renderSize = size
mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
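One detail worth calling out about this change: AVVideoCompositionCoreAnimationTool renders the video frames into the layer passed as postProcessingAsVideoLayer:, and that layer is expected to live inside the layer hierarchy passed as in:. In the original code the text layer was passed as the parent, the video layer was never added to it, and the video layer's frame was left unset, which is presumably why the export came out black while the audio was fine. With the video layer and the title layer both added as sublayers of a properly sized parent layer, as above, the frames have somewhere to render. It also helps to configure the animation tool once for the final video composition rather than recreating it on every pass through the asset loop.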
I'm developing an app with a custom video recorder, and I need to put a watermark over the recording. It all goes well on the recording part, but when I try to add the watermark, the video is exported with a different orientation and is sometimes badly cropped. The result is correct only with the device in landscape right.
Here's the code of the function I use to put the watermark on the video:
func addWatermarkToVideo(_ videoURL: URL, completion: @escaping (URL) -> Void) {
let videoAsset = AVURLAsset.init(url: videoURL)
let mixComposition = AVMutableComposition.init()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack!, at: kCMTimeZero)
compositionVideoTrack.preferredTransform = (videoAsset.tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform)!
let layer = CALayer.init()
layer.contents = UIImage.init(named: "VideoWatermark")?.cgImage
layer.frame = CGRect.init(x: 10, y: 10, width: 300, height: 100)
let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first
let videoSize = videoTrack!.naturalSize
let parentLayer = CALayer.init()
let videoLayer = CALayer.init()
parentLayer.frame = CGRect.init(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
videoLayer.frame = CGRect.init(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(layer)
let videoComp = AVMutableVideoComposition.init()
videoComp.renderSize = videoSize
videoComp.frameDuration = CMTimeMake(1, 30)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
let instruction = AVMutableVideoCompositionInstruction.init()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo).first
let layerInstruction = AVMutableVideoCompositionLayerInstruction.init(assetTrack: mixVideoTrack!)
layerInstruction.setTransform((clipVideoTrack?.preferredTransform)!, at: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
let assetExport = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPreset1280x720)
assetExport?.videoComposition = videoComp
let videoName = "\(Date.init().timeIntervalSince1970).mp4"
let exportURL = URL.init(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(videoName)
assetExport?.outputFileType = AVFileTypeMPEG4
assetExport?.outputURL = exportURL
assetExport?.shouldOptimizeForNetworkUse = true
assetExport?.exportAsynchronously(completionHandler: {
NSLog("Export status: \(String(describing: assetExport?.status.rawValue))")
completion(exportURL)
})
} catch let error as NSError {
print("\(error), \(error.localizedDescription)")
}
}
I tried googling my issue, but no result was ultimately helpful, and I have no idea what I'm doing wrong here.
Any help would be deeply appreciated; thanks in advance to anyone who can help.
This function exports the merged composition in landscape orientation when the source video is in portrait. I save the original video in portrait orientation to my documents directory and then save it to the camera roll, and that works fine. I then pass the saved video's URL to this function, and it somehow rotates it to landscape when it shouldn't. How do I fix this?
func makeVideoOverlay (url : URL) {
print("documents directory url: \(url)")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: url as URL, options: nil)
// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_duration = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
//var error: NSError?
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch {
// handle error
print("comp video track error: \(error.localizedDescription)")
}
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
let size = videoTrack.naturalSize
//this prints out to 1920x1080 landscape dimension. i don't know how
print("asset size: \(size)")
// Watermark Effect
let imglogo = UIImage(named: "logo-image")
let imglayer = CALayer()
imglayer.contents = imglogo?.cgImage
imglayer.frame = CGRect.init(x: 5, y: size.height-160, width: 150, height: 150)
imglayer.opacity = 1.0
let videolayer = CALayer()
videolayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
let parentlayer = CALayer()
parentlayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = size
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
// create new file to receive data
let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let docsDir: String = dirPaths[0] as String
let movieFilePath = docsDir.appending("/result.mov") as String
movieDestinationUrl = URL(fileURLWithPath: movieFilePath)
print("overlay destination url: \(movieDestinationUrl)")
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport?.outputFileType = AVFileTypeQuickTimeMovie
assetExport?.outputURL = movieDestinationUrl as URL
assetExport?.videoComposition = layercomposition
assetExport?.exportAsynchronously(completionHandler: {
if assetExport?.status == AVAssetExportSessionStatus.failed
{
print("failed: \(assetExport?.error)")
}
else if assetExport?.status == AVAssetExportSessionStatus.cancelled
{
print("cancelled: \(assetExport?.error)")
}
else
{
print("Movie complete")
OperationQueue.main.addOperation({ () -> Void in
//saves in landscape
self.saveAsset(url: self.movieDestinationUrl)
})
}
})
}
AVMutableVideoCompositionLayerInstruction has a method setTransform(_:at:).
As the documentation says:
Sets a fixed transform to apply from the specified time until the next time at which a transform is set. [...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of identity; after the last time for which a transform is set, the affine transform is held constant at that last value.
You should set the videoTrack's preferredTransform on the layerInstruction instead.
EDIT
You need to create the layerinstruction with the newly created composition track instead.
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack) // NOT videoTrack.
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
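Put in context, the fixed instruction setup looks something like the sketch below. The helper function and its parameter names are mine, not an API; it just makes explicit which pieces of the question's code are involved.
import AVFoundation
// Sketch: build the composition instruction from the composition track,
// carrying over the source track's preferredTransform.
func makeInstruction(for composition: AVMutableComposition,
                     compositionVideoTrack: AVMutableCompositionTrack,
                     sourceVideoTrack: AVAssetTrack) -> AVMutableVideoCompositionInstruction {
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    // The layer instruction must reference the composition's track...
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
    // ...while the transform comes from the original asset's track.
    layerInstruction.setTransform(sourceVideoTrack.preferredTransform, at: kCMTimeZero)
    instruction.layerInstructions = [layerInstruction]
    return instruction
}
For a portrait source you will likely also want the video composition's renderSize swapped to height × width, since naturalSize reports the pre-rotation dimensions.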
I'm encountering a wrong orientation on video exported using AVAssetExportSession, but only with the front camera. I followed this tutorial https://stackoverflow.com/a/35368649/3764365 but ended up with this scenario. I think it isn't really a wrong orientation; the image is cut in half. I tried changing the video layer and the render layer, but had no luck. My code looks like this.
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: path)
// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
// get audi trac
let videoTrack:AVAssetTrack = vtrack[0]
_ = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
var _: NSError?
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch let error {
print(error.localizedDescription)
}
let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioTrack = vidAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, vidAsset.duration), of: audioTrack, at: kCMTimeZero)
} catch {
print("error")
}
let size = videoTrack.naturalSize
let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
let videolayer = CALayer()
videolayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
parentlayer.addSublayer(videolayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = CGSize(width: size.height, height: size.width)
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
// create new file to receive data
let movieDestinationUrl = UIImage.outPut()
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPreset1280x720)!
assetExport.videoComposition = layercomposition
assetExport.outputFileType = AVFileTypeQuickTimeMovie
assetExport.outputURL = movieDestinationUrl
Setting movieFileOutputConnection?.isVideoMirrored from true to false fixed the issue for me. It's a weird bug, in my opinion.
if self.currentCamera == .front {
movieFileOutputConnection?.isVideoMirrored = false
}
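For context, that connection comes from the movie file output in the capture setup. A minimal sketch of where the flag is set, assuming movieFileOutput is your AVCaptureMovieFileOutput and the front/back choice is tracked the way the snippet above implies:
import AVFoundation
func disableFrontCameraMirroring(on movieFileOutput: AVCaptureMovieFileOutput,
                                 usingFrontCamera: Bool) {
    // The video connection carries the mirroring flag for the recorded file.
    guard let connection = movieFileOutput.connection(with: .video) else { return }
    guard usingFrontCamera, connection.isVideoMirroringSupported else { return }
    // Mirroring must be managed manually before isVideoMirrored can be set.
    connection.automaticallyAdjustsVideoMirroring = false
    // Record un-mirrored so the later export/composition keeps the expected orientation.
    connection.isVideoMirrored = false
}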
I'll share my code showing how I solved this issue.
func addImagesToVideo(path: URL, labelImageViews: [LabelImageView]) {
SVProgressHUD.show()
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: path)
// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
// get audi trac
let videoTrack:AVAssetTrack = vtrack[0]
_ = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
var _: NSError?
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch let error {
print(error.localizedDescription)
}
let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioTrack = vidAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, vidAsset.duration), of: audioTrack, at: kCMTimeZero)
} catch {
print("error")
}
let size = videoTrack.naturalSize
let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
let videolayer = CALayer()
videolayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
parentlayer.addSublayer(videolayer)
if labelImageViews.count != 0 {
let blankImage = self.clearImage(size: videolayer.frame.size)
let image = self.saveImage(imageOne: blankImage, labelImageViews: labelImageViews)
let imglayer = CALayer()
imglayer.contents = image.cgImage
imglayer.frame = CGRect(origin: CGPoint.zero, size: videolayer.frame.size)
imglayer.opacity = 1
parentlayer.addSublayer(imglayer)
}
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = CGSize(width: size.height, height: size.width)
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
var isVideoAssetPortrait = false
let videoTransform = videoTrack.preferredTransform
if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
isVideoAssetPortrait = true
}
if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
isVideoAssetPortrait = true
}
if isVideoAssetPortrait {
let FirstAssetScaleFactor = CGAffineTransform(scaleX: 1, y: 1)
layerinstruction.setTransform(videoTrack.preferredTransform.concatenating(FirstAssetScaleFactor), at: kCMTimeZero)
} else {
let FirstAssetScaleFactor = CGAffineTransform(scaleX: 1, y: 1)
layerinstruction.setTransform(videoTrack.preferredTransform.concatenating(FirstAssetScaleFactor).concatenating(CGAffineTransform(translationX: 0, y: 560)), at: kCMTimeZero)
}
// create new file to receive data
let movieDestinationUrl = UIImage.outPut()
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPreset1280x720)!
assetExport.videoComposition = layercomposition
assetExport.outputFileType = AVFileTypeQuickTimeMovie
assetExport.outputURL = movieDestinationUrl
assetExport.exportAsynchronously(completionHandler: {
switch assetExport.status{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error!)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error!)")
default:
print("Movie complete")
// play video
OperationQueue.main.addOperation({ () -> Void in
let output = UIImage.outPut()
UIImage.compress(inputURL: movieDestinationUrl as NSURL, outputURL: output as NSURL) {
UISaveVideoAtPathToSavedPhotosAlbum(output.relativePath, nil, nil, nil)
print("Done Converting")
DispatchQueue.main.async {
SVProgressHUD.dismiss()
}
}
})
}
})
}
I'm trying, in vain, to watermark an existing video with the AVFoundation library. I followed the instructions and tried to rewrite the code in Swift from the third answer to this question: iPhone Watermark on recorded Video – but it doesn't work for me.
Every time I run my code, I only see a black video, rendered with the length of the source video that needs to be watermarked. My goal is to fade in the watermark after 5 seconds.
Here is my code:
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: NSURL(fileURLWithPath: moviePath), options: nil)
// GET THE VIDEO TRACK
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
let animationImage: UIImage = self.artworkImage
let artWorkOverlayLayer: CALayer = CALayer()
artWorkOverlayLayer.contents = (animationImage.CGImage as! AnyObject)
artWorkOverlayLayer.frame = CGRectMake(0, 0, 512, 512)
artWorkOverlayLayer.opacity = 0
artWorkOverlayLayer.masksToBounds = true
let animation: CABasicAnimation = CABasicAnimation(keyPath: "opacity")
animation.duration = 10
animation.repeatCount = 0
animation.autoreverses = false
animation.fromValue = Int(0.0)
animation.toValue = Int(1.0)
animation.beginTime = 5.0
artWorkOverlayLayer.addAnimation(animation, forKey: "animateOpacity")
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
parentlayer.addSublayer(artWorkOverlayLayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = videoTrack.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// EXPORT
let filePath: NSURL = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingString("output-tmp.mp4"))
let assetExportSession: AVAssetExportSession! = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
assetExportSession.outputFileType = AVFileTypeMPEG4
assetExportSession.outputURL = filePath
assetExportSession.videoComposition = layercomposition
assetExportSession.exportAsynchronouslyWithCompletionHandler({() -> Void in
print(filePath)
})