I'm trying to apply some animations to a video after recording it and then export it.
Before adding any animation I had a problem with the orientation, which became landscape on export (without exporting it was portrait); that is solved now, but I have a problem making the video full screen: on iPhone 6/7 Plus it is full screen, but on iPad it is not.
Here is my method:
func export(_ url: URL) {
    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: url, options: nil)
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = track[0] as AVAssetTrack
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)

    let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
    } catch {
        print(error)
    }

    let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    for audioTrack in asset.tracks(withMediaType: AVMediaTypeAudio) {
        do {
            try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
        } catch {
            print(error)
        }
    }

    let size = self.view.bounds.size

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = self.view.bounds.size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)

    let ratio = size.height / videoTrack.naturalSize.width
    composition.naturalSize = videoTrack.naturalSize
    layerinstruction.setTransform(videoTrack.preferredTransform.scaledBy(x: 0.645, y: ratio).translatedBy(x: self.view.bounds.height, y: 0), at: kCMTimeZero)

    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    let filePath = self.fileName()
    let movieUrl = URL(fileURLWithPath: filePath)

    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = movieUrl

    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case .completed:
            print("success")
            let player = AVPlayer(url: movieUrl)
            let playerViewController = AVPlayerViewController()
            playerViewController.player = player
            self.present(playerViewController, animated: true) {
                playerViewController.player!.play()
            }
        case .cancelled:
            print("cancelled")
        case .exporting:
            print("exporting")
        case .failed:
            print("failed: \(String(describing: assetExport.error))")
        case .unknown:
            print("unknown")
        case .waiting:
            print("waiting")
        }
    })
}
What fixed my problem was changing the scale factor and the translation amount. Here is the code:
let size = self.view.bounds.size
let trackTransform = videoTrack.preferredTransform
let xScale = size.height / videoTrack.naturalSize.width
let yScale = size.width / videoTrack.naturalSize.height
let exportTransform = videoTrack.preferredTransform.translatedBy(x: trackTransform.ty * -1, y: 0).scaledBy(x: xScale, y: yScale)
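For completeness, a minimal sketch (reusing the variable names from the method above; the surrounding setup is unchanged from the question) of applying the corrected transform through the layer instruction:

let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
// Replaces the hard-coded 0.645 scale / bounds-based translation from before
layerinstruction.setTransform(exportTransform, at: kCMTimeZero)
instruction.layerInstructions = [layerinstruction]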
First time poster, looooooong time peruser. I'm using SwiftUI for the layout and UIViewRepresentables for the camera work (Xcode 11.7), and I'm trying to overlay an image onto a CALayer for eventual export to video. The image was converted from a UITextView, so the user is free to edit, pinch/zoom, and drag the text to their heart's content. After scouring SO for days and reading Ray Wenderlich tutorials, I've hit a wall. Screenshots below.
Before: freeform text 'coffee' added to the view
After: exported movie still, 'coffee' text position is incorrect
Below is the export function. I suspect I'm doing something wrong with relativePosition.
Thank you for any suggestions; this is my first foray into writing an iOS app.
static func exportLayersToVideo(_ fileUrl: String, _ textView: UITextView) {
    let fileURL = NSURL(fileURLWithPath: fileUrl)
    let composition = AVMutableComposition()
    let vidAsset = AVURLAsset(url: fileURL as URL, options: nil)

    // get video track
    let vtrack = vidAsset.tracks(withMediaType: AVMediaType.video)
    let videoTrack: AVAssetTrack = vtrack[0]
    let vid_timerange = CMTimeRangeMake(start: CMTime.zero, duration: vidAsset.duration)

    let tr: CMTimeRange = CMTimeRange(start: CMTime.zero, duration: CMTime(seconds: 10.0, preferredTimescale: 600))
    composition.insertEmptyTimeRange(tr)

    let trackID: CMPersistentTrackID = CMPersistentTrackID(kCMPersistentTrackID_Invalid)

    if let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: trackID) {
        do {
            try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: CMTime.zero)
        } catch {
            print("error")
        }
        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
    } else {
        print("unable to add video track")
        return
    }

    let size = videoTrack.naturalSize

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    // Convert UITextView to Image
    let renderer = UIGraphicsImageRenderer(size: textView.bounds.size)
    let image = renderer.image { ctx in
        textView.drawHierarchy(in: textView.bounds, afterScreenUpdates: true)
    }

    let imglayer = CALayer()
    let scaledAspect: CGFloat = image.size.width / image.size.height
    let scaledWidth = size.width
    let scaledHeight = scaledWidth / scaledAspect
    let relativePosition = parentlayer.convert(textView.frame.origin, from: textView.layer)
    imglayer.frame = CGRect(x: relativePosition.x, y: relativePosition.y, width: scaledWidth, height: scaledHeight)
    imglayer.contents = image.cgImage

    // Adding videolayer and imglayer
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    // instruction for overlay
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    // create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let docsDir = dirPaths[0] as NSString
    let movieFilePath = docsDir.appendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    assetExport?.outputFileType = AVFileType.mov
    assetExport?.videoComposition = layercomposition

    // Check exist and remove old files
    do { // delete old video
        try FileManager.default.removeItem(at: movieDestinationUrl as URL)
    } catch { print("Error Removing Existing File: \(error.localizedDescription).") }
    do { // delete old video
        try FileManager.default.removeItem(at: fileURL as URL)
    } catch { print("Error Removing Existing File: \(error.localizedDescription).") }

    assetExport?.outputURL = movieDestinationUrl as URL
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSession.Status.failed:
            print("failed")
            print(assetExport?.error ?? "unknown error")
        case AVAssetExportSession.Status.cancelled:
            print("cancelled")
            print(assetExport?.error ?? "unknown error")
        default:
            print("Movie complete")
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl as URL)
            }) { saved, error in
                if saved {
                    print("Saved")
                }
            }
        }
    })
}
It looks like the x position is correct, but the y is off. I think this is because the origin is at the bottom-left instead of the top-left. Try this:
var relativePosition = parentlayer.convert(textView.frame.origin, from: textView.layer)
relativePosition.y = size.height - relativePosition.y
imglayer.frame = CGRect(x: relativePosition.x, y: relativePosition.y, width: scaledWidth,height: scaledHeight)
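Note that CALayer frames in a video composition use a bottom-left origin, so depending on where you want the text anchored you may also need to subtract the overlay's own height (an assumption about the desired placement, not something verified against the poster's layout):

relativePosition.y = size.height - relativePosition.y - scaledHeight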
I'm learning AVFoundation and I'm having a problem trying to save a video with an overlay image in Swift 3. Using AVMutableComposition I'm able to add the image to the video; however, the video is zoomed in and doesn't constrain itself to the portrait size the video was taken in. I've tried:
Setting the natural size through the AVAssetTrack.
Constraining the video to portrait size in the AVMutableVideoComposition renderFrame.
Locking the new video bounds to the recorded video width and height.
The code below works apart from the issue I need help with. The image I'm trying to add covers the entire portrait view and has a border all around the edges. The app also only allows portrait orientation.
func processVideoWithWatermark(video: AVURLAsset, watermark: UIImage, completion: @escaping (Bool) -> Void) {
    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: video.url, options: nil)
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = track[0] as AVAssetTrack
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)

    let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

//    let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//
//    for audioTrack in asset.tracks(withMediaType: AVMediaTypeAudio) {
//        do {
//            try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
//        } catch {
//            print(error)
//        }
//    }

    let size = videoTrack.naturalSize

    let watermark = watermark.cgImage
    let watermarklayer = CALayer()
    watermarklayer.contents = watermark
    watermarklayer.frame = CGRect(x: 0, y: 0, width: screenWidth, height: screenHeight)
    watermarklayer.opacity = 1

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: screenWidth, height: screenHeight)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(watermarklayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = CGSize(width: screenWidth, height: screenHeight)
    layercomposition.renderScale = 1.0
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    let filePath = NSTemporaryDirectory() + self.fileName()
    let movieUrl = URL(fileURLWithPath: filePath)

    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = movieUrl

    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case .completed:
            print("success")
            print(video.url)
            self.saveVideoToUserLibrary(fileURL: movieUrl, completion: { (success, error) in
                if success {
                    completion(true)
                } else {
                    completion(false)
                }
            })
        case .cancelled:
            print("cancelled")
        case .exporting:
            print("exporting")
        case .failed:
            print(video.url)
            print("failed: \(assetExport.error!)")
        case .unknown:
            print("unknown")
        case .waiting:
            print("waiting")
        }
    })
}
If the video layer should fill the parent layer, your videolayer's frame is incorrect. You need to set its size to size (the track's naturalSize) instead of the screen size.
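Concretely, a sketch of the suggested change (extending the same fix to the watermark layer is my assumption, based on the symmetric setup in the question):

// Size the layers to the track's naturalSize so they fill parentlayer
videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
watermarklayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)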
This function exports the merged composition in landscape orientation when the source video is in portrait. I save the original video in portrait orientation to my documents directory and then save it to the camera roll, and it works fine. I then pass the saved video's URL to this function and it somehow rotates it to landscape when it shouldn't. How do I fix this?
func makeVideoOverlay(url: URL) {
    print("documents directory url: \(url)")

    let composition = AVMutableComposition()
    let vidAsset = AVURLAsset(url: url as URL, options: nil)

    // get video track
    let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = vtrack[0]
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    //var error: NSError?
    let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    do {
        try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
    } catch {
        // handle error
        print("comp video track error: \(error.localizedDescription)")
    }
    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    let size = videoTrack.naturalSize
    // this prints 1920x1080, a landscape dimension; I don't know how
    print("asset size: \(size)")

    // Watermark Effect
    let imglogo = UIImage(named: "logo-image")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.cgImage
    imglayer.frame = CGRect.init(x: 5, y: size.height - 160, width: 150, height: 150)
    imglayer.opacity = 1.0

    let videolayer = CALayer()
    videolayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
    let parentlayer = CALayer()
    parentlayer.frame = CGRect.init(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    // create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let docsDir: String = dirPaths[0] as String
    let movieFilePath = docsDir.appending("/result.mov") as String
    movieDestinationUrl = URL(fileURLWithPath: movieFilePath)
    print("overlay destination url: \(movieDestinationUrl)")

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    assetExport?.outputFileType = AVFileTypeQuickTimeMovie
    assetExport?.outputURL = movieDestinationUrl as URL
    assetExport?.videoComposition = layercomposition

    assetExport?.exportAsynchronously(completionHandler: {
        if assetExport?.status == AVAssetExportSessionStatus.failed {
            print("failed: \(assetExport?.error)")
        } else if assetExport?.status == AVAssetExportSessionStatus.cancelled {
            print("cancelled: \(assetExport?.error)")
        } else {
            print("Movie complete")
            OperationQueue.main.addOperation({ () -> Void in
                // saves in landscape
                self.saveAsset(url: self.movieDestinationUrl)
            })
        }
    })
}
AVMutableVideoCompositionLayerInstruction has a method setTransform(_:at:). As the documentation says:

Sets a fixed transform to apply from the specified time until the next time at which a transform is set. [...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of identity; after the last time for which a transform is set, the affine transform is held constant at that last value.

You should set the videoTrack's preferredTransform on the layerInstruction instead.
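To illustrate the documented behavior, here is a hypothetical sketch (not from the question) with two keyed transforms; the first holds from time zero until the second takes effect at the five-second mark:

let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack)
// Held from 0s until the next keyed time
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
// Held from 5s to the end of the instruction's time range
layerinstruction.setTransform(videoTrack.preferredTransform.rotated(by: .pi), at: CMTimeMake(5, 1))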
EDIT
You need to create the layerinstruction with the newly created composition track instead:
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionvideoTrack) // NOT videoTrack
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)
I'm encountering a wrong orientation in video exported using AVAssetExportSession, but only with the front camera. I followed this tutorial https://stackoverflow.com/a/35368649/3764365 but got this scenario. I don't think it's just wrong orientation; the image is cut in half. I tried changing the video layer and the render layer but had no luck. My code looks like this:
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(url: path)

// get video track
let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
let videoTrack: AVAssetTrack = vtrack[0]
_ = videoTrack.timeRange.duration
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
do {
    try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
} catch let error {
    print(error.localizedDescription)
}

// get audio track
let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioTrack = vidAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do {
    try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, vidAsset.duration), of: audioTrack, at: kCMTimeZero)
} catch {
    print("error")
}

let size = videoTrack.naturalSize

let parentlayer = CALayer()
parentlayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
let videolayer = CALayer()
videolayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
parentlayer.addSublayer(videolayer)

let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = CGSize(width: size.height, height: size.width)
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

// instruction for watermark
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = [layerinstruction]
layercomposition.instructions = [instruction]
layerinstruction.setTransform(videoTrack.preferredTransform, at: kCMTimeZero)

// create new file to receive data
let movieDestinationUrl = UIImage.outPut()

// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPreset1280x720)!
assetExport.videoComposition = layercomposition
assetExport.outputFileType = AVFileTypeQuickTimeMovie
assetExport.outputURL = movieDestinationUrl
Setting movieFileOutputConnection?.isVideoMirrored from true to false fixed the issue for me. It's a weird bug in my opinion.
if self.currentCamera == .front {
movieFileOutputConnection?.isVideoMirrored = false
}
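For context, movieFileOutputConnection here would be the video AVCaptureConnection of an AVCaptureMovieFileOutput, obtained before recording starts; a minimal sketch (the surrounding property names are assumptions, not from the answer):

let movieFileOutput = AVCaptureMovieFileOutput()
// ... add movieFileOutput to the AVCaptureSession first ...
let movieFileOutputConnection = movieFileOutput.connection(withMediaType: AVMediaTypeVideo)
if self.currentCamera == .front {
    movieFileOutputConnection?.isVideoMirrored = false
}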
I will share my code showing how I solved this issue:
func addImagesToVideo(path: URL, labelImageViews: [LabelImageView]) {
    SVProgressHUD.show()

    let composition = AVMutableComposition()
    let vidAsset = AVURLAsset(url: path)

    // get video track
    let vtrack = vidAsset.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = vtrack[0]
    _ = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    do {
        try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: kCMTimeZero)
    } catch let error {
        print(error.localizedDescription)
    }

    // get audio track
    let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let audioTrack = vidAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, vidAsset.duration), of: audioTrack, at: kCMTimeZero)
    } catch {
        print("error")
    }

    let size = videoTrack.naturalSize

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.height, height: size.width)
    parentlayer.addSublayer(videolayer)

    if labelImageViews.count != 0 {
        let blankImage = self.clearImage(size: videolayer.frame.size)
        let image = self.saveImage(imageOne: blankImage, labelImageViews: labelImageViews)

        let imglayer = CALayer()
        imglayer.contents = image.cgImage
        imglayer.frame = CGRect(origin: CGPoint.zero, size: videolayer.frame.size)
        imglayer.opacity = 1
        parentlayer.addSublayer(imglayer)
    }

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = CGSize(width: size.height, height: size.width)
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    var isVideoAssetPortrait = false
    let videoTransform = videoTrack.preferredTransform
    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
        isVideoAssetPortrait = true
    }
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
        isVideoAssetPortrait = true
    }

    if isVideoAssetPortrait {
        let FirstAssetScaleFactor = CGAffineTransform(scaleX: 1, y: 1)
        layerinstruction.setTransform(videoTrack.preferredTransform.concatenating(FirstAssetScaleFactor), at: kCMTimeZero)
    } else {
        let FirstAssetScaleFactor = CGAffineTransform(scaleX: 1, y: 1)
        layerinstruction.setTransform(videoTrack.preferredTransform.concatenating(FirstAssetScaleFactor).concatenating(CGAffineTransform(translationX: 0, y: 560)), at: kCMTimeZero)
    }

    // create new file to receive data
    let movieDestinationUrl = UIImage.outPut()

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPreset1280x720)!
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = movieDestinationUrl

    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error!)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error!)")
        default:
            print("Movie complete")
            // play video
            OperationQueue.main.addOperation({ () -> Void in
                let output = UIImage.outPut()
                UIImage.compress(inputURL: movieDestinationUrl as NSURL, outputURL: output as NSURL) {
                    UISaveVideoAtPathToSavedPhotosAlbum(output.relativePath, nil, nil, nil)
                    print("Done Converting")
                    DispatchQueue.main.async {
                        SVProgressHUD.dismiss()
                    }
                }
            })
        }
    })
}
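The clearImage(size:) and saveImage(imageOne:labelImageViews:) helpers aren't shown in the post. A plausible sketch of clearImage (my assumption, not the poster's actual code) would simply return a fully transparent image for the labels to be drawn onto:

func clearImage(size: CGSize) -> UIImage {
    // Non-opaque context, so the video shows through everywhere except the labels
    UIGraphicsBeginImageContextWithOptions(size, false, 0)
    let image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return image ?? UIImage()
}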
So I am trying to add a watermark to a previously recorded video using the following code, but when I view the video, there is no watermark. Can anyone help? I tried following the post at: iPhone Watermark on recorded Video.
public func addWatermarkToVideo(url: NSURL, completion: (url: NSURL?) -> Void) {
    let videoAsset = AVURLAsset(URL: url)
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack: AVAssetTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero)
    } catch {
        print(error)
    }
    compositionVideoTrack.preferredTransform = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].preferredTransform

    //Add watermark
    guard let myImage = UIImage(named: "Logo") else {
        completion(url: nil)
        return
    }
    let aLayer = CALayer()
    aLayer.contents = myImage.CGImage
    aLayer.frame = CGRectMake(5, 25, 100, 57)
    aLayer.opacity = 0.65

    let videoSize = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)

    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    let paths = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
    let documentsDirectory: AnyObject = paths[0]
    let dataPath = documentsDirectory.stringByAppendingPathComponent("VideoCache")
    if !NSFileManager.defaultManager().fileExistsAtPath(dataPath) {
        do {
            try NSFileManager.defaultManager().createDirectoryAtPath(dataPath, withIntermediateDirectories: false, attributes: nil)
        } catch {
            print("Couldn't create path")
        }
    }
    let tempURL = NSURL(fileURLWithPath: dataPath)
    let completeMovieUrl = tempURL.URLByAppendingPathComponent("tether-\(NSDate()).mov")

    if let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
        exporter.outputURL = completeMovieUrl
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
            switch exporter.status {
            case .Failed:
                print("failed \(exporter.error)")
                completion(url: nil)
            case .Cancelled:
                print("cancelled \(exporter.error)")
                completion(url: nil)
            default:
                print("complete")
                completion(url: exporter.outputURL)
            }
        })
    }
}
You've forgotten to add your videoComposition to the AVAssetExportSession:
exporter.outputFileType = AVFileTypeMPEG4 // You had this
exporter.videoComposition = videoComp // but had forgotten this
exporter.exportAsynchronouslyWithCompletionHandler({ // ...