I have a PNG image and a video for watermarking; both are portrait. I watermark the image onto the video.
After watermarking, the merged video comes out in landscape mode, rotated 90 degrees anticlockwise.
I cannot find the exact reason why the video changes from portrait to landscape, while the image shows as a stretched portrait.
Please help. Thanks in advance.
Here is the code I used:
- (void)addWatermarkAtVideoFile:(NSURL *)videoURL image:(UIImage *)image withConvertedVideoUUID:(NSString *)convertedVideoUUID response:(void(^)(BOOL success, NSString *videoUUID, NSURL *videoPath))responseBlock {
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if(clipVideoTrack) {
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
}
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if(clipAudioTrack) {
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}
CGSize sizeOfVideo=[videoAsset naturalSize];
//Image of watermark
UIImage *myImage = image;
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize=sizeOfVideo;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
//Creating temp path to save the converted video
NSString* myDocumentPath = [self getDocumentDirectoryPathWithFileName:convertedVideoUUID];
NSURL *outputFileURL = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
//Check if the file already exists then remove the previous file
[self removeFileIfExistAtPAth:myDocumentPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition=videoComposition;
exportSession.outputURL = outputFileURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export OK");
[self saveInPhotoAlbum:myDocumentPath];
break;
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
break;
}
BOOL statusSuccess = [exportSession status] == AVAssetExportSessionStatusCompleted;
responseBlock(statusSuccess ? YES : NO, statusSuccess ? convertedVideoUUID : nil, statusSuccess ? outputFileURL : nil);
}];
}
I think it's the default behaviour of AVFoundation; it's probably not related to the watermark feature.
You could use a CGAffineTransform:
// height and width here are the video track's natural dimensions
if (height > width) {
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
[layerInstruction setTransform:rotationTransform atTime:kCMTimeZero];
}
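A minimal Swift sketch of the same idea applied to the question's setup (names such as videoAsset, layerInstruction and videoComposition are assumed from the code above; this is a sketch, not a drop-in implementation):
// Reuse the source track's preferredTransform instead of a hard-coded rotation,
// and swap the render size when the track was recorded rotated (portrait).
if let track = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first {
    let t = track.preferredTransform
    let isRotated = (t.b != 0 || t.c != 0)   // 90° / 270° rotation stored in the track metadata
    layerInstruction.setTransform(t, at: kCMTimeZero)
    videoComposition.renderSize = isRotated
        ? CGSize(width: track.naturalSize.height, height: track.naturalSize.width)
        : track.naturalSize
}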
The naturalSize of an AVAsset's video track doesn't take the video's rotation into account.
If you want to place your watermark correctly, consider using the renderSize of the AVMutableVideoComposition, or apply some transformations.
This snippet gives you the actual orientation for an asset:
func orientationForAsset(_ asset: AVAsset) -> (orientation: UIImageOrientation, isPortrait: Bool) {
let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first!
let transformMatrix = videoTrack.preferredTransform
return orientationFromTransform(transformMatrix)
}
func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .down
}
return (assetOrientation, isPortrait)
}
With this one you can get the actual size considering the rotation.
func resolutionSizeForAsset(_ asset: AVAsset) -> CGSize? {
guard let track = asset.tracks(withMediaType: AVMediaTypeVideo).first else { return nil }
let size = track.naturalSize.applying(track.preferredTransform)
return CGSize(width: fabs(size.width), height: fabs(size.height))
}
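As a usage sketch, the two helpers above could drive the watermark setup from the question (videoURL, videoComposition and the watermark layer are assumptions standing in for the objects built earlier):
// Usage sketch; `videoURL`, `videoComposition` and `watermarkLayer` are assumed names.
let asset = AVAsset(url: videoURL)
let (orientation, isPortrait) = orientationForAsset(asset)
if let renderSize = resolutionSizeForAsset(asset) {
    videoComposition.renderSize = renderSize               // upright size, not naturalSize
    watermarkLayer.frame = CGRect(origin: .zero, size: renderSize)
}
print("orientation: \(orientation), portrait: \(isPortrait)")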
Related
After applying an AVVideoComposition to my AVPlayerItem, the filter I apply does work, but the video gets rotated in the AVPlayerLayer.
I know for a fact that the problem is not with the filtered frame because if I show the frame in a UIImageView, the frame is rendered 100% correctly.
The video shows correctly until I apply a videoComposition. Setting the videoGravity on the AVPlayerLayer does not help.
The video gets rotated 90º clockwise and gets stretched in the layer.
Essentially, the video is displayed perfectly in the AVPlayerLayer before the AVPlayerItem is fed through the AVMutableVideoComposition. Once that happens, the video is rotated -90º, and then scaled to fit the same dimensions as the video before filtering. This suggests to me that it does not realize that its transform is already correct, and so it is reapplying the transform on itself.
Why is this happening, and how can I fix it?
Here is some code:
private func filterVideo(with filter: Filter?) {
if let player = player, let playerItem = player.currentItem {
let composition = AVMutableComposition()
let videoAssetTrack = playerItem.asset.tracks(withMediaType: .video).first
let videoCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
try? videoCompositionTrack?.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: playerItem.asset.duration), of: videoAssetTrack!, at: kCMTimeZero)
videoCompositionTrack?.preferredTransform = videoAssetTrack!.preferredTransform
let videoComposition = AVMutableVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request) in
let filteredImage = <...>
request.finish(with: filteredImage, context: nil)
})
playerItem.videoComposition = videoComposition
}
}
The problem is in the renderSize of the AVVideoComposition. You should apply a transform to the AVMutableVideoCompositionLayerInstruction (i.e. rotate and translate transforms).
I have done it in Objective-C and am posting my code; you can convert the syntax into Swift.
Objective-C
//------------------------------------
// FIXING ORIENTATION
//------------------------------------
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack]; // second
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:secondAsset.duration];
}
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);
// Now you have an orientation-fixed instruction layer
// add this composition to your video 😀
// If you want to export the video, you can do it like below
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_merged_video-%d.mp4",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPreset640x480];
exporter.outputURL=url;
exporter.videoComposition=MainCompositionInst;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^
{
[[AppDelegate Getdelegate] hideIndicator];
[self exportDidFinish:exporter];
});
}];
For Swift, see the linked answer.
In addition, you can also try rotating your video layer by applying a rotation transform to it:
#define degreeToRadian(x) (M_PI * x / 180.0)
[_playerLayer setAffineTransform:CGAffineTransformMakeRotation(degreeToRadian(degree))];
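A Swift sketch of the same quick fix (playerLayer and degrees are assumed to exist in your player code):
// Sketch: rotate the AVPlayerLayer itself by an arbitrary angle.
let radians = CGFloat(degrees) * .pi / 180
playerLayer.setAffineTransform(CGAffineTransform(rotationAngle: radians))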
If you are trying to play an AVMutableComposition, you should copy the AVAssetTrack's preferredTransform to the AVMutableCompositionTrack's preferredTransform.
let asset = AVAsset(url: url!)
let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first
try? compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: videoTrack!, at: kCMTimeZero)
compositionTrack.preferredTransform = (videoTrack?.preferredTransform)!
let playerItem = AVPlayerItem(asset: composition)
let filter = CIFilter(name: "CIColorInvert")
playerItem.videoComposition = AVVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
filter?.setValue(request.sourceImage, forKey: kCIInputImageKey)
request.finish(with: (filter?.outputImage)!, context: nil)
})
.... the rest of code
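For context, a minimal sketch of what that remaining playback code might look like (the host view is an assumption, not part of the original answer):
// Sketch only; `view` is an assumed host view.
let player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = view.bounds
playerLayer.videoGravity = AVLayerVideoGravityResizeAspect
view.layer.addSublayer(playerLayer)
player.play()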
Instead of assuming that the image will be filtered, first check whether filteredImage is nil. If it isn't, call request.finish(with: filteredImage, context: nil).
However, if it is nil you must call request.finish(with: someError).
This is as per the docs.
What worked for me in the end:
private func filterVideo(with filter: Filter?) {
guard let player = playerLayer?.player, let playerItem = player.currentItem else { return }
let videoComposition = AVVideoComposition(asset: playerItem.asset, applyingCIFiltersWithHandler: { (request) in
if let filter = filter {
if let filteredImage = filter.filterImage(request.sourceImage) {
let output = filteredImage.cropping(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
} else {
printError("Image not filtered")
request.finish(with: RenderError.couldNotFilter)
}
} else {
let output = request.sourceImage.cropping(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
}
})
playerItem.videoComposition = videoComposition
}
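The RenderError type used above isn't shown in the post; a minimal stand-in matching that usage could be:
// Hypothetical error type matching the usage above; any Error-conforming type works.
enum RenderError: Error {
    case couldNotFilter
}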
This is the filterImage function of Filter, which is just a nice little wrapper for CIFilter:
func filterImage(_ ciImage: CIImage) -> CIImage? {
guard let filter = ciFilter else { return nil }
filter.setDefaults()
filter.setValue(ciImage, forKey: kCIInputImageKey)
guard let filteredImageData = filter.value(forKey: kCIOutputImageKey) as? CIImage else { return nil }
return filteredImageData
}
Try the code below, which worked for me:
// Grab the source track from an AVURLAsset, for example.
let assetV = YourAVASSET.tracks(withMediaType: AVMediaTypeVideo).last
// Grab the composition video track from the AVMutableComposition you already made.
let compositionV = YourComposition.tracks(withMediaType: AVMediaTypeVideo).last
// Apply the original transform.
if ((assetV != nil) && (compositionV != nil)) {
compositionV?.preferredTransform = (assetV?.preferredTransform)!
}
And then go ahead and export your video...
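A hedged sketch of that export step, under the same assumptions (YourComposition is the composition above; the output location is illustrative):
// Sketch: export the composition after copying the preferredTransform.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("exported.mov")
try? FileManager.default.removeItem(at: outputURL)
if let exporter = AVAssetExportSession(asset: YourComposition, presetName: AVAssetExportPresetHighestQuality) {
    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.shouldOptimizeForNetworkUse = true
    exporter.exportAsynchronously {
        print("export status: \(exporter.status.rawValue), error: \(String(describing: exporter.error))")
    }
}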
- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL
{
if (videoURL == nil)
return;
AppDelegate* appDelegate = [[UIApplication sharedApplication] delegate];
//[appDelegate showLoadingView: YES];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
// create the layer with the watermark image
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)image.CGImage;
aLayer.frame = CGRectMake(50, 100, image.size.width, image.size.height);
aLayer.opacity = 0.9;
//sorts the layer in proper order
AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = [videoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// create text Layer
CATextLayer* titleLayer = [CATextLayer layer];
titleLayer.backgroundColor = [UIColor clearColor].CGColor;
titleLayer.string = #"Dummy text";
titleLayer.font = CFBridgingRetain(#"Helvetica");
titleLayer.fontSize = 28;
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(0, 50, videoSize.width, videoSize.height / 6);
[parentLayer addSublayer:titleLayer];
//create the composition and add the instructions to insert the layer:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
// export video
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
NSLog (#"created exporter. supportedFileTypes: %#", _assetExport.supportedFileTypes);
NSString* videoName = #"NewWatermarkedVideo.mov";
NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//[appDelegate showLoadingView:NO];
//Final code here
switch (_assetExport.status)
{
case AVAssetExportSessionStatusUnknown:
NSLog(@"Unknown");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"Waiting");
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"Exporting");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"Created new water mark image");
playBtn.hidden = NO;
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed- %@", _assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Cancelled");
break;
}
}
];
}
Use of unresolved identifier 'presentMoviePlayerViewControllerAnimated'
'videoComposition()' is unavailable: use object construction 'AVMutableVideoComposition()'
Enum case 'unknown' not found in type 'AVAssetExportSessionStatus?'
Argument labels '(_:)' do not match any available overloads
These are a few of the errors I get while converting this to Swift; it works fine in Objective-C. I am totally new to Swift, any help?
Here is your Swift 3.0 version:
func createWatermark(_ image: UIImage, video videoURL: URL) {
    // let appDelegate = UIApplication.shared.delegate as? AppDelegate
    // appDelegate?.showLoadingView(true)
    let videoAsset = AVURLAsset(url: videoURL, options: nil)
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    try? compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
    compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
    // create the layer with the watermark image
    let aLayer = CALayer()
    aLayer.contents = image.cgImage
    aLayer.frame = CGRect(x: 50, y: 100, width: image.size.width, height: image.size.height)
    aLayer.opacity = 0.9
    // sort the layers in the proper order
    let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let videoSize = videoTrack.naturalSize
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)
    // create the text layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.clear.cgColor
    titleLayer.string = "Dummy text"
    titleLayer.font = CFBridgingRetain("Helvetica")
    titleLayer.fontSize = 28
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRect(x: 0, y: 50, width: videoSize.width, height: videoSize.height / 6)
    parentLayer.addSublayer(titleLayer)
    // create the composition and add the instructions to insert the layers
    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
    // instruction
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]
    // export the video
    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.videoComposition = videoComp
    print("created exporter. supportedFileTypes: \(assetExport.supportedFileTypes)")
    let videoName = "NewWatermarkedVideo.mov"
    let exportPath = (NSTemporaryDirectory() as NSString).appendingPathComponent(videoName)
    let exportUrl = URL(fileURLWithPath: exportPath)
    if FileManager.default.fileExists(atPath: exportPath) {
        try? FileManager.default.removeItem(atPath: exportPath)
    }
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = exportUrl
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.exportAsynchronously {
        // appDelegate?.showLoadingView(false)
        // final code here
        switch assetExport.status {
        case .unknown:
            print("Unknown")
        case .waiting:
            print("Waiting")
        case .exporting:
            print("Exporting")
        case .completed:
            print("Created new water mark image")
            self.playBtn.isHidden = false
        case .failed:
            print("Failed - \(assetExport.error)")
        case .cancelled:
            print("Cancelled")
        }
    }
}
I add a watermark to a video. I set the watermark layer's origin to (10, 10), but after export the watermark ends up in the lower-left corner. If I increase y, the watermark moves up, as if the y-axis increased from the bottom up.
Can anyone suggest something?
NSString *path = [[NSBundle mainBundle] pathForResource:@"video" ofType:@"mov"];
NSURL *file = [NSURL fileURLWithPath:path];
AVURLAsset *videoAsset = [[AVURLAsset alloc]initWithURL:file options:nil];
self.asset = videoAsset;
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (clipAudioTrack)
{
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipAudioTrack
atTime:kCMTimeZero
error:nil];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero
error:nil];
[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];
CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *aLayer = [CALayer layer];
CGRect frame = CGRectMake(10, 10, CGImageGetWidth(myImage.CGImage), CGImageGetHeight(myImage.CGImage));
aLayer.frame = frame;
aLayer.contents = (id)myImage.CGImage;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize = videoSize;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, nil, nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
break;
default:
break;
}
}];
In the past I had a lot of problems finding the correct anchor point for adding a watermark, so I'll post my code. Keep in mind that it generates a video with the watermark in the bottom-right corner (with a 5-pixel margin).
Pay attention to:
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
Final code:
final class QUWatermarkManager {
static func watermark(video videoAsset:AVAsset, imageLayer : CALayer, saveToLibrary flag : Bool, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
var finalSize = CGSizeMake(imageLayer.frame.size.width, imageLayer.frame.size.width)
var computedVideoSize = finalSize.width
while (computedVideoSize%16>0) { // find the right resolution that can be divided by 16
computedVideoSize++;
}
finalSize = CGSizeMake(computedVideoSize,computedVideoSize)
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let videoSize = clipVideoTrack.naturalSize
let scale = finalSize.width / videoSize.width
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(finalSize.width,finalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t4 = CGAffineTransformScale(clipVideoTrack.preferredTransform, scale, scale)
layerInstruction.setTransform(t4, atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
// Parent Layer
let parentLayer = CALayer()
parentLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
parentLayer.geometryFlipped = true
//Video Layer
let videoLayer = CALayer()
videoLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
videoLayer.geometryFlipped = true
let logoImageView = UIImageView(image: UIImage(named: "logo_nav2"))
logoImageView.alpha = 0.5
logoImageView.contentMode = .ScaleAspectFit
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(imageLayer)
parentLayer.addSublayer(logoImageView.layer)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
// 4 - Get path
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .FullStyle
let date = dateFormatter.stringFromDate(NSDate())
let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("watermarkVideo-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
exporter.canPerformMultiplePassesOverSourceMediaData = true
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if exporter.status == AVAssetExportSessionStatus.Completed {
let outputURL = exporter.outputURL
if flag {
// Save to library
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
completion!(status: .Completed, session: exporter, outputURL: outputURL)
})
}
} else {
// Don't save to library
completion!(status: .Completed, session: exporter, outputURL: outputURL)
}
} else {
// Error
completion!(status: exporter.status, session: exporter, outputURL: exporter.outputURL)
}
})
}
}
}
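A hedged usage sketch in the same Swift 2-era style as the class above (videoPath and the "watermark" image name are assumptions, not part of the original answer):
// Usage sketch; the image layer's frame also drives the final render size.
let asset = AVURLAsset(URL: NSURL(fileURLWithPath: videoPath), options: nil)
let watermarkLayer = CALayer()
watermarkLayer.contents = UIImage(named: "watermark")?.CGImage
watermarkLayer.frame = CGRectMake(0, 0, 640, 640)
QUWatermarkManager.watermark(video: asset, imageLayer: watermarkLayer, saveToLibrary: false, completion: { status, session, outputURL in
    print("watermark export finished: \(status) -> \(outputURL)")
})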
I've recorded a video with the front-facing camera and the output is mirrored...
I've tried using AVMutableComposition and layer instructions to flip the video, but with no luck.
Googling and searching Stack Overflow has been fruitless, so I bet a simple, straightforward example of how to do this is something that would benefit many.
There's no indication of what you are using to record the video; I'll assume AVCaptureSession + AVCaptureVideoDataOutput.
lazy var videoFileOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
let v = videoFileOutput.connectionWithMediaType(AVMediaTypeVideo)
v.videoOrientation = .Portrait
v.videoMirrored = true
You can use -[AVMutableVideoCompositionLayerInstruction setTransform:atTime:]
CGAffineTransform transform = CGAffineTransformMakeTranslation(self.config.videoSize, 0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
[videoCompositionLayerInstruction setTransform:transform atTime:videoTime];
// then append video tracks
// [compositionTrack insertTimeRange:timeRange ofTrack:track atTime:atTime error:&error];
// apply instructions
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(self.config.videoSize, self.config.videoSize);
videoComposition.frameDuration = CMTimeMake(1, self.config.videoFrameRate);
videoComposition.instructions = @[videoCompositionInstruction];
https://github.com/ElfSundae/AVDemo/tree/ef2ca437d0d8dcb3dd41c5a272c8754a29d8a936/AVSimpleEditoriOS
Export composition:
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:presetName];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = outputURL;
exportSession.shouldOptimizeForNetworkUse = YES;
// videoComposition contains transform instructions for video tracks
exportSession.videoComposition = videoComposition;
// audioMix contains background music for audio tracks
exportSession.audioMix = audioMix;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
AVAssetExportSessionStatus status = exportSession.status;
if (status != AVAssetExportSessionStatusCompleted) {
// exportSession.error
} else {
// exportSession.outputURL
}
}];
After you get your output, transform your video:
func mirrorVideo(inputURL: URL, completion: @escaping (_ outputURL : URL?) -> ())
{
let videoAsset: AVAsset = AVAsset( url: inputURL )
let clipVideoTrack = videoAsset.tracks( withMediaType: AVMediaType.video ).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)
let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
var transform:CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
transform = transform.translatedBy(x: -clipVideoTrack.naturalSize.width, y: 0.0)
transform = transform.rotated(by: CGFloat(Double.pi/2))
transform = transform.translatedBy(x: 0.0, y: -clipVideoTrack.naturalSize.width)
transformer.setTransform(transform, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
// Export
let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset640x480)!
let fileName = UniqueIDGenerator.generate().appending(".mp4")
let filePath = documentsURL.appendingPathComponent(fileName)
let croppedOutputFileUrl = filePath
exportSession.outputURL = croppedOutputFileUrl
exportSession.outputFileType = AVFileType.mp4
exportSession.videoComposition = videoComposition
exportSession.exportAsynchronously {
if exportSession.status == .completed {
DispatchQueue.main.async(execute: {
completion(croppedOutputFileUrl)
})
return
} else if exportSession.status == .failed {
print("Export failed - \(String(describing: exportSession.error))")
}
completion(nil)
return
}
}
Swift 5. AVCaptureSession:
let movieFileOutput = AVCaptureMovieFileOutput()
let connection = movieFileOutput.connection(with: .video)
if connection?.isVideoMirroringSupported ?? false {
connection?.isVideoMirrored = true
}
Same for PhotoOutput.
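For photo capture, a sketch of the same idea (photoOutput is an assumed AVCapturePhotoOutput that has already been added to the session; the connection is nil before that):
// Sketch; mirror the photo connection the same way as the movie connection above.
if let photoConnection = photoOutput.connection(with: .video),
   photoConnection.isVideoMirroringSupported {
    photoConnection.isVideoMirrored = true
}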
I am trying to merge multiple videos using AVMutableComposition. The problem I face is that whenever I add an AVMutableVideoComposition to apply any instructions, playback freezes in AVPlayer at exactly the 6-second mark.
Another interesting thing is that the video plays fine in the Photos app on the iPad after exporting with the same videoComposition. So why does it freeze in AVPlayer at 6 seconds?
Code:
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
for (AVURLAsset *asset in assets)
{
AVAssetTrack *assetTrack;
assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration )
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(@"asset url :: %@", assetTrack.asset);
NSLog(@"Error1 - %@", error.debugDescription);
}
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration)
ofTrack:audioAssetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error2 - %#", error.debugDescription);
}
time = CMTimeAdd(time, assetTrack.timeRange.duration);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;
}
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *videoTrackLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, time);
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videoTrackLayerInstruction, nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = size;
pi = [AVPlayerItem playerItemWithAsset:mutableComposition];
pi.videoComposition = mainCompositionInst;
Also, I know the problem is mostly the videoComposition, because if I remove the videoComposition it plays fine in AVPlayer.
UPDATE 1: I just found out that when it freezes after 6 seconds, if I drag the slider back or forward (i.e. use seekToTime), it starts playing normally again without any further freezes.
Also, the audio keeps playing fine even while the video is frozen.
UPDATE 2: If I just go ahead and export it using AVAssetExportSession with the same AVMutableComposition, and then load an asset from the exported video, it works fine. So the problem only arises when I play the AVMutableComposition directly.
Finally, I got the solution to fix this.
You should start playback only after the playerItem's status has changed to .ReadyToPlay.
Please see below.
func startVideoPlayer() {
let playerItem = AVPlayerItem(asset: self.composition!)
playerItem.videoComposition = self.videoComposition!
let player = AVPlayer(playerItem: playerItem)
player.actionAtItemEnd = .None
videoPlayerLayer = AVPlayerLayer(player: player)
videoPlayerLayer!.frame = self.bounds
/* add playerItem's observer */
player.addObserver(self, forKeyPath: "player.currentItem.status", options: .New, context: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: "playerItemDidReachEnd:", name: AVPlayerItemDidPlayToEndTimeNotification, object: playerItem);
self.layer.addSublayer(videoPlayerLayer!)
}
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if keyPath != nil && keyPath! == "player.currentItem.status" {
if let newValue = change?[NSKeyValueChangeNewKey] {
if AVPlayerStatus(rawValue: newValue as! Int) == .ReadyToPlay {
playVideo() /* play after status is changed to .ReadyToPlay */
}
}
} else {
super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
}
}
func playerItemDidReachEnd(notification: NSNotification) {
let playerItem = notification.object as! AVPlayerItem
playerItem.seekToTime(kCMTimeZero)
playVideo()
}
func playVideo() {
videoPlayerLayer?.player!.play()
}
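On newer Swift (4 and later), the same "wait for readyToPlay" idea can be sketched with block-based KVO instead of a string key path (playerItem and player as above; keep the observation alive, e.g. in a property):
// Sketch: block-based KVO; store `statusObservation` somewhere that outlives this scope.
var statusObservation: NSKeyValueObservation?
statusObservation = playerItem.observe(\.status, options: [.initial, .new]) { item, _ in
    if item.status == .readyToPlay {
        player.play()
    }
}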