Issue with add watermark on video - ios

I am trying to add an image on top of a video. Everything works fine except for one thing: the image is distorted.
Here is the code :
// Capture the image: snapshot captureView into a UIImage at screen scale.
UIGraphicsBeginImageContextWithOptions(self.captureView.bounds.size, false, UIScreen.main.scale)
self.captureView.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
let watermarkVideo = WatermakVideo()
//video file bundled with the app
let videoFile = Bundle.main.path(forResource: "videoTrim", ofType: "mp4")
let videoURL = URL(fileURLWithPath: videoFile!)
// NOTE(review): this frame is in view points while the video is measured in
// pixels — if WatermakVideo uses it unscaled against the video's naturalSize,
// that mismatch would distort the overlay; confirm how `frame` is interpreted.
let imageFrame = captureView.frame
watermarkVideo.createWatermark(image, frame: imageFrame, video: videoURL)
Here is the class WatermakVideo :
https://www.dropbox.com/s/0d6i7ap9qu4klp5/WatermakVideo.zip
I would be grateful if you could help me fix this issue.

Copy the below into your file. I had the same issue and solved the problem two weeks ago:
// Composites a watermark image plus a text overlay onto the video at |url|
// and exports it to Documents/output_<timestamp>.mov. On success the movie is
// also saved to the photo album; self.currentUrl is set to the exported URL.
// (All `#"…"` / `#selector` tokens in the original were a mangled `@`.)
- (void)forStackOverflow:(NSURL *)url {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack =
        [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack =
        [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipVideoTrack
                                    atTime:kCMTimeZero
                                     error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipAudioTrack
                                    atTime:kCMTimeZero
                                     error:nil];
    // Keep the capture orientation of the source on the composition track.
    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    CGSize sizeOfVideo = compositionVideoTrack.naturalSize;
    // Maps view points to video pixels. NOTE(review): width is divided into
    // naturalSize.height because the source is assumed to be a portrait clip
    // whose naturalSize is reported in landscape — confirm for your assets.
    CGFloat scaleWidth = sizeOfVideo.height / self.view.frame.size.width;

    // Watermark image layer.
    UIImage *myImage = [UIImage imageNamed:@"YOUR IMAGE PATH"];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(5 * scaleWidth, 0,
                               self.birdSize.width * scaleWidth,
                               self.birdSize.height * scaleWidth);
    layerCa.opacity = 1.0;

    // Text overlay, positioned relative to the image layer.
    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    [textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:@"Arial" size:64.00])];
    [textOfvideo setFrame:CGRectMake(layerCa.frame.size.width / 6,
                                     layerCa.frame.size.height / 8 * 7 - layerCa.frame.size.height / 3,
                                     layerCa.frame.size.width / 1.5,
                                     layerCa.frame.size.height / 3)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[[UIColor redColor] CGColor]];

    // The text itself is rendered through a UILabel's layer so it can wrap
    // and shrink to fit.
    UILabel *label = [[UILabel alloc] init];
    [label setText:self.questionString];
    label.textAlignment = NSTextAlignmentCenter;
    label.numberOfLines = 4;
    label.adjustsFontSizeToFitWidth = YES;
    [label setFont:[UIFont fontWithName:@"Arial" size:64.00]];
    [label.layer setFrame:CGRectMake(0, 0, textOfvideo.frame.size.width, textOfvideo.frame.size.height)];
    [textOfvideo addSublayer:label.layer];
    [layerCa addSublayer:textOfvideo];

    // Layer tree for AVVideoCompositionCoreAnimationTool: the parent layer
    // holds the video layer plus every overlay.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    // Portrait render: width/height are swapped on the video layer.
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.height, sizeOfVideo.width);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:layerCa];
    [parentLayer setBackgroundColor:[UIColor blueColor].CGColor];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    // Bake the capture orientation into the rendered frames.
    [layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
    // Swap width/height so a rotated (portrait) clip renders upright.
    videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    // Fixed the "/utput_" typo in the output file name.
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov",
                                 [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self,
                                                        @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
        self.currentUrl = exportSession.outputURL;
        // The completion handler runs on a background queue; hop to main
        // before touching UI.
        dispatch_async(dispatch_get_main_queue(), ^{
        });
    }];
}

Related

AVMutableVideoComposition & AVVideoCompositionCoreAnimationTool color glitch

I'm trying to add image to video using following code:
// Overlays each DraggableImageView's image on the video with per-overlay
// fade-in/fade-out keyframe opacity animations, then exports asynchronously.
// NOTE(review): relies on surrounding state defined elsewhere in the class —
// rectVideo, imageViews, _recordSession, exportSession, exportProgressTimer.
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[self assetURL] options:nil];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

AVMutableCompositionTrack *videoTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                    ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *audioTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                    ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                     atTime:kCMTimeZero error:nil];

AVMutableVideoCompositionInstruction *mainInstruction =
    [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableVideoCompositionLayerInstruction *videolayerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// Bake the capture orientation into the rendered frames.
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeApplyAffineTransform(videoTrack.naturalSize, videoTrack.preferredTransform);
// CGSizeApplyAffineTransform yields negative components for 90°/180°
// rotations; a negative render size produces rendering artifacts, so take
// the magnitudes.
naturalSize = CGSizeMake(fabs(naturalSize.width), fabs(naturalSize.height));
mainCompositionInst.renderSize = naturalSize;
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);

// Points-to-pixels scale between the on-screen video rect and the render size.
CGFloat ratio = naturalSize.width / rectVideo.size.width;

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];

for (DraggableImageView *iv in imageViews) {
    CALayer *overlayLayer = [CALayer layer];
    [overlayLayer setContents:(id)[[self normalizedImage:iv.image] CGImage]];
    // Core Animation's origin is bottom-left while UIKit's is top-left: flip y.
    overlayLayer.frame = CGRectMake(iv.frame.origin.x * ratio,
                                    (rectVideo.size.height - iv.frame.origin.y - iv.frame.size.height) * ratio,
                                    iv.frame.size.width * ratio,
                                    iv.frame.size.height * ratio);
    overlayLayer.masksToBounds = YES;
    // Start fully transparent; the keyframe animation fades the layer in/out.
    overlayLayer.opacity = 0;

    CGFloat duration = CMTimeGetSeconds(iv.endTime) - CMTimeGetSeconds(iv.startTime);
    CGFloat start = CMTimeGetSeconds(iv.startTime);
    if (duration > CMTimeGetSeconds(videoAsset.duration)) {
        duration = CMTimeGetSeconds(videoAsset.duration);
        start = 0;
    }
    // Fraction of the overlay's lifetime spent fading in (and out).
    CGFloat fade = iv.fadeInSeconds / duration;

    CAKeyframeAnimation *anim = [[CAKeyframeAnimation alloc] init];
    anim.keyPath = @"opacity";
    anim.values = @[@0, @1, @1, @0];
    anim.keyTimes = @[@0, @(fade), @(1.0 - fade), @1];
    anim.duration = duration;
    // Video-composition animations are timed from AVCoreAnimationBeginTimeAtZero,
    // not CACurrentMediaTime().
    anim.beginTime = AVCoreAnimationBeginTimeAtZero + start;
    // Keep the final keyframe's effect instead of snapping back on removal.
    anim.removedOnCompletion = NO;
    [overlayLayer addAnimation:anim forKey:@"anim"];
    [parentLayer addSublayer:overlayLayer];
}

mainCompositionInst.animationTool =
    [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                 inLayer:parentLayer];

NSURL *exportURL = [self exportUrl];
exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                 presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = exportURL;
exportSession.videoComposition = mainCompositionInst;
exportSession.outputFileType = _recordSession.fileType;
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
exportProgressTimer = [NSTimer scheduledTimerWithTimeInterval:.1
                                                       target:self
                                                     selector:@selector(exportDidProgress:)
                                                     userInfo:nil
                                                      repeats:YES];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        case AVAssetExportSessionStatusCompleted:
        case AVAssetExportSessionStatusFailed:
            // savingComplete: receives nil error on success.
            [self savingComplete:exportSession.error];
            break;
        case AVAssetExportSessionStatusCancelled:
            break;
        default:
            break;
    }
}];
This code works great on iPhone SE but on iPad Air 2 gives this color glitch on the right edge of video (zoomed screenshot):
This glitch is not static, it changes vertically (size and y position).
I think this is not a problem with video size (well known problem with green line on the edge when video size not dividable by 4 or 16). Size of this video is 1080x1920.
Any ideas?

Adding a image as watermark on video results inverted video

I am adding an image watermark to a video using the following code, but the resulting video's frames are rotated by 180 degrees, and I have tried every solution I could find to stop it. I just want the output video, with the watermark, to look the same as the source video. Please suggest a solution.
// Stamps the watermark image in two corners of the video at |url| and exports
// it to Documents/output_<timestamp>.mov, saving the result to the photo
// album. |fb| is kept for interface compatibility but is unused here.
// |completionHandler| is invoked with (success, savedAssetURL, error).
- (void)watermarkVideoAtURL:(NSURL *)url fb:(BOOL)fb withCompletionHandler:(void (^)(bool success, NSURL *assetsURL, NSError *error))completionHandler {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    // NOTE(review): -[AVAsset naturalSize] is deprecated; prefer the video
    // track's naturalSize.
    CGSize sizeOfVideo = [videoAsset naturalSize];

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    // Two 50x50 watermark copies in opposite corners (CA coordinates have a
    // bottom-left origin).
    UIImage *myImage = [self imageByApplyingAlpha:watermarkOpacityFactor
                                          toImage:[UIImage imageNamed:@"iconbig"]];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(10, sizeOfVideo.height - 50, 50, 50);
    layerCa.opacity = 1.0;
    CALayer *layerCa2 = [CALayer layer];
    layerCa2.contents = (id)myImage.CGImage;
    layerCa2.frame = CGRectMake(sizeOfVideo.width - 60, 10, 50, 50);
    layerCa2.opacity = 1.0;

    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:layerCa];
    [parentLayer addSublayer:layerCa2];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = sizeOfVideo;
    videoComposition.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] lastObject];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    // FIX: without an explicit transform, the layer instruction falls back to
    // CGAffineTransformIdentity and the export comes out rotated/flipped —
    // a track's preferredTransform is not applied when a videoComposition is
    // in use.
    [layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    // Fixed the "/utput_" typo in the output file name.
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov",
                                 [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted: {
                // NOTE(review): ALAssetsLibrary is deprecated; consider
                // migrating to PHPhotoLibrary.
                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                [library writeVideoAtPathToSavedPhotosAlbum:exportSession.outputURL
                                            completionBlock:^(NSURL *assetURL, NSError *error) {
                    if (!error) {
                        completionHandler(YES, assetURL, nil);
                    } else {
                        completionHandler(NO, nil, error);
                    }
                }];
                break;
            }
            case AVAssetExportSessionStatusFailed:
            case AVAssetExportSessionStatusCancelled:
                completionHandler(NO, nil, exportSession.error);
                break;
            default:
                break;
        }
    }];
}
Try to set AVAssetTrack's preferredTransform to layer instruction
setTransform:atTime:
Sets a fixed transform to apply from the specified time until the next time at which a transform is set.[...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of
CGAffineTransformIdentity
; after the last time for which a transform is set, the affine transform is held constant at that last value.

Recorded video rotates after adding watermark image in iOS

I am using the method below to add a watermark image to a video, but the problem is that when I record the video and add the watermark to it, the video rotates by 90 degrees.
// Composites |watermarkImage| over the video at |videoURL|, scaled so it
// covers the same proportion of the frame as |containerView| does on screen,
// then exports to <tmp>/NewWatermarkedVideo.mov. |completion| is invoked on
// the main queue with the finished AVAssetExportSession.
+ (void)createWatermarkForVideo:(NSURL *)videoURL watermark:(UIImage *)watermarkImage stickerContainerView:(UIView *)containerView completionAction:(VideoMergeCompletionBlock)completion {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // The same file supplies both the audio and the video track.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];

    AVMutableCompositionTrack *b_compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *a_compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
    AVAssetTrack *aAudioAssetTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);

    // Keep the capture orientation on the composition track.
    [a_compositionVideoTrack setPreferredTransform:aVideoAssetTrack.preferredTransform];

    // NOTE(review): -insertTimeRange:... reports failure through its NSError
    // out-parameter rather than by throwing; the @try/@catch is kept from the
    // original, but Objective-C exceptions are NSException (the original
    // caught NSError, which never matches). The original's if/else both
    // performed the identical audio insert, so it is collapsed here.
    @try {
        [a_compositionVideoTrack insertTimeRange:video_timeRange
                                         ofTrack:aVideoAssetTrack
                                          atTime:kCMTimeZero error:nil];
        @try {
            [b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)
                                             ofTrack:aAudioAssetTrack
                                              atTime:kCMTimeZero error:nil];
        }
        @catch (NSException *exception) {
        }
    }
    @catch (NSException *exception) {
    }

    // Watermark layer, scaled from view points to video pixels.
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)watermarkImage.CGImage;
    CGSize videoSize = [aVideoAssetTrack naturalSize];
    CGFloat videoScale = videoSize.width / containerView.frame.size.width;
    aLayer.frame = CGRectMake(0, 0,
                              containerView.frame.size.width * videoScale,
                              containerView.frame.size.height * videoScale);
    aLayer.opacity = 0.9;

    // Layer tree for AVVideoCompositionCoreAnimationTool.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];
    printf("Video Size %f %f", videoSize.width, videoSize.height);

    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
    // FIX: apply the source orientation in the layer instruction. A track's
    // preferredTransform is ignored once a videoComposition is used, which is
    // why the exported video was rotated by 90 degrees.
    [layerInstruction setTransform:aVideoAssetTrack.preferredTransform atTime:kCMTimeZero];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];

    // Export to a fresh temporary file.
    AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    _assetExport.videoComposition = videoComp;
    NSLog(@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes);
    NSString *videoName = @"NewWatermarkedVideo.mov";
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    // The exporter fails if the destination already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(_assetExport);
        });
    }];
}

I have a .mp4 video how i can add water mark

I have an .mp4 video, and I want to generate a new video from it with a watermark in the proper place.
Is there a way to create a function that takes a video and returns the watermarked video?
try this :
This code adds a text string and an image on top of the video; after saving, the video will play in any player.
The main advantage of this code is that it preserves the video's sound and handles everything — both text and image — in one method.
#import <AVFoundation/AVFoundation.h>
// Overlays a text watermark (in the bottom sixth of the frame) and a
// full-frame image watermark over the video at `url`, exports the result to
// Documents/output_<timestamp>.mov, and saves it to the photo album.
// NOTE(review): `url`, `text`, `fontUsed`, and `selectedColour` are
// ivars/globals declared elsewhere in the original answer's class.
- (void)MixVideoWithText
{
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    // NOTE(review): -[AVAsset naturalSize] is deprecated; prefer the video
    // track's naturalSize.
    CGSize sizeOfVideo = [videoAsset naturalSize];

    // Text watermark.
    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    textOfvideo.string = [NSString stringWithFormat:@"%@", text];
    [textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:[NSString stringWithFormat:@"%@", fontUsed] size:13])];
    [textOfvideo setFrame:CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height / 6)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[selectedColour CGColor]];

    // Image watermark covering the whole frame.
    // FIX: `CALayer layerCa` was missing the `*` in the original.
    UIImage *myImage = [UIImage imageNamed:@"one.png"];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    layerCa.opacity = 1.0;

    // Container for the text layer. FIX: the original declared `optionalLayer`
    // but used an undeclared `optionalL`; the name is unified here.
    CALayer *optionalLayer = [CALayer layer];
    [optionalLayer addSublayer:textOfvideo];
    optionalLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [optionalLayer setMasksToBounds:YES];

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:optionalLayer];
    [parentLayer addSublayer:layerCa];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = sizeOfVideo;
    videoComposition.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    // Fixed the "/utput_" typo in the output file name.
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov",
                                 [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self,
                                                        @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];
}
The following callback reports any error that occurred while saving the video:
// UISaveVideoAtPathToSavedPhotosAlbum completion callback: logs any error
// that occurred while saving the exported video to the photo album.
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error) {
        NSLog(@"Finished saving video with error: %@", error);
    }
}
From :https://stackoverflow.com/a/22016800/3901620

exportAsynchronouslyWithCompletionHandler calls once

I tried asking this before, but got no responses. So I'll try again. I'm trying to lay images on top of a video (like a doodle, or a border). I call the "addAnimation" method three times, and it runs, but the exportAsynchronouslyWithCompletionHandler only gets called on the first try. Any ideas?
// Overlays |background| on self.videoUrl (a full-frame square doodle when
// |socialSite| is nil, a banner-style frame otherwise), exports to a random
// Documents path, and stores the resulting movie data on the shared
// MPOCreateMPManager singleton.
- (void)addAnimation:(UIImage *)background for:(NSString *)socialSite
{
    // 1 - Load asset
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:self.videoUrl options:nil];
    // 2 - Composition with one video and one audio track
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // Cache the source tracks the first time through.
    if (self.clipVideoTrack == nil || self.clipAudioTrack == nil) {
        self.clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        self.clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    }
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:self.clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:self.clipAudioTrack atTime:kCMTimeZero error:nil];
    CGSize videoSize = [self.clipVideoTrack naturalSize];

    // 3 - Background/overlay layer
    UIImage *myImage = background;
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)myImage.CGImage;
    if (socialSite == nil) {
        // Doodle: square overlay sized for the device screen.
        aLayer.frame = __IPHONE_4_INCH ? CGRectMake(0, 0, 640, 640)
                                       : CGRectMake(0, 0, 320, 320);
    } else {
        aLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
    }

    // 3.1 - Video layer inside the parent layer. Social exports shift the
    // video down by 110pt to leave room for the border art.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
    videoLayer.frame = (socialSite == nil)
        ? CGRectMake(0, 0, videoSize.width, videoSize.height)
        : CGRectMake(0, 110, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    // 3.2 - Video composition + instruction
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool =
        [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                     inLayer:parentLayer];
    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];
    // (Removed an unused AVAssetExportSession that the original created here
    // and immediately discarded, plus a dead percent-escaping reassignment of
    // the path string below.)

    // 4 - Output path. The random suffix only spans %1000, so collisions are
    // possible across calls; an existing file at the path makes the export
    // fail, which matches "the completion handler only fires the first time".
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
    // FIX: remove any stale file so AVAssetExportSession can write the output.
    [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // 5 - Exporter
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComp;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSData *video;
        /*********************************** INSTAGRAM *********************************************/
        if ([socialSite isEqualToString:@"instagram"]) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo = video;
            NSLog(@"IG SIZE (In CreateViewController): %lu",
                  (unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo.length);
        }
        /*********************************** FB/TWITTER *********************************************/
        else if ([socialSite isEqualToString:@"facebook"]) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo = video;
            NSLog(@"FB SIZE (In CreateViewController): %lu",
                  (unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo.length);
        }
        /*********************************** DOODLE *********************************************/
        else if (socialSite == nil) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.video = video;
            [MPOCreateMPManager sharedManager].createdMarcoPolo.image = [self loadImage:exporter.outputURL];
            NSLog(@"*******************************DOODLE DONE*************************************");
        }
    }];
}

Resources