exportAsynchronouslyWithCompletionHandler only calls its completion handler once - iOS

I tried asking this before but got no responses, so I'll try again. I'm trying to lay images on top of a video (like a doodle, or a border). I call the addAnimation method below three times, and the method itself runs each time, but exportAsynchronouslyWithCompletionHandler only calls its completion handler on the first try. Any ideas?
- (void)addAnimation:(UIImage *)background for:(NSString *)socialSite
{
    // 1 - Load asset
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:self.videoUrl options:nil];

    // 2 - Create AVMutableComposition object
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // 2.1 - Video track
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // 2.2 - Audio track
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    // If either track is nil, fetch it from the asset
    if ((self.clipVideoTrack == nil) || (self.clipAudioTrack == nil)) {
        self.clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        self.clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    }

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipAudioTrack atTime:kCMTimeZero error:nil];

    CGSize videoSize = [self.clipVideoTrack naturalSize];

    // 3 - Background layer
    UIImage *myImage = background;
    CALayer *aLayer = [CALayer layer];
    if (socialSite == nil) {
        aLayer.contents = (id)myImage.CGImage;
        if (__IPHONE_4_INCH)
            aLayer.frame = CGRectMake(0, 0, 640, 640);
        else
            aLayer.frame = CGRectMake(0, 0, 320, 320);
    } else {
        aLayer.contents = (id)myImage.CGImage;
        aLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
    }

    // 3.1 - Video layer
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
    if (socialSite == nil) {
        videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    } else {
        videoLayer.frame = CGRectMake(0, 110, videoSize.width, videoSize.height);
    }
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    // 3.2 - Add instructions
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];

    // 4 - Get path
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // 5 - Create exporter
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComp;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSData *video;
        /*********************************** INSTAGRAM *********************************************/
        if ([socialSite isEqualToString:@"instagram"]) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo = video;
            NSLog(@"IG SIZE (In CreateViewController): %lu", (unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo.length);
        }
        /*********************************** FB/TWITTER *********************************************/
        else if ([socialSite isEqualToString:@"facebook"]) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo = video;
            NSLog(@"FB SIZE (In CreateViewController): %lu", (unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo.length);
        }
        /*********************************** DOODLE *********************************************/
        else if (socialSite == nil) {
            video = [NSData dataWithContentsOfURL:exporter.outputURL];
            [MPOCreateMPManager sharedManager].createdMarcoPolo.video = video;
            [MPOCreateMPManager sharedManager].createdMarcoPolo.image = [self loadImage:exporter.outputURL];
            NSLog(@"*******************************DOODLE DONE*************************************");
        }
    }];
}
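
Worth checking before anything else (this is not part of the original post): the completion handler above never inspects the session's status, so a failing second export would look exactly like a handler that never fires. A minimal status check, using only the standard AVAssetExportSession API:

[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export completed: %@", exporter.outputURL);
            break;
        case AVAssetExportSessionStatusFailed:
            // A failed export still calls the handler; the error says why.
            NSLog(@"Export failed: %@", exporter.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export cancelled");
            break;
        default:
            break;
    }
}];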

Related

Issue with adding a watermark on a video

I am trying to add an image on top of a video. Everything works fine except one thing: the image is distorted.
Here is the code:
//Capture the image
UIGraphicsBeginImageContextWithOptions(self.captureView.bounds.size, false, UIScreen.main.scale)
self.captureView.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
let watermarkVideo = WatermakVideo()
//video file
let videoFile = Bundle.main.path(forResource: "videoTrim", ofType: "mp4")
let videoURL = URL(fileURLWithPath: videoFile!)
let imageFrame = captureView.frame
watermarkVideo.createWatermark(image, frame: imageFrame, video: videoURL)
Here is the WatermakVideo class:
https://www.dropbox.com/s/0d6i7ap9qu4klp5/WatermakVideo.zip
I would be grateful if you could help me fix this issue.
Copy the code below into your file. I had the same issue and solved it two weeks ago:
- (void)forStackOverflow:(NSURL *)url {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    CGSize sizeOfVideo = compositionVideoTrack.naturalSize;
    // Maps view-point dimensions into video pixels
    CGFloat scaleWidth = sizeOfVideo.height / self.view.frame.size.width;

    // Add image
    UIImage *myImage = [UIImage imageNamed:@"YOUR IMAGE PATH"];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(5 * scaleWidth, 0, self.birdSize.width * scaleWidth, self.birdSize.height * scaleWidth);
    layerCa.opacity = 1.0;

    // Add text on top of the image
    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    textOfvideo.alignmentMode = kCAAlignmentLeft;
    [textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:@"Arial" size:64.00])];
    [textOfvideo setFrame:CGRectMake(layerCa.frame.size.width / 6, layerCa.frame.size.height / 8 * 7 - layerCa.frame.size.height / 3, layerCa.frame.size.width / 1.5, layerCa.frame.size.height / 3)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[[UIColor redColor] CGColor]];

    UILabel *label = [[UILabel alloc] init];
    [label setText:self.questionString];
    label.textAlignment = NSTextAlignmentCenter;
    label.numberOfLines = 4;
    label.adjustsFontSizeToFitWidth = YES;
    [label setFont:[UIFont fontWithName:@"Arial" size:64.00]];
    [label.layer setFrame:CGRectMake(0, 0, textOfvideo.frame.size.width, textOfvideo.frame.size.height)];
    [textOfvideo addSublayer:label.layer];
    [layerCa addSublayer:textOfvideo];

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    // Width and height are swapped because the source video is rotated 90 degrees
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.height, sizeOfVideo.width);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:layerCa];
    [parentLayer setBackgroundColor:[UIColor blueColor].CGColor];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    // Swap width and height for the render size to account for the rotation
    videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export cancelled");
                break;
            default:
                break;
        }
        self.currentUrl = exportSession.outputURL;
    }];
}
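The detail that seems to matter for the distortion is the scaleWidth factor: the overlay layer's frame is specified in the video's own pixel space rather than in screen points, so the watermark keeps its proportions once it is composited at the video's native resolution.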

Adding an image as a watermark on a video results in an inverted video

I am adding an image watermark to a video using the following code, but the resulting video's frames are rotated by 180 degrees, and I have tried every possible solution to stop it. I just want the same video as the source, with the watermark. Please suggest a solution.
- (void)watermarkVideoAtURL:(NSURL *)url fb:(BOOL)fb withCompletionHandler:(void (^)(bool success, NSURL *assetsURL, NSError *error))completionHandler {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject] preferredTransform]];

    CGSize sizeOfVideo = [videoAsset naturalSize];

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);

    // Watermark image, drawn once in each corner
    UIImage *myImage = [self imageByApplyingAlpha:watermarkOpacityFactor toImage:[UIImage imageNamed:@"iconbig"]];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(10, sizeOfVideo.height - 50, 50, 50);
    layerCa.opacity = 1.0;

    CALayer *layerCa2 = [CALayer layer];
    layerCa2.contents = (id)myImage.CGImage;
    layerCa2.frame = CGRectMake(sizeOfVideo.width - 60, 10, 50, 50);
    layerCa2.opacity = 1.0;

    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:layerCa];
    [parentLayer addSublayer:layerCa2];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = sizeOfVideo;
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] lastObject];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted: {
                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                [library writeVideoAtPathToSavedPhotosAlbum:exportSession.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                    if (!error) {
                        completionHandler(YES, assetURL, nil);
                    } else {
                        completionHandler(NO, nil, error);
                    }
                }];
                break;
            }
            case AVAssetExportSessionStatusFailed:
                completionHandler(NO, nil, exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                completionHandler(NO, nil, exportSession.error);
                break;
            default:
                break;
        }
    }];
}
Try setting the AVAssetTrack's preferredTransform on the layer instruction via setTransform:atTime:

Sets a fixed transform to apply from the specified time until the next time at which a transform is set. [...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity; after the last time for which a transform is set, the affine transform is held constant at that last value.
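
A minimal sketch of that suggestion applied to the code above, reusing clipVideoTrack, layerInstruction, and videoComposition from the question (the portrait check is an assumption about how the source was recorded):

// Apply the source track's transform through the layer instruction,
// so the composition renders the frames the right way up.
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];

// For a portrait (90-degree-rotated) recording, the render size must be
// swapped to match the transformed frames.
CGAffineTransform t = clipVideoTrack.preferredTransform;
BOOL isPortrait = (t.a == 0 && fabs(t.b) == 1.0 && fabs(t.c) == 1.0 && t.d == 0);
videoComposition.renderSize = isPortrait
    ? CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.width)
    : clipVideoTrack.naturalSize;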

Recorded video rotates after adding watermark image in iOS

I am using the method below to add a watermark image to a video, but when I record the video and add the watermark to it, the video rotates by 90 degrees.
+ (void)createWatermarkForVideo:(NSURL *)videoURL watermark:(UIImage *)watermarkImage stickerContainerView:(UIView *)containerView completionAction:(VideoMergeCompletionBlock)completion {
    AVURLAsset *audioAsset, *videoAsset;

    // Create the AVMutableComposition object that will hold our video and audio tracks.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // First load the audio using AVURLAsset. Make sure you give the correct path.
    audioAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    // Create the first AVMutableCompositionTrack, containing the audio, and add it to the composition.
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    // Now load the video file.
    videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    // Create the second AVMutableCompositionTrack, containing the video, and add it to the composition.
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
    AVAssetTrack *aAudioAssetTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);
    [a_compositionVideoTrack setPreferredTransform:aVideoAssetTrack.preferredTransform];
    @try {
        [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];
        [b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
    }
    @catch (NSException *exception) {
    }

    // Create the layer with the watermark image.
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)watermarkImage.CGImage;
    CGSize videoSize = [aVideoAssetTrack naturalSize];
    CGFloat videoScale = videoSize.width / containerView.frame.size.width;
    aLayer.frame = CGRectMake(0, 0, containerView.frame.size.width * videoScale, containerView.frame.size.height * videoScale);
    aLayer.opacity = 0.9;

    // Sort the layers in the proper order.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];
    printf("Video Size %f %f", videoSize.width, videoSize.height);

    // Create the composition and add the instructions to insert the layer.
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    // Instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];

    AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
    // Apply the original transform.
    if (assetVideoTrack && a_compositionVideoTrack) {
        [a_compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
    }

    // Export the video.
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    _assetExport.videoComposition = videoComp;
    NSLog(@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes);

    NSString *videoName = @"NewWatermarkedVideo.mov";
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(_assetExport);
        });
    }];
}

Cannot add a watermark to videos whose track's preferredTransform is not the identity matrix using AVFoundation

I want to add a watermark image to the bottom-left corner of downloaded videos, and I find that the code below works when the video track's preferredTransform property is CGAffineTransformIdentity (that is, no real transformation), while for videos that have a transformation the code fails. How can I fix it?
The sample video (which has a transformation) URL is here.
By the way, the error code that AVFoundation reports is -11841.
+ (void)addWatermarkWithInputFile:(NSString *)inputFile outputFile:(NSString *)outputFile completion:(void (^)(BOOL))completion {
    [[NSFileManager defaultManager] removeItemAtPath:outputFile error:nil];

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:inputFile] options:nil];
    AVMutableComposition *composition = [AVMutableComposition composition];
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVAssetTrack *track = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    {
        AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        composedTrack.preferredTransform = track.preferredTransform;
        [composedTrack insertTimeRange:timeRange
                               ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject
                                atTime:kCMTimeZero
                                 error:nil];
    }
    {
        AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        composedTrack.preferredTransform = track.preferredTransform;
        [composedTrack insertTimeRange:timeRange
                               ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject
                                atTime:kCMTimeZero
                                 error:nil];
    }

    AVAssetTrack *clipVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    CGSize videoSize = clipVideoTrack.naturalSize;

    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    {
        UIImage *myImage = [UIImage imageNamed:@"video-watermark"];
        CALayer *aLayer = [CALayer layer];
        aLayer.contents = (id)myImage.CGImage;
        aLayer.frame = CGRectMake(10, 10, myImage.size.width * myImage.scale, myImage.size.height * myImage.scale);

        CALayer *parentLayer = [CALayer layer];
        CALayer *videoLayer = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
        [parentLayer addSublayer:videoLayer];
        [parentLayer addSublayer:aLayer];
        videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
    }

    // Instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = timeRange;
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    instruction.layerInstructions = @[layerInstruction];
    videoComp.instructions = @[instruction];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = [NSURL fileURLWithPath:outputFile];
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComp;
    exporter.timeRange = timeRange;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        completion(exporter.status == AVAssetExportSessionStatusCompleted);
    }];
}
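Error -11841 is AVErrorInvalidVideoComposition. One observation about the code above, offered as a possibility rather than a confirmed fix: the layer instruction is built from clipVideoTrack, a track of the downloaded asset, while the export session renders composition; a video composition whose layer instruction references a track ID that is not present in the exported asset can be rejected as invalid. A sketch of the adjustment, reusing the names above:

// Build the layer instruction against the composition's own video track,
// and carry the source transform through it (see the previous answer).
AVAssetTrack *composedVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:composedVideoTrack];
[layerInstruction setTransform:track.preferredTransform atTime:kCMTimeZero];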

AVMutableComposition of a Solid Color with No AVAsset

Here's my end goal: I'd like to use AVVideoCompositionCoreAnimationTool to create a video from Core Animation. I will not be using an existing AVAsset in this composition.
My question is, how can I use AVMutableComposition to make a video with a static solid color for a given amount of time? After I figure that out, I can add the animation.
Here's my code:
- (void)exportVideo {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(10, 600));
    [mixComposition insertEmptyTimeRange:timeRange];

    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoTrack insertEmptyTimeRange:timeRange];

    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = timeRange;
    mainInstruction.backgroundColor = [UIColor blueColor].CGColor;

    AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
    mainComposition.renderSize = CGSizeMake(500, 500);
    mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
    mainComposition.frameDuration = CMTimeMake(1, 30);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
I know you have to add at least one track to the mixComposition, so I've added a video track and inserted an empty time range, but when I call exportAsynchronouslyWithCompletionHandler, the handler is never called. If I add a dispatch_after for any amount of time after starting the export, I can observe that the exporter still has a status of AVAssetExportSessionStatusExporting.
What am I doing wrong?
After playing around with this a few months ago I found that the only reliable way to get it to work is to use a short, blank video in your AVMutableCompositionTrack, then overlay it with the desired layers.
I uploaded a project to GitHub about a month ago as a result of a bug in the simulator. You can download the blank video here, if you'd like.
- (void)exportVideo
{
    // mutableComposition, mutableCompositionVideoTrack, videoComposition, and
    // exporter are instance variables declared elsewhere in the class.
    CGSize renderingSize = CGSizeMake(640, 360); // The desired size of your video
    float displayDuration = 2.0f;                // The duration of the desired video, in seconds

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
    NSError *error;
    [[NSFileManager defaultManager] removeItemAtURL:url error:&error];

    mutableComposition = [AVMutableComposition composition];
    mutableCompositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = renderingSize;
    videoComposition.frameDuration = CMTimeMake(1, 30);

    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
    [parentLayer addSublayer:videoLayer];
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    // The short blank video that backs the composition
    NSString *path = [[NSBundle mainBundle] pathForResource:@"blank_1080p" ofType:@"mp4"];
    NSURL *trackUrl = [NSURL fileURLWithPath:path];
    AVAsset *asset = [AVAsset assetWithURL:trackUrl];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [mutableCompositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600)) ofTrack:track atTime:kCMTimeZero error:nil];

    // The solid-color layer that fills the frame
    CALayer *imageLayer = [CALayer layer];
    imageLayer.bounds = parentLayer.frame;
    imageLayer.anchorPoint = CGPointMake(0.5, 0.5);
    imageLayer.position = CGPointMake(CGRectGetMidX(imageLayer.bounds), CGRectGetMidY(imageLayer.bounds));
    imageLayer.backgroundColor = [UIColor blueColor].CGColor;
    imageLayer.contentsGravity = kCAGravityResizeAspectFill;
    [parentLayer addSublayer:imageLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
    videoComposition.instructions = @[instruction];

    exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.videoComposition = videoComposition;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
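
Both exportVideo snippets assume an exportDidFinish: helper defined elsewhere in the class; a minimal hypothetical version that just reports the session's outcome might look like this:

// Hypothetical helper assumed by the snippets above.
- (void)exportDidFinish:(AVAssetExportSession *)session {
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export finished: %@", session.outputURL);
    } else {
        NSLog(@"Export ended with status %ld: %@", (long)session.status, session.error);
    }
}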
