AVCaptureSession Export with Text - iOS

I am using AVCaptureSession to capture a video and save it to the camera roll. Everything works fine, but how can I export it with text written on it, or even place an image on it when exporting? Thanks.

Look at AVMutableVideoCompositionLayerInstruction: you basically need to add a layer instruction to your video. I don't remember the link, but there is an Apple WWDC video about this.

I have managed to write an image and a CATextLayer onto the video when exporting a bundled movie, and everything is good. But now, how can I write text at a specific frame (time)?
This is my code that worked; a sketch of timing the text follows it.
- (IBAction)btn1:(id)sender {
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"movie" ofType:@"mov"];
//NSURL * urlpath = [NSURL URLWithString:filePath];
NSLog(@"%@", filePath);
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
AVMutableCompositionTrack *audioTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
CGSize videoSize = [clipVideoTrack naturalSize];
UIImage *myImage = [UIImage imageNamed:@"close@2x.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(videoSize.width - 65, videoSize.height - 75, 57, 57 );
aLayer.opacity = 0.65;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"Text goes here";
titleLayer.font = (__bridge CFTypeRef)(@"Helvetica");
titleLayer.fontSize = videoSize.height / 6;
//?? titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6); //You may need to adjust this for proper display
[parentLayer addSublayer:titleLayer];
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; //AVAssetExportPresetPassthrough
assetExport.videoComposition = videoComp;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString* VideoName = [NSString stringWithFormat:@"%@/mynewwatermarkedvideo.mp4", documentsDirectory];
//NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:VideoName];
NSURL *exportUrl = [NSURL fileURLWithPath:VideoName];
if ([[NSFileManager defaultManager] fileExistsAtPath:VideoName])
{
[[NSFileManager defaultManager] removeItemAtPath:VideoName error:nil];
}
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
//[strRecordedFilename setString: exportPath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void) {
//[assetExport release];
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:assetExport];
});
}
];
}
-(void)exportDidFinish:(AVAssetExportSession*)session
{
NSURL *exportUrl = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
NSLog(@"%@", exportUrl);
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportUrl])
{
[library writeVideoAtPathToSavedPhotosAlbum:exportUrl completionBlock:^( NSURL *assetURL, NSError *error)
{
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
});
}];
}
NSLog(@"Completed");
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"AlertView" message:@"Video is edited successfully." delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
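To show the text only during a specific time range of the exported movie, one common approach is to animate the text layer's opacity, offsetting beginTime from AVCoreAnimationBeginTimeAtZero (a plain beginTime of 0.0 is interpreted by Core Animation as "now"). A minimal sketch under those assumptions, reusing titleLayer from the code above with hypothetical start and end times:
titleLayer.opacity = 0.0; // hidden until the fade-in fires
CABasicAnimation *fadeIn = [CABasicAnimation animationWithKeyPath:@"opacity"];
fadeIn.fromValue = @0.0;
fadeIn.toValue = @1.0;
fadeIn.beginTime = AVCoreAnimationBeginTimeAtZero + 2.0; // assumed start: 2 s into the video
fadeIn.duration = 0.1;
fadeIn.fillMode = kCAFillModeForwards;
fadeIn.removedOnCompletion = NO; // required so the offline render keeps the final value
[titleLayer addAnimation:fadeIn forKey:@"fadeIn"];
CABasicAnimation *fadeOut = [CABasicAnimation animationWithKeyPath:@"opacity"];
fadeOut.fromValue = @1.0;
fadeOut.toValue = @0.0;
fadeOut.beginTime = AVCoreAnimationBeginTimeAtZero + 5.0; // assumed end: 5 s into the video
fadeOut.duration = 0.1;
fadeOut.fillMode = kCAFillModeForwards;
fadeOut.removedOnCompletion = NO;
[titleLayer addAnimation:fadeOut forKey:@"fadeOut"];
Add both animations before handing parentLayer to AVVideoCompositionCoreAnimationTool so they are baked into the export.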

Related

Issue with adding a watermark on a video

I am trying to add an image to a video. Everything works fine except for one thing: the image is distorted.
Here is the code:
//Capture the image
UIGraphicsBeginImageContextWithOptions(self.captureView.bounds.size, false, UIScreen.main.scale)
self.captureView.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
let watermarkVideo = WatermakVideo()
//video file
let videoFile = Bundle.main.path(forResource: "videoTrim", ofType: "mp4")
let videoURL = URL(fileURLWithPath: videoFile!)
let imageFrame = captureView.frame
watermarkVideo.createWatermark(image, frame: imageFrame, video: videoURL)
Here is the WatermakVideo class:
https://www.dropbox.com/s/0d6i7ap9qu4klp5/WatermakVideo.zip
I would be grateful if you could help me fix this issue.
Copy the code below into your file. I had the same issue and solved it two weeks ago:
-(void)forStackOverflow:(NSURL*)url{
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:url options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//If you need audio as well add the Asset Track for audio here
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
CGSize sizeOfVideo=compositionVideoTrack.naturalSize;
CGFloat scaleWidth = sizeOfVideo.height/self.view.frame.size.width;
CGFloat scaleHeight = sizeOfVideo.width/self.view.frame.size.height;
// add image
UIImage *myImage=[UIImage imageNamed:@"YOUR IMAGE PATH"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake(5*scaleWidth, 0, self.birdSize.width*scaleWidth, self.birdSize.height*scaleWidth);
layerCa.opacity = 1.0;
// add Text on image
CATextLayer *textOfvideo=[[CATextLayer alloc] init];
textOfvideo.alignmentMode = kCAAlignmentLeft;
[textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:@"Arial" size:64.00])]; // fontUsed is the name of the font
[textOfvideo setFrame:CGRectMake(layerCa.frame.size.width/6, layerCa.frame.size.height/8*7-layerCa.frame.size.height/3, layerCa.frame.size.width/1.5, layerCa.frame.size.height/3)];
[textOfvideo setAlignmentMode:kCAAlignmentCenter];
[textOfvideo setForegroundColor:[[UIColor redColor] CGColor]];
UILabel*label = [[UILabel alloc]init];
[label setText:self.questionString];
label.textAlignment = NSTextAlignmentCenter;
label.numberOfLines = 4;
label.adjustsFontSizeToFitWidth = YES;
[label setFont:[UIFont fontWithName:@"Arial" size:64.00]];
//[label.layer setBackgroundColor:[[UIColor blackColor] CGColor]];
[label.layer setFrame:CGRectMake(0, 0, textOfvideo.frame.size.width, textOfvideo.frame.size.height)];
[textOfvideo addSublayer:label.layer];
[layerCa addSublayer:textOfvideo];
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.height,sizeOfVideo.width);
[parentLayer addSublayer:videoLayer];
//[parentLayer addSublayer:optionalLayer];
[parentLayer addSublayer:layerCa];
[parentLayer setBackgroundColor:[UIColor blueColor].CGColor];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
[layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
CGSize naturalSize;
naturalSize = videoTrack.naturalSize;
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComposition.renderSize = naturalSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition=videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
break;
}
self.currentUrl = exportSession.outputURL;
dispatch_async(dispatch_get_main_queue(), ^{
});
}];
}
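Note that this method leans on properties of the answerer's own class (self.birdSize, self.questionString, self.currentUrl), so you would need to declare those before reusing it. A hypothetical call site:
// Hypothetical usage; point the URL at whatever video you are watermarking.
NSURL *videoURL = [[NSBundle mainBundle] URLForResource:@"videoTrim" withExtension:@"mp4"];
[self forStackOverflow:videoURL];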

Adding an image as a watermark on a video results in an inverted video

I am adding an image watermark to a video using the following code, but the resulting video's frames are rotated by 180 degrees, and I have tried every possible solution to stop it. I just want the same video as the source, with a watermark. Please suggest a solution.
-(void)watermarkVideoAtURL:(NSURL *)url fb:(BOOL)fb withCompletionHandler:(void(^)(bool success, NSURL *assetsURL, NSError *error))completionHandler {
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:url options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject] preferredTransform]];
CGSize sizeOfVideo = [videoAsset naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
// Image of watermark
UIImage *myImage = [self imageByApplyingAlpha:watermarkOpacityFactor toImage:[UIImage imageNamed:@"iconbig"]];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake(10, sizeOfVideo.height - 50, 50, 50);
layerCa.opacity = 1.0;
CALayer *layerCa2 = [CALayer layer];
layerCa2.contents = (id)myImage.CGImage;
layerCa2.frame = CGRectMake(sizeOfVideo.width - 60, 10, 50, 50);
layerCa2.opacity = 1.0;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
[parentLayer addSublayer:layerCa2];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = sizeOfVideo;
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted: {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:exportSession.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (!error) {
completionHandler(YES, assetURL, nil);
} else {
completionHandler(NO, nil, error);
}
}];
}
break;
case AVAssetExportSessionStatusFailed: {
completionHandler(NO, nil, exportSession.error);
}
break;
case AVAssetExportSessionStatusCancelled: {
completionHandler(NO, nil, exportSession.error);
}
break;
default:
break;
}
}];
}
Try setting the AVAssetTrack's preferredTransform on the layer instruction with setTransform:atTime:. From the documentation:
Sets a fixed transform to apply from the specified time until the next time at which a transform is set. [...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity; after the last time for which a transform is set, the affine transform is held constant at that last value.
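A minimal sketch of that suggestion, reusing the variables from the question's code (for portrait footage the composition's renderSize may also need its width and height swapped):
// Carry the source track's orientation into the composition so the
// exported frames keep the original rotation.
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];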

Getting an error when I add a watermark to a video

I am creating an application that works with a video stored in the app's local directory. Now I want to add a watermark to it, but it is showing an error:
{NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The
media data could not be decoded. It may be damaged.}
Please help me resolve this issue.
-(void)displayWatermarkInVideo
{
NSString *path = [[NSBundle mainBundle] pathForResource:@"Video_20160102191557356_by_videoshow" ofType:@"mp4"];
NSURL *URL = [NSURL fileURLWithPath:path];
AVURLAsset *videoAssets = [[AVURLAsset alloc]initWithURL:URL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAssets tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssets.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAssets tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
//Add image
UIImage *myImage = [UIImage imageNamed:@"Frame-1.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(5, 25, 57, 57); //Needed for proper display. We are using the app icon (57x57). If you use 0,0 you will not see it
aLayer.opacity = 0.65; //Feel free to alter the alpha here
//Add text instead of image
CGSize videoSize = [videoAssets naturalSize];
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"Text goes here";
titleLayer.font = CFBridgingRetain(@"Helvetica");
titleLayer.fontSize = videoSize.height / 6;
//?? titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6); //You may need to adjust this for proper display
//Sorts the layer in proper order
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
[parentLayer addSublayer:titleLayer]; //ONLY IF WE ADDED TEXT
//Create composition
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
//Create instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
//Export
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition=videoComp;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
break;
}
}];
}
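Before swapping code wholesale, one quick check worth making (a sketch, not a confirmed diagnosis): "Cannot Decode" usually means AVFoundation could not read the source media at all, so verify the bundled asset actually yields a decodable video track before building the composition:
// Hedged sanity check on the source asset (videoAssets and URL come from the code above).
NSArray *videoTracks = [videoAssets tracksWithMediaType:AVMediaTypeVideo];
if (videoTracks.count == 0 || !videoAssets.isPlayable) {
NSLog(@"Source asset is missing or not decodable: %@", URL);
return;
}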
I used this code to generate a video with a watermark.
- (void)videoOutput {
// 1 - Early exit if there's no video file selected
if (!self.videoAsset) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Please Load a Video Asset First"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
return;
}
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
NSLog(@"naturalSize %f %f", naturalSize.width, naturalSize.height);
// 1 - Set up the text layer
CATextLayer *subtitle1Text = [[CATextLayer alloc] init];
[subtitle1Text setFont:(__bridge CFTypeRef)@"Helvetica-Bold"];
[subtitle1Text setFontSize:20];
[subtitle1Text setFrame:CGRectMake(0, 0, naturalSize.width, 100)]; // 'size' was undefined; use the computed naturalSize
[subtitle1Text setString:@"Your Text.."];
[subtitle1Text setAlignmentMode:kCAAlignmentCenter];
[subtitle1Text setForegroundColor:[[UIColor whiteColor] CGColor]];
// 2 - The usual overlay
CALayer *overlayLayer = [CALayer layer];
[overlayLayer addSublayer:subtitle1Text];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
// 'composition' was undefined here; the animation tool belongs on mainCompositionInst.
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
if (exporter.status == AVAssetExportSessionStatusCompleted) { // 'session' was undefined; check the exporter itself
NSURL *outputURL = exporter.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
});
}];
}
}
});
}];
}
Try this code. It worked perfectly for me. I hope it works for you too.

exportAsynchronouslyWithCompletionHandler only calls its handler once

I tried asking this before but got no responses, so I'll try again. I'm trying to lay images on top of a video (like a doodle, or a border). I call the addAnimation method three times and it runs, but the exportAsynchronouslyWithCompletionHandler block only gets called on the first try. Any ideas?
- (void)addAnimation:(UIImage *)background for:(NSString *)socialSite
{
// 1 - Load Asset
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:self.videoUrl options:nil];
// 2 - Create AVMutableComposition Object
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// 2.1 - VideoTrack
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// 2.2 - AudioTrack
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//If either of them are nil, then alloc
if( (self.clipVideoTrack == nil) || (self.clipAudioTrack == nil)){
self.clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
self.clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipAudioTrack atTime:kCMTimeZero error:nil];
CGSize videoSize = [self.clipVideoTrack naturalSize];
// 3 - Background layer
UIImage *myImage = background;
CALayer *aLayer = [CALayer layer];
if(socialSite == nil) {
aLayer.contents = (id)myImage.CGImage;
if(__IPHONE_4_INCH)
aLayer.frame = CGRectMake(0, 0, 640, 640);
else
aLayer.frame = CGRectMake(0, 0, 320, 320);
}
else{
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
}
// 3.1 - Video layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
if(socialSite == nil){
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
}
else{
videoLayer.frame = CGRectMake(0, 110, videoSize.width, videoSize.height);
}
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// 3.2 - Add instructions
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];//AVAssetExportPresetPassthrough
assetExport.videoComposition = videoComp;
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
myPathDocs = [myPathDocs stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = videoComp;
[exporter exportAsynchronouslyWithCompletionHandler:^ {
NSData *video;
/*********************************** INSTAGRAM *********************************************/
if([socialSite isEqualToString:@"instagram"]){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo = video;
NSLog(@"IG SIZE (In CreateViewController): %lu",(unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo.length);
}
/*********************************** FB/TWITTER *********************************************/
else if ([socialSite isEqualToString:@"facebook"]){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo = video;
NSLog(@"FB SIZE (In CreateViewController): %lu",(unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo.length);
}
/*********************************** DOODLE *********************************************/
else if (socialSite == nil){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.video = video;
[MPOCreateMPManager sharedManager].createdMarcoPolo.image = [self loadImage:exporter.outputURL];
NSLog(@"*******************************DOODLE DONE*************************************");
}
}];
}
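A useful first debugging step (a sketch, not a confirmed fix): inspect the exporter's status and error inside the handler, and clear the destination path before each export. A failed export still fires its handler exactly once, so logging the failure shows whether the later calls are dying silently rather than never completing:
// Debugging sketch, reusing myPathDocs and exporter from the code above.
if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
[exporter exportAsynchronouslyWithCompletionHandler:^{
if (exporter.status == AVAssetExportSessionStatusFailed) {
NSLog(@"Export failed: %@", exporter.error); // surfaces otherwise-silent failures
}
}];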

AVMutableComposition of a Solid Color with No AVAsset

Here's my end goal: I'd like to use AVVideoCompositionCoreAnimationTool to create a video from Core Animation. I will not be using an existing AVAsset in this composition.
My question is, how can I use AVMutableComposition to make a video with a static solid color for a given amount of time? After I figure that out, I can add the animation.
Here's my code:
- (void)exportVideo {
AVMutableComposition *mixComposition = [AVMutableComposition composition];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(10, 600));
[mixComposition insertEmptyTimeRange:timeRange];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertEmptyTimeRange:timeRange];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = timeRange;
mainInstruction.backgroundColor = [UIColor blueColor].CGColor;
AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
mainComposition.renderSize = CGSizeMake(500, 500);
mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainComposition.frameDuration = CMTimeMake(1, 30);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainComposition;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
I know you have to add at least one track to the mixComposition, so I've added a video track and inserted an empty time range, but when I call exportAsynchronouslyWithCompletionHandler, the handler is never called. I can add a dispatch_after for any amount of time after I call export and observe that the exporter has a status of AVAssetExportSessionStatusExporting.
What am I doing wrong?
After playing around with this a few months ago I found that the only reliable way to get it to work is to use a short, blank video in your AVMutableCompositionTrack, then overlay it with the desired layers.
I uploaded a project to GitHub about a month ago as a result of a bug in the simulator. You can download the blank video here, if you'd like.
-(void)exportVideo
{
CGSize renderingSize = CGSizeMake(640, 360); // The desired size of your video
float displayDuration = 2.0f; //The duration of the desired video, in seconds
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSError *error;
[[NSFileManager defaultManager] removeItemAtURL:url error:&error];
mutableComposition = [AVMutableComposition composition];
mutableCompositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = renderingSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
videoLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
[parentLayer addSublayer:videoLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSString *path = [[NSBundle mainBundle] pathForResource:@"blank_1080p" ofType:@"mp4"];
NSURL *trackUrl = [NSURL fileURLWithPath:path];
AVAsset *asset = [AVAsset assetWithURL:trackUrl];
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[mutableCompositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,CMTimeMakeWithSeconds(displayDuration, 600)) ofTrack:track atTime:kCMTimeZero error:nil];
CALayer *imageLayer = [CALayer layer];
imageLayer.bounds = parentLayer.frame;
imageLayer.anchorPoint = CGPointMake(0.5, 0.5);
imageLayer.position = CGPointMake(CGRectGetMidX(imageLayer.bounds), CGRectGetMidY(imageLayer.bounds));
imageLayer.backgroundColor = [UIColor blueColor].CGColor;
imageLayer.contentsGravity = kCAGravityResizeAspectFill;
[parentLayer addSublayer:imageLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
videoComposition.instructions = @[instruction];
exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.videoComposition = videoComposition;
exporter.outputFileType= AVFileTypeMPEG4;
exporter.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
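From here, the stated end goal (driving the video from Core Animation) is a matter of animating sublayers of parentLayer before exporting. A hedged sketch, reusing imageLayer, renderingSize, and displayDuration from the code above:
// Slide the solid-color layer across the frame for the full clip duration.
CABasicAnimation *slide = [CABasicAnimation animationWithKeyPath:@"position.x"];
slide.fromValue = @0;
slide.toValue = @(renderingSize.width);
slide.beginTime = AVCoreAnimationBeginTimeAtZero; // a plain 0.0 would mean "now" in an export tree
slide.duration = displayDuration;
slide.removedOnCompletion = NO;
[imageLayer addAnimation:slide forKey:@"slide"];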
