Composing Video with AVFoundation - iOS

I want to compose a video with my own aspect ratio. The user selects a video from the library. Have a look at my screen: there is a background UIView whose colour is changeable, and an AVPlayer above that view, which I have resized to the aspect ratio of the selected video. Now I want to render this whole scene as a video and save it to the photo library. This is what I have done so far. The saved video should look like the second image, but the export is not coming out that way.
- (IBAction)videoOutput:(id)sender
{
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration) ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
NSLog(@"%@", NSStringFromCGSize(naturalSize));
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderSize = CGSizeMake(320, 320);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize withColor:[array objectAtIndex:colorTag]];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
});
}];
}
}
}
- (UIImage *)imageWithColor:(UIColor *)color rectSize:(CGRect)imageSize {
CGRect rect = imageSize;
UIGraphicsBeginImageContextWithOptions(rect.size, NO, 0);
[color setFill];
UIRectFill(rect); // Fill it with your color
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return image;
}
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size withColor:(UIColor *)color
{
CGFloat temp = 1280;
CGSize videoSize = size;
CGFloat width = ((videoSize.width/videoSize.height)*temp);
CGFloat x = (temp - width)/2;
UIImage *borderImage = nil;
borderImage = [self imageWithColor:color rectSize:CGRectMake(0, 0, size.width, size.height)];
CALayer *backgroundLayer = [CALayer layer];
[backgroundLayer setContents:(id)[borderImage CGImage]];
backgroundLayer.frame = CGRectMake(0, 0, 720, 1280);
[backgroundLayer setMasksToBounds:YES];
CALayer *videoLayer = [CALayer layer];
videoLayer.frame = CGRectMake(x, 0, width, temp);
NSLog(@"video layer >> %@", NSStringFromCGRect(videoLayer.frame));
videoLayer.contentsGravity = kCAGravityBottomRight;
CALayer *parentLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 720, 1280);
parentLayer.contentsGravity = kCAGravityBottomRight;
parentLayer.backgroundColor = [UIColor redColor].CGColor;
[parentLayer addSublayer:backgroundLayer];
[parentLayer addSublayer:videoLayer];
composition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
- (IBAction)btn_play_pressed:(id)sender
{
[self.player play];
}
- (void)btn_pressed:(id)sender
{
colorTag = [(UIButton *)sender tag];
NSLog(@"%d", colorTag);
self.backgroundView.backgroundColor = [array objectAtIndex:colorTag];
}

I don't think you are ever setting AVMutableVideoCompositionInstruction.backgroundColor.
UIColor *redColor = [UIColor redColor];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
mainInstruction.backgroundColor = redColor.CGColor;
This would set the background to red; you will probably want the colour the user picked instead, as in the sketch below.
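Since the question already stores the selected colour in array and colorTag, a minimal sketch reusing those names (they come from the question's code, so adjust as needed):
// Use the colour the user picked in btn_pressed instead of a fixed red.
UIColor *userColor = [array objectAtIndex:colorTag];
mainInstruction.backgroundColor = userColor.CGColor;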

Related

Saving video to DocumentsDirectory

I am trying to improve my Objective-C skills, and I am not yet good enough to solve this problem.
I am using the code below to merge a video with audio and save the final video to the camera roll. The code works perfectly, but what I need is to save that video to a temporary location, so that I can change or modify the video without saving it to the camera roll every time I make a small change.
Here is my code:
- (IBAction)addBackgroundMusicAction:(UIButton *)sender {
if (self.asset == nil) {
return;
}
NSString *outPutPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *outPutUrl = [NSURL fileURLWithPath:outPutPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outPutPath])
{
[[NSFileManager defaultManager] removeItemAtPath:outPutPath error:nil];
}
AVMutableComposition *composition = [AVMutableComposition composition];
CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoAssetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
[videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"myAudio" ofType:@"mp3"]] options:nil];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
[audioTrack insertTimeRange:videoTimeRange ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.asset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
assetExport.outputURL = outPutUrl;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = mainCompositionInst;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:assetExport];
});
}];
}
And here is where I export the video to the camera roll:
- (void)exportDidFinish:(AVAssetExportSession*)session {
if (session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
} else {
AVPlayerItem * playeritem = [AVPlayerItem playerItemWithURL:outputURL];
[_player replaceCurrentItemWithPlayerItem:playeritem];
[_player play];
}
});
}];
}
}
}
As I said, I do not need to save the video to the camera roll; I just need to save it temporarily. I also need to load that video again when I press another button: save the video, then load it again.
Thanks
Use the following code:
NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
if (videoData) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *dataPath = [documentsDirectory stringByAppendingPathComponent:@"MyFolder"];
// Create folder if needed
[[NSFileManager defaultManager] createDirectoryAtPath:dataPath withIntermediateDirectories:YES attributes:nil error:nil];
NSString *filePath = [dataPath stringByAppendingPathComponent:@"test.mp4"];
if ([videoData writeToFile:filePath atomically:YES]) {
// yeah - file written
} else {
// oops - file not written
}
} else {
// oops - couldn't get data
}
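To load that video again later (the second part of the question), something along these lines should work once the write has succeeded; it reuses the filePath built above and the _player from the question's code:
// Reload the temporarily saved video into the existing player on demand.
if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
    AVPlayerItem *item = [AVPlayerItem playerItemWithURL:[NSURL fileURLWithPath:filePath]];
    [_player replaceCurrentItemWithPlayerItem:item];
    [_player play];
}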

Crash when using AVAssetExportSession

I use AVAssetExportSession to save a video with a watermark, following this link.
But my app crashes at the line:
[exporter exportAsynchronouslyWithCompletionHandler:^{ }];
My code:
- (void)mixVideoAsset{
AVAsset *videoAsset = [AVAsset assetWithURL:_urlVideo];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
isVideoAssetPortrait_ = YES;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
NSString *exportPath = [PATH_VIDEO_FOLDER stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",[VideoHelper getVideoDateByDate:[NSDate date]]]];
CMTime start = CMTimeMakeWithSeconds(mySAVideoRangeSlider.leftPosition, videoAsset.duration.timescale);
CMTime duration = CMTimeMakeWithSeconds(VIDEO_TIME_TRIM_SECONDS, videoAsset.duration.timescale);
CMTimeRange range = CMTimeRangeMake(start, duration);
exporter.timeRange = range;
exporter.outputURL = [NSURL fileURLWithPath:exportPath];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
// exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"Export Complete %ld %@", (long)exporter.status, exporter.error);
[VideoHelper saveThumbnailByVideoURL:[NSURL fileURLWithPath:exportPath]];
});
}];
}
applyVideoEffectsToComposition
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
CAKeyframeAnimation *animation = [self animationForGifWithURL:urlEffect];
[overlayLayer addAnimation:animation forKey:@"contents"];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
animationForGifWithURL
- (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
// In kCGImagePropertyGIFDictionary, the kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
// dict was already handed to ARC by CFBridgingRelease above; a further CFRelease would over-release it
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;
}
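A couple of defensive checks are worth adding before suspecting exportAsynchronouslyWithCompletionHandler itself (the over-release of dict noted in the comment above is one plausible crash source). This is only a sketch, reusing url and PATH_VIDEO_FOLDER from the code above:
// Guard the GIF source: CGImageSourceCreateWithURL returns NULL for a bad
// urlEffect, and the later CGImageSource calls will crash on NULL.
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
if (gifSource == NULL) {
    NSLog(@"Could not open GIF at %@", url);
    return nil;
}
// Make sure the export folder exists, otherwise the export fails.
[[NSFileManager defaultManager] createDirectoryAtPath:PATH_VIDEO_FOLDER
                          withIntermediateDirectories:YES attributes:nil error:nil];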

Getting an error when adding a watermark to a video

I am creating an application which deals with a video that exists in the app's local directory. Now I want to add a watermark to it, but it is showing this error:
{NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The
media data could not be decoded. It may be damaged.}
Please help me resolve this issue.
-(void)displayWatermarkInVideo
{
NSString *path = [[NSBundle mainBundle]pathForResource:@"Video_20160102191557356_by_videoshow" ofType:@"mp4"];
NSURL *URL = [NSURL fileURLWithPath:path];
AVURLAsset *videoAssets = [[AVURLAsset alloc]initWithURL:URL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAssets tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssets.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAssets tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
//Add image
UIImage *myImage = [UIImage imageNamed:@"Frame-1.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(5, 25, 57, 57); //Needed for proper display. We are using the app icon (57x57). If you use 0,0 you will not see it
aLayer.opacity = 0.65; //Feel free to alter the alpha here
//Add text instead of image
CGSize videoSize = [videoAssets naturalSize];
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"Text goes here";
titleLayer.font = CFBridgingRetain(@"Helvetica");
titleLayer.fontSize = videoSize.height / 6;
//?? titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6); //You may need to adjust this for proper display
//Sorts the layer in proper order
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
[parentLayer addSublayer:titleLayer]; //ONLY IF WE ADDED TEXT
//Create composition
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
//Create instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
//Export
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition=videoComp;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
break;
}
}];
}
I used this code to generate a video with a watermark.
- (void)videoOutput
{
// 1 - Early exit if there's no video file selected
if (!self.videoAsset) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Please Load a Video Asset First"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
return;
}
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
NSLog(@"naturalSize %f %f", naturalSize.width, naturalSize.height);
// 1 - Set up the text layer
CATextLayer *subtitle1Text = [[CATextLayer alloc] init];
[subtitle1Text setFont:(__bridge CFTypeRef)@"Helvetica-Bold"];
[subtitle1Text setFontSize:20];
[subtitle1Text setFrame:CGRectMake(0, 0, naturalSize.width, 100)];
[subtitle1Text setString:@"Your Text.."];
[subtitle1Text setAlignmentMode:kCAAlignmentCenter];
[subtitle1Text setForegroundColor:[[UIColor whiteColor] CGColor]];
// 2 - The usual overlay
CALayer *overlayLayer = [CALayer layer];
[overlayLayer addSublayer:subtitle1Text];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
if (exporter.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = exporter.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
});
}];
}
}
});
}];
}
Try this code. It worked perfectly for me. Hope it works for you too.
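As for the "Cannot Decode" error itself: it usually means AVFoundation could not read the source media at all (wrong path, damaged file, or an asset with no video track), so it is worth verifying the asset before building the composition. A minimal sketch using the names from the question:
// Bail out early if the bundled file is missing or has no video track;
// both conditions surface later as "Cannot Decode" during export.
NSString *path = [[NSBundle mainBundle] pathForResource:@"Video_20160102191557356_by_videoshow" ofType:@"mp4"];
if (path == nil) {
    NSLog(@"Source video is not in the bundle");
    return;
}
AVURLAsset *videoAssets = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:nil];
if ([videoAssets tracksWithMediaType:AVMediaTypeVideo].count == 0) {
    NSLog(@"No decodable video track at %@", path);
    return;
}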

Mix video streams on iOS

I need to mix two video streams from different inputs into a single video file: I want to embed one video's frames inside the other.
My sources are: an iOS app screen recording and the front camera.
Output: a video file of any format.
How can this be done?
NSString * str_first = [NSString stringWithFormat:@"%@",[ary_urls objectAtIndex:0]];
NSString * str_secnd = [NSString stringWithFormat:@"%@",[ary_urls objectAtIndex:1]];
NSString * str_third = [NSString stringWithFormat:@"%@",[ary_urls objectAtIndex:2]];
NSURL *url_frst =[[NSURL alloc]initWithString:str_first];
NSURL *url_second =[[NSURL alloc]initWithString:str_secnd];
NSURL *url_third =[[NSURL alloc]initWithString:str_third];
firstAsset = [AVAsset assetWithURL:url_frst];
secondAsset = [AVAsset assetWithURL:url_second];
thirdAsset = [AVAsset assetWithURL:url_third];
if(firstAsset !=nil && secondAsset!=nil){
[ActivityView startAnimating];
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];
// AUDIO TRACK
// if(audioAsset!=nil){
// AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// }
AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack * firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation firstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = firstAssetTrack.preferredTransform;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationRight;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationLeft;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
firstAssetOrientation_ = UIImageOrientationUp;
}
if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
firstAssetOrientation_ = UIImageOrientationDown;
}
[firstlayerInstruction setTransform:firstAsset.preferredTransform atTime:kCMTimeZero];
[firstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
CGFloat FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_)
{
FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[firstlayerInstruction setTransform:CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else
{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[firstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:kCMTimeZero];
}
[firstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
AVMutableVideoCompositionLayerInstruction *secondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
AVAssetTrack *secondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation secondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = secondAssetTrack.preferredTransform;
if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
secondAssetOrientation_= UIImageOrientationRight;
isSecondAssetPortrait_ = YES;
}
if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
secondAssetOrientation_ = UIImageOrientationLeft;
isSecondAssetPortrait_ = YES;
}
if (secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {
secondAssetOrientation_ = UIImageOrientationUp;
}
if (secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {
secondAssetOrientation_ = UIImageOrientationDown;
}
[secondlayerInstruction setTransform:secondAsset.preferredTransform atTime:firstAsset.duration];
CGFloat SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[secondlayerInstruction setTransform:CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
[secondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:firstAsset.duration];
}
MainInstruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction, secondlayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
CGSize naturalSizeFirst, naturalSizeSecond;
if(isFirstAssetPortrait_){
naturalSizeFirst = CGSizeMake(firstAssetTrack.naturalSize.height, firstAssetTrack.naturalSize.width);
} else {
naturalSizeFirst = firstAssetTrack.naturalSize;
}
if(isSecondAssetPortrait_){
naturalSizeSecond = CGSizeMake(secondAssetTrack.naturalSize.height, secondAssetTrack.naturalSize.width);
} else {
naturalSizeSecond = secondAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
if(naturalSizeFirst.width > naturalSizeSecond.width) {
renderWidth = naturalSizeFirst.width;
} else {
renderWidth = naturalSizeSecond.width;
}
if(naturalSizeFirst.height > naturalSizeSecond.height) {
renderHeight = naturalSizeFirst.height;
} else {
renderHeight = naturalSizeSecond.height;
}
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mp4",arc4random() % 1000]];
url_album = [NSURL fileURLWithPath:myPathDocs];
[ary_temp_url replaceObjectAtIndex:0 withObject:url_album];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url_album;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
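Note that the code above splices the two clips back to back rather than embedding one in the other. For actual picture-in-picture, insert both tracks at kCMTimeZero, keep both layer instructions alive for the whole time range (drop the setOpacity:0.0 calls), and shrink/position the inset track with a transform. A sketch with an arbitrary quarter-size inset, reusing the names from the code above:
// Both tracks start at kCMTimeZero so the clips play simultaneously.
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// Shrink the camera clip to a quarter of its size and inset it 20pt from the corner.
CGAffineTransform scale = CGAffineTransformMakeScale(0.25, 0.25);
CGAffineTransform move = CGAffineTransformMakeTranslation(20, 20);
[secondlayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
// The first layer instruction in the array is composited on top.
MainInstruction.layerInstructions = [NSArray arrayWithObjects:secondlayerInstruction, firstlayerInstruction, nil];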

Adding overlay on video

I am adding an overlay to a video and sharing it on Facebook and WhatsApp, but the quality of the video becomes very poor, especially on WhatsApp, whereas without the overlay the quality is fine. Here is the code that overlays the video:
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:outputFileURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioComposition = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[audioComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:sourceAudioTrack atTime:kCMTimeZero error:nil]; //[asset duration]
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
//*****************//
UIImageOrientation videoAssetOrientation_;// = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = clipVideoTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
isVideoAssetPortrait_ = NO;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
isVideoAssetPortrait_ = NO;
}
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0 atTime:videoAsset.duration];
//*****************//
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.width);
} else {
naturalSize = clipVideoTrack.naturalSize;
}
UIImage *myImage = [UIImage imageNamed:@"overlay.png"];
CALayer *aLayer = [CALayer layer];
if (naturalSize.width > 359) {
aLayer.frame = CGRectMake((naturalSize.width-340)/2, 0, 340, 90);
}
else if (naturalSize.width > 200) {
aLayer.frame = CGRectMake(10, 0, naturalSize.width-20, 65);
}
else{
aLayer.frame = CGRectMake(15, 0, naturalSize.width-30, 60);
}
aLayer.contents = (id)myImage.CGImage;
//Needed for proper display. We are using the app icon (57x57). If you use 0,0 you will not see it
// aLayer.opacity = 1.0; //Feel free to alter the alpha here
aLayer.backgroundColor = [UIColor clearColor].CGColor;
[aLayer setMasksToBounds:YES];
// CGSize videoSize = [videoAsset naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
//NSURL *url = [NSURL fileURLWithPath:[urlsOfVideos objectAtIndex:self.pageControl.currentPage]];
NSLog(@"videoSize ++++: %@", NSStringFromCGSize(naturalSize));
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition] ;
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComp.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComp.instructions = [NSArray arrayWithObject:instruction];
videoComp.frameDuration = CMTimeMake(1, 30);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputURL = exportUrl;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = videoComp;
//[strRecordedFilename setString: exportPath];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:{
NSLog(@"Fail");
NSLog(@"asset export fail error : %@", assetExport.error);
dispatch_async(dispatch_get_main_queue(), ^
{
});
break;
}
case AVAssetExportSessionStatusCompleted:{
NSLog(@"Success");
dispatch_async(dispatch_get_main_queue(), ^
{
});
break;
}
default:
break;
}
}
];
}
What am I doing wrong? Any idea?
Use the following code to add an overlay to a video:
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"overlay.png"];
[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
[overlayLayer setMasksToBounds:YES];
//Set Up the Parent Layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
Have a look at the following link containing a good tutorial:
http://www.raywenderlich.com/30200/avfoundation-tutorial-adding-overlays-and-animations-to-videos
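On the quality drop itself: WhatsApp re-encodes whatever video it is handed, so some loss after sharing is outside the app's control. What the app can control is exporting at the best preset the composition actually supports, for example (a sketch, reusing mixComposition and assetExport from the question's code):
// Fall back gracefully if the highest-quality preset is unavailable.
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
NSString *preset = [presets containsObject:AVAssetExportPresetHighestQuality] ? AVAssetExportPresetHighestQuality : AVAssetExportPresetMediumQuality;
assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:preset];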
