Add GIF watermark on a video in iOS

I need to accomplish the following: there is a GIF overlay on a video, and I want to composite the video and the GIF into a new video. I'm using the code below, but the result is only the video, without the GIF:
- (void)mixVideoAsset:(AVAsset *)videoAsset {
LLog(@"Beginning");
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [paths objectAtIndex:0];
// NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
// [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
// NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
LLog(@"completed in %f seconds", interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
Add GIF Watermark
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[BBGifManager startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
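// Note: parentLayer/videoLayer here must be offscreen layers; AVVideoCompositionCoreAnimationTool does not work with layers that are attached to a visible view.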
}
Add CALayer Animations
+ (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
Create CAKeyframeAnimation
+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(@"kCGImagePropertyGIFDictionary %@", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// In kCGImagePropertyGIFDictionary, the kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
// dict is managed by ARC via CFBridgingRelease above; a manual CFRelease here would over-release it
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
return animation;
}

You should adjust your animation settings for Core Animation. By default an animation is removed once it completes, and Core Animation replaces a beginTime of 0.0 with CACurrentMediaTime(), so during export the GIF frames are never rendered. Set:
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
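Concretely, the end of animationForGifWithURL: should look like this (a minimal sketch; duration/repeatCount are repeated from the method above for context):
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
// Without these two lines the exporter renders nothing: Core Animation
// would resolve a beginTime of 0.0 to CACurrentMediaTime() and remove the
// animation as soon as it completes.
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;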

Just in case, here's an example in Swift 3 of how to do the same thing: insert animated frames/images into the video (not exactly a GIF, but an array of images). It uses AVAssetExportSession and AVMutableVideoComposition together with AVMutableVideoCompositionInstruction, and a CAKeyframeAnimation to animate the frames.

Related

Saving video with overlay of GIF image

I am working on an application in which I record a video. When recording finishes, I put a GIF image on it using a library.
My code for playing the video and adding the GIF image as an overlay:
self.avPlayer = [AVPlayer playerWithURL:self.urlstring];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
videoLayer.frame = self.preview_view.bounds;
videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.preview_view.layer addSublayer:videoLayer];
NSURL *url = [[NSBundle mainBundle] URLForResource:@"02" withExtension:@"gif"];
self.img_gif.image = [UIImage animatedImageWithAnimatedGIFData:[NSData dataWithContentsOfURL:url]];
But now I want to merge and save the video with this GIF image as an overlay. I googled but didn't find what I want.
Thank you for your help.
This is the best approach I found to merge a video with a GIF image.
- (void)mixVideoAsset:(AVAsset *)videoAsset {
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
NSLog(@"completed in %f seconds", interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[self startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
- (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
- (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(@"kCGImagePropertyGIFDictionary %@", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// In kCGImagePropertyGIFDictionary, the kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
// dict is managed by ARC via CFBridgingRelease above; a manual CFRelease here would over-release it
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;
}
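For completeness, a minimal usage sketch, assuming videoURL is a file URL to a local movie (the variable name is illustrative):
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:@{AVURLAssetPreferPreciseDurationAndTimingKey : @YES}];
[self mixVideoAsset:asset];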
Here is the Swift version of @Jitendra Modi's answer, and it worked like a charm.
Swift 5.2:
func animationForGif(with url: URL) -> CAKeyframeAnimation? {
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contents))
var frames: [CGImage] = []
var delayTimes: [CGFloat] = []
var totalTime: CGFloat = 0.0
// var gifWidth: CGFloat, gifHeight: CGFloat
guard let gifSource = CGImageSourceCreateWithURL(url as CFURL, nil) else {
print("Can not get image source from the gif: \(url)")
return nil
}
// get frame
let frameCount = CGImageSourceGetCount(gifSource)
for i in 0..<frameCount {
guard let frame = CGImageSourceCreateImageAtIndex(gifSource, i, nil) else {
continue
}
guard let dic = CGImageSourceCopyPropertiesAtIndex(gifSource, i, nil) as? [AnyHashable: Any] else { continue }
// gifWidth = dic[kCGImagePropertyPixelWidth] as? CGFloat ?? 0
// gifHeight = dic[kCGImagePropertyPixelHeight] as? CGFloat ?? 0
guard let gifDic: [AnyHashable: Any] = dic[kCGImagePropertyGIFDictionary] as? [AnyHashable: Any] else { continue }
let delayTime = gifDic[kCGImagePropertyGIFDelayTime] as? CGFloat ?? 0
frames.append(frame)
delayTimes.append(delayTime)
totalTime += delayTime
}
if frames.count == 0 {
return nil
}
assert(frames.count == delayTimes.count)
var times: [NSNumber] = []
var currentTime: CGFloat = 0
for i in 0..<delayTimes.count {
times.append(NSNumber(value: Double(currentTime / totalTime)))
currentTime += delayTimes[i]
}
animation.keyTimes = times
animation.values = frames
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear)
animation.duration = Double(totalTime)
animation.repeatCount = .greatestFiniteMagnitude
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.isRemovedOnCompletion = false
return animation
}
And you can use this animation:
let gifLayer = CALayer()
gifLayer.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
if let animation = animationForGif(with: gifUrl) {
gifLayer.add(animation, forKey: "contents")
}
parentLayer.addSublayer(gifLayer)
You can try any of the code below for screen recording. It will merge your video and GIF.
You can download a sample provided by Apple from the link below.
https://developer.apple.com/library/mac/samplecode/AVScreenShack/Introduction/Intro.html
https://github.com/alskipp/ASScreenRecorder
http://codethink.no-ip.org/wordpress/archives/673
Hope this helps you.

CGAffineTransform the AVAsset to fit device screen

I have an AVAsset with an AVAssetTrack, which has a size (for example, width = 1920, height = 1080). What I need is to fit this asset into a given screen size (for example, width = 320, height = 568): if the asset is landscape, rotate it (90 degrees); if it's square, add black stripes on top and bottom. I tried this:
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack)
{
if (handler)
handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
return;
}
CGFloat newHeight = [videoTrack naturalSize].height/3*4;
CGFloat newWidth = [videoTrack naturalSize].width/3*4;
const CGFloat videoAspectRatio = newWidth / newHeight;
const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
CGFloat scale = 1.f;
if (videoAspectRatio > toSizeAspectRatio)
{
scale = toSize.height / newHeight;
}
else
{
scale = toSize.width /newWidth;
}
CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
CGAffineTransform translateTrans = CGAffineTransformIdentity;
if (videoAspectRatio > toSizeAspectRatio)
{
if (offsetRatioPoint)
{
const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
else
{
const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
}
else
{
if (offsetRatioPoint)
{
const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
else
{
const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
}
CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
// Rotate transformation
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
videoComposition.renderSize = toSize;
int32_t frameRate = 30;
videoComposition.frameDuration = CMTimeMake(1, frameRate);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
exporter.videoComposition = videoComposition;
exporter.shouldOptimizeForNetworkUse = YES;
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
if (toURL)
exportPath = toURL.path;
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
__block NSURL* outURL = [NSURL fileURLWithPath: exportPath];
exporter.outputURL = outURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.timeRange = instruction.timeRange;
NSLog(@"%@", exportPath);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
if (!toURL)
{
if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
[[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
NSError *error;
if (![[NSFileManager defaultManager] moveItemAtPath: exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
NSLog(@"Error %@", error);
}
outURL = [NSURL fileURLWithPath: [GlobalConst fullMoviePath] ];
}
NSLog(@"%@", outURL);
handler(outURL, nil);
}];
}
It almost does the first step (rotating the landscape asset), but the resulting asset is a little bit zoomed in. Thanks in advance for any advice.
I solved this problem using the code below. In it I rotate a landscape video to portrait, make it square, and also add an image as a watermark.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
int videoDimention;
// make it square
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGSize videoSize;
NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
if(isPortrait) {
//videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
videoDimention=0;// for Portrait
} else {
videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
videoDimention=1;// for Landscape
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
videoDimention=2; // for squre
double height=clipVideoTrack.naturalSize.height;
[userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
}
else{
videoComposition.renderSize =videoSize;
}
// videoComposition.renderScale=.5;
if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
videoComposition.frameDuration = CMTimeMake(1, 15);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
videoComposition.frameDuration = CMTimeMake(1, 20);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
videoComposition.frameDuration = CMTimeMake(1, 25);
}
else{
videoComposition.frameDuration = CMTimeMake(1, 30);
}
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
// rotate to portrait
if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
}
//for water mark
CGSize sizeOfVideo=[asset naturalSize];
//Image of watermark
UIImage *myImage=[UIImage imageNamed:@"watermark"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
}
else{
layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
}
// layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
// AVMutableComposition *composition = [AVMutableComposition composition];
// [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// e.g .mov type
exportSession.outputURL = fileURL;
exportSession.videoComposition = videoComposition;
// [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
AVAssetExportSessionStatus status = [exportSession status];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
VideoEditVC *controller=[[VideoEditVC alloc] init];
controller.isFirst=YES;
controller.videoSize=videoDimention;
[self.navigationController pushViewController:controller animated:YES];
self.delegate=controller;
});
}];
});
You also need to implement this method:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == 0)
return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == size.width)
return UIInterfaceOrientationPortraitUpsideDown;
else
return UIInterfaceOrientationPortrait;
}
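Note that comparing tx/ty like this is fragile if a track carries an unusual translation. A sketch of an alternative, assuming the preferred transform is a pure rotation, derives the angle directly:
// Sketch: derive the track's rotation from its preferred transform.
// Assumes no scale or shear in the transform.
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGAffineTransform txf = videoTrack.preferredTransform;
CGFloat radians = atan2(txf.b, txf.a);
CGFloat degrees = radians * 180.0 / M_PI; // 0 = no rotation, 90 typically means portrait capture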

Video recording issue in iOS

I am working on a video-making app.
In it I need to record a video in the first view and afterwards display it in a second view. For recording the video I followed this tutorial.
I have made some changes as per my needs in the didFinishRecordingToOutputFileAtURL: method.
Here is my updated method.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
else {
NSLog(@"didFinishRecordingToOutputFileAtURL error:%@",error);
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAsset *asset = [AVAsset assetWithURL:outputFileURL];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:CMTimeMake(0, 1) error:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"%@%d.mov",NSBundle.mainBundle.infoDictionary[@"CFBundleExecutable"],++videoCounter]];
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
if ([[[NSUserDefaults standardUserDefaults] stringForKey:@"orientation"] isEqualToString:@"landscape"]) {
videoAssetOrientation_ = UIImageOrientationUp;
}
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
if (![self.ratioLabel.text isEqualToString:@"16:9"]) {
renderWidth = naturalSize.width;
renderHeight = naturalSize.width;
NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
else {
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
if (![self.ratioLabel.text isEqualToString:@"16:9"])
{
CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2);
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
else
{
CGAffineTransform t2 = CGAffineTransformMakeRotation( M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
AVCaptureDevicePosition position = [[VideoInputDevice device] position];
if (position == AVCaptureDevicePositionFront)
{
/* For front camera only */
CGAffineTransform t = CGAffineTransformMakeScale(-1.0f, 1.0f);
t = CGAffineTransformTranslate(t, -videoAssetTrack.naturalSize.width, 0);
t = CGAffineTransformRotate(t, (DEGREES_TO_RADIANS(90.0)));
t = CGAffineTransformTranslate(t, 0.0f, -videoAssetTrack.naturalSize.width);
[layerInstruction setTransform:t atTime:kCMTimeZero];
/* For front camera only */
}
[layerInstruction setOpacity:0.0 atTime:asset.duration];
instruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:instruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
AVAssetExportSession *exporter;
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.videoComposition = mainCompositionInst;
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
self.doneButton.userInteractionEnabled = YES;
if(videoAddr==nil)
{
videoAddr = [[NSMutableArray alloc] init];
}
[videoAddr addObject:exporter.outputURL];
[[PreviewLayer connection] setEnabled:YES];
AVAsset *asset = [AVAsset assetWithURL:exporter.outputURL];
NSLog(@"remaining seconds before:%f",lastSecond);
double assetDuration = CMTimeGetSeconds(asset.duration);
if (assetDuration>3.0)
assetDuration = 3.0;
lastSecond = lastSecond- assetDuration;
NSLog(@"remaining seconds after:%f",lastSecond);
self.secondsLabel.text = [NSString stringWithFormat:@"%0.1fs",lastSecond];
self.secondsLabel.hidden = NO;
NSData *data = [NSKeyedArchiver archivedDataWithRootObject:videoAddr];
[[NSUserDefaults standardUserDefaults] setObject:data forKey:@"videoAddr"];
[[NSUserDefaults standardUserDefaults] synchronize];
videoURL = outputFileURL;
flagAutorotate = NO;
self.cancelButton.hidden = self.doneButton.hidden = NO;
imgCancel.hidden = imgDone.hidden = NO;
if ([[NSUserDefaults standardUserDefaults] boolForKey:@"Vibration"])
AudioServicesPlayAlertSound(kSystemSoundID_Vibrate);
[[UIApplication sharedApplication] endIgnoringInteractionEvents];
});
}];
}
else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video cannot be saved\nPlease free some storage space" delegate:self cancelButtonTitle:nil otherButtonTitles:nil, nil];
[alert show];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[alert dismissWithClickedButtonIndex:0 animated:YES];
});
}
}
}
But here is the issue: the video is not being recorded exactly as shown in the preview.
See these two screenshots:
Video recording preview
Video playing view
The reason is that your iPad's screen aspect ratio is not the same as the camera's aspect ratio. You can modify the camera preview size by setting the videoGravity property of AVCaptureVideoPreviewLayer, which influences how content is displayed relative to the layer bounds:
layer.videoGravity = AVLayerVideoGravityResizeAspect;
But in that case the preview won't be fullscreen.
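For reference, the available gravity options:
layer.videoGravity = AVLayerVideoGravityResizeAspect; // letterbox/pillarbox, no cropping
layer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fills the layer, crops the overflow
layer.videoGravity = AVLayerVideoGravityResize; // stretches to fill, may distort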
If you want the video to have the same aspect ratio as the fullscreen preview, you will have to crop it. The cropping process is explained here:
Exporting AVCaptureSession video in a size that matches the preview layer
Video capture with 1:1 aspect ratio in iOS
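For reference, a minimal sketch of such a crop, assuming an upright track (preferred transform already applied) and a target previewSize supplied by the caller (both names are illustrative):
// Scale the track so it fills the render size, then center the overflow;
// everything outside renderSize is cropped away on export.
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGSize natural = track.naturalSize;
CGFloat fillScale = MAX(previewSize.width / natural.width, previewSize.height / natural.height);
CGFloat dx = (previewSize.width - natural.width * fillScale) / 2.0;
CGFloat dy = (previewSize.height - natural.height * fillScale) / 2.0;
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
[layerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformMakeScale(fillScale, fillScale), CGAffineTransformMakeTranslation(dx, dy)) atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
instruction.layerInstructions = @[layerInstruction];
AVMutableVideoComposition *cropComposition = [AVMutableVideoComposition videoComposition];
cropComposition.renderSize = previewSize;
cropComposition.frameDuration = CMTimeMake(1, 30);
cropComposition.instructions = @[instruction];
// Assign cropComposition to an AVAssetExportSession's videoComposition as in the answers above.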

iOS ELCImagePicker: Connection to assetsd was interrupted or assetsd died

I am using ELCImagePicker to select multiple videos from the library, and I get this "Connection to assetsd was interrupted or assetsd died" error when I try to export multiple recorded videos selected from the library. But it works fine if I select downloaded videos with ELCImagePicker, or if I use UIImagePicker to select the recorded videos from the library. Is there any solution for this type of problem?
My Code:
-(void)elcImagePickerController:(ELCImagePickerController *)picker didFinishPickingMediaWithInfo:(NSArray *)info
{
[self dismissViewControllerAnimated:YES completion:nil];
for (NSDictionary *dict in info) {
if ([dict objectForKey:UIImagePickerControllerMediaType] == ALAssetTypeVideo){
if ([dict objectForKey:UIImagePickerControllerOriginalImage]){
videoUrl=[dict objectForKey:UIImagePickerControllerReferenceURL];
[self InsertVideoAsset];
}
}
}
[self GetMargedVideo];
}
Sometimes my merged composition plays only the audio, not the video, but sometimes both audio and video work fine. Is there any problem with the code below? Please help me.
-(void)GetMargedVideo{
LastTime=kCMTimeZero;
TotalTime=kCMTimeZero;
mixComposition=nil; // AVMutableComposition
mainCompositionInst=nil; // AVMutableVideoComposition
mixComposition=[AVMutableComposition composition];
mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
renderWidth=1280;
renderHeight=1280;
[Objects removeAllObjects];
//LayerInstruction used to get video layer Instructions
AVMutableVideoCompositionLayerInstruction *firstlayerInstruction;
self.stokeimage.hidden=YES;
for(int i=0; i<[VideoInfo count];i++)
{
self.stokeimage.hidden=NO;
TargetVideo=i;
VideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
VideoProperty *vp =[VideoInfo objectAtIndex:i];
STime=vp.startTime;
ETime=vp.endTime;
TimeDiff=CMTimeSubtract(ETime, STime);
LastTime=TotalTime;
TotalTime=CMTimeAdd(TotalTime, TimeDiff);
vp.appearTime=LastTime;
TargetTime=LastTime;
avasset=[AVAsset assetWithURL:vp.Url];
//Insert Video and Audio to the Composition "mixComposition"
[VideoTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
ofTrack:[[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:LastTime error:nil];
if([[avasset tracksWithMediaType:AVMediaTypeAudio] count])
{
if(!GetMusic)
{
[AudioTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
ofTrack:[[avasset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:LastTime error:nil];
}
}
// Add instructions
if(vp.GetInstuction)
{
// GET INSTRUCTION: if Video already have instructions
firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];
[firstlayerInstruction setTransform:vp.LayerInstruction atTime:LastTime];
[firstlayerInstruction setOpacity:0 atTime:TotalTime];
[Objects addObject:firstlayerInstruction];
}
else
{
// GET INSTRUCTION: When a Video add first time to the composition
AVAssetTrack *assetTrack = [[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];
CGAffineTransform videoTransform = assetTrack.preferredTransform;
CGSize naturalSize = assetTrack.naturalSize;
BOOL bLandscape = NO;
CGSize renderSize = CGSizeMake(self.videoplayer.frame.size.width * [[UIScreen mainScreen] scale], self.videoplayer.frame.size.width * [[UIScreen mainScreen] scale]);
renderSize =CGSizeMake(renderWidth, renderHeight);
if(self.videoplayer.frame.size.width > self.videoplayer.frame.size.height && bIsVideoPortrait)
{
bLandscape = YES;
renderSize = CGSizeMake(renderSize.height, renderSize.width);
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
}
else if(self.videoplayer.frame.size.height > self.videoplayer.frame.size.width && !bIsVideoPortrait)
{
bLandscape = YES;
renderSize = CGSizeMake(renderSize.height, renderSize.width);
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
}
//Orientation Check
CGAffineTransform firstTransform = assetTrack.preferredTransform;
BOOL PotraitVideo=NO;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
PotraitVideo=YES;
// NSLog(@"Portrait Video");
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
PotraitVideo=YES;
// NSLog(@"Portrait Video");
}
//Orientation Check Finish
if(bIsVideoPortrait)
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
scaleValue = 1;
translationPoint = CGPointMake(self.videoplayer.frame.origin.x, self.videoplayer.frame.origin.y);
CGFloat pointX = translationPoint.x * naturalSize.width / self.videoplayer.frame.size.width;
CGFloat pointY = translationPoint.y * naturalSize.height / self.videoplayer.frame.size.height;
pointY=0;
pointX=0;
CGAffineTransform new = CGAffineTransformConcat(videoTransform, CGAffineTransformMakeScale(scaleValue, scaleValue));
CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(pointX, pointY));
CGFloat rotateTranslateX = 0;
CGFloat rotateTranslateY = 0;
if(rotationValue - 0.0f > 0.01f && rotationValue - 180.f < 0.01)
rotateTranslateX = MIN((naturalSize.width * rotationValue) / 90.0f, naturalSize.width);
if(rotationValue - 90.0f > 0.01f && rotationValue < 360.0f)
rotateTranslateY = MIN((naturalSize.height * rotationValue) / 180.0f, naturalSize.height);
CGAffineTransform rotationT = CGAffineTransformConcat(newer, CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(rotationValue)));
CGAffineTransform rotateTranslate = CGAffineTransformConcat(rotationT, CGAffineTransformMakeTranslation(rotateTranslateX, rotateTranslateY));
CGSize temp = CGSizeApplyAffineTransform(assetTrack.naturalSize, videoTransform);
CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height));
if(bLandscape)
{
size = CGSizeMake(size.height, size.width);
}
float s1 = renderSize.width/size.width;
float s2 = renderSize.height/size.height;
float s = MIN(s1, s2);
CGAffineTransform new2 = CGAffineTransformConcat(rotateTranslate, CGAffineTransformMakeScale(s,s));
float x = (renderSize.width - size.width*s)/2;
float y = (renderSize.height - size.height*s)/2;
newer2 = CGAffineTransformIdentity;
if(bLandscape)
newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));
else
newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));
//Store layerInstruction to an array "Objects"
[layerInstruction setTransform:newer2 atTime:LastTime];
[layerInstruction setOpacity:0.0 atTime: TotalTime];
[Objects addObject:layerInstruction];
vp.GetInstuction=YES;
vp.LayerInstruction=newer2;
vp.Portrait=PotraitVideo;
[VideoInfo replaceObjectAtIndex:i withObject:vp];
}
}
if(GetMusic)
{
OriginalAsset=mixComposition;
AudioTrack=nil;
AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, TotalTime)
ofTrack:[[MusicAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
//Apply all the instruction to the the Videocomposition "mainCompositionInst"
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, TotalTime);
mainInstruction.layerInstructions = Objects;
mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
[self PlayVideo];
}

How to record video with overlay view

Hi, I am trying to record video with an overlay.
I have written:
-(void)addOvelayViewToVideo:(NSURL *)videoURL
to add an overlay view to the recorded video, but it is not working.
I wrote the code to record video in viewDidLoad using AVCaptureSession.
//In ViewDidLoad
//CONFIGURE DISPLAY OUTPUT
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.previewLayer.frame = self.view.frame;
[self.view.layer addSublayer:self.previewLayer];
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if(error.code != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if(value)
{
isSuccess = [value boolValue];
}
}
if(isSuccess)
{
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[self addOverviewToVideo:outputFileURL];
}
else{
NSLog(@"could not be saved to photos album.");
}
}
}
-(void)addOvelayViewToVideo:(NSURL *)videoURL
{
AVAsset *asset = [AVAsset assetWithURL:videoURL];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videoLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoLayerInstruction setOpacity:0.0 atTime:asset.duration];
compositionInstruction.layerInstructions = [NSArray arrayWithObject:videoLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComposition.instructions = [NSArray arrayWithObject:compositionInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"sampleHUD"];
[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSLog(@"renderSize:%f,%f", videoComposition.renderSize.width, videoComposition.renderSize.height);
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = videoURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = videoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//save the video in photos album
});
}];
}
I am still unable to figure out what is going wrong here and need guidance on this.
Can I add an overlay while recording video?
Any help will be appreciated.
