I am working on an application in which I record a video. When recording finishes, I overlay a GIF image on it using a library.
My code for playing the video and adding the GIF image as an overlay:
self.avPlayer = [AVPlayer playerWithURL:self.urlstring];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
videoLayer.frame = self.preview_view.bounds;
videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.preview_view.layer addSublayer:videoLayer];
NSURL *url = [[NSBundle mainBundle] URLForResource:@"02" withExtension:@"gif"];
self.img_gif.image = [UIImage animatedImageWithAnimatedGIFData:[NSData dataWithContentsOfURL:url]];
But now I want to merge and save the video with this GIF image overlaid. I googled it but didn't find what I want.
Thank you for your help.
This is the best answer for merging a video with a GIF image.
- (void)mixVideoAsset:(AVAsset *)videoAsset {
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
NSLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[self startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
- (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
- (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// In kCGImagePropertyGIFDictionary, the values of kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;
}
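A note on the completion handler in step 5 above: it assumes the export succeeded before writing to the photo library. A minimal status check (a sketch, reusing the same exporter variable) makes failures visible:
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            // Safe to save or play exporter.outputURL here.
            NSLog(@"export finished: %@", exporter.outputURL);
        } else {
            // Failed or cancelled; the error explains why (bad output path,
            // invalid composition instructions, etc.).
            NSLog(@"export status %ld, error: %@", (long)exporter.status, exporter.error);
        }
    });
}];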
Here is the Swift version of @Jitendra Modi's answer, and it worked like a charm.
Swift 5.2:
func animationForGif(with url: URL) -> CAKeyframeAnimation? {
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contents))
var frames: [CGImage] = []
var delayTimes: [CGFloat] = []
var totalTime: CGFloat = 0.0
// var gifWidth: CGFloat, gifHeight: CGFloat
guard let gifSource = CGImageSourceCreateWithURL(url as CFURL, nil) else {
print("Can not get image source from the gif: \(url)")
return nil
}
// get frame
let frameCount = CGImageSourceGetCount(gifSource)
for i in 0..<frameCount {
guard let frame = CGImageSourceCreateImageAtIndex(gifSource, i, nil) else {
continue
}
guard let dic = CGImageSourceCopyPropertiesAtIndex(gifSource, i, nil) as? [AnyHashable: Any] else { continue }
// gifWidth = dic[kCGImagePropertyPixelWidth] as? CGFloat ?? 0
// gifHeight = dic[kCGImagePropertyPixelHeight] as? CGFloat ?? 0
guard let gifDic: [AnyHashable: Any] = dic[kCGImagePropertyGIFDictionary] as? [AnyHashable: Any] else { continue }
let delayTime = gifDic[kCGImagePropertyGIFDelayTime] as? CGFloat ?? 0
frames.append(frame)
delayTimes.append(delayTime)
totalTime += delayTime
}
if frames.count == 0 {
return nil
}
assert(frames.count == delayTimes.count)
var times: [NSNumber] = []
var currentTime: CGFloat = 0
for i in 0..<delayTimes.count {
times.append(NSNumber(value: Double(currentTime / totalTime)))
currentTime += delayTimes[i]
}
animation.keyTimes = times
animation.values = frames
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear)
animation.duration = Double(totalTime)
animation.repeatCount = .greatestFiniteMagnitude
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.isRemovedOnCompletion = false
return animation
}
And you can use this animation:
let gifLayer = CALayer()
gifLayer.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
if let animation = animationForGif(with: gifUrl) {
gifLayer.add(animation, forKey: "contents")
}
parentLayer.addSublayer(gifLayer)
You can try any of the code below for screen recording. It will merge your video and GIF.
You can download a sample provided by Apple from the link below. https://developer.apple.com/library/mac/samplecode/AVScreenShack/Introduction/Intro.html
https://github.com/alskipp/ASScreenRecorder
http://codethink.no-ip.org/wordpress/archives/673
Hope this helps you.
Related
I am creating a video-based application where I have to select any video from the local gallery and add a text over the video using a CATextLayer. For this I am using the code below:
- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL
{
if (videoURL == nil)
return;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
// create the layer with the watermark image
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)image.CGImage;
aLayer.frame = CGRectMake(50, 100, image.size.width, image.size.height);
aLayer.opacity = 0.9;
//sorts the layer in proper order
AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = [videoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// create text Layer
CATextLayer* titleLayer = [CATextLayer layer];
titleLayer.backgroundColor = [UIColor clearColor].CGColor;
titleLayer.string = @"Dummy text";
titleLayer.font = CFBridgingRetain(@"Helvetica");
titleLayer.fontSize = 28;
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(0, 50, videoSize.width, videoSize.height / 6);
[parentLayer addSublayer:titleLayer];
//create the composition and add the instructions to insert the layer:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
// export video
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
NSLog (#"created exporter. supportedFileTypes: %#", _assetExport.supportedFileTypes);
NSString* videoName = #"NewWatermarkedVideo.mov";
NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//Final code here
switch (_assetExport.status)
{
case AVAssetExportSessionStatusUnknown:
NSLog(@"Unknown");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"Waiting");
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"Exporting");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"Created new water mark image");
_playButton.hidden = NO;
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed- %@", _assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Cancelled");
break;
}
}
];
}
Using the above code, I am able to set text over my video. But now I have to add either a fade-in/fade-out animation to the CATextLayer, or display moving text on the video. Please advise me. Thanks in advance!
Try using the CAKeyframeAnimation class declared in the Core Animation framework.
For example:
//start from zero opacity
titleLayer.opacity = 0;
CAKeyframeAnimation *fadeInAndOut = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
fadeInAndOut.duration = 5.0; //5 seconds will be your fade in fade out animation.
fadeInAndOut.autoreverses = NO;
// from 0.0 * animDuration time to 0.2 * animDuration time your animation will animate opacity from zero to 1.0.
// from 0.2 * animDuration time to 0.8 * animDuration time your opacity will be 1.0
// from 0.8 * animDuration time to 1.0 * animDuration time your animation will animate opacity from 1.0 opacity to zero.
fadeInAndOut.keyTimes = @[@(0.0), @(0.2), @(0.8), @(1.0)];
fadeInAndOut.values = @[@(0.0), @(1.0), @(1.0), @(0.0)];
fadeInAndOut.beginTime = 1.0;
fadeInAndOut.removedOnCompletion = NO;
fadeInAndOut.fillMode = kCAFillModeBoth;
[titleLayer addAnimation:fadeInAndOut forKey:nil];
I hope it's clear. Using the same approach, you can animate your label's position.
UPDATE
To animate the position, try this:
CAKeyframeAnimation *fadeInAndOut = [CAKeyframeAnimation animationWithKeyPath:@"position"];
fadeInAndOut.duration = 5.0;
fadeInAndOut.autoreverses = NO;
fadeInAndOut.keyTimes = @[@(0.0), @(0.2), @(0.8), @(1.0)];
fadeInAndOut.values = @[[NSValue valueWithCGPoint:CGPointMake(20, 40)],
[NSValue valueWithCGPoint:CGPointMake(200, 40)],
[NSValue valueWithCGPoint:CGPointMake(200, 40)],
[NSValue valueWithCGPoint:CGPointMake(400, 40)]];
fadeInAndOut.beginTime = 1.0;
fadeInAndOut.removedOnCompletion = NO;
fadeInAndOut.fillMode = kCAFillModeBoth;
[titleLayer addAnimation:fadeInAndOut forKey:nil];
I hardcoded the points for explanation purposes; you can calculate the points and the animation duration yourself.
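One caveat if these animations are later rendered into a file through AVVideoCompositionCoreAnimationTool rather than shown on screen: in that context a beginTime of 0 means "now" (CACurrentMediaTime()), not the start of the video. A small sketch of the adjustment, assuming the same fadeInAndOut animation:
// For export via AVVideoCompositionCoreAnimationTool, anchor the animation
// to the start of the video instead of wall-clock time.
fadeInAndOut.beginTime = AVCoreAnimationBeginTimeAtZero; // or AVCoreAnimationBeginTimeAtZero plus an offset
fadeInAndOut.removedOnCompletion = NO;
[titleLayer addAnimation:fadeInAndOut forKey:nil];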
First, add a parent layer:
let parentLayer = CALayer()
After that, add an overlay layer that will carry the fade animation:
let overlayLayer = CALayer()
And call the extension to add the fade animation:
overlayLayer.addFadeAnimationWithTime(showTime: startTime, endTime: endTime)
And add it to the parent layer:
parentLayer.addSublayer(overlayLayer)
The extension for the fade animation, with startTime and endTime:
extension CALayer {
func addFadeAnimationWithTime(showTime:CGFloat, endTime:CGFloat) {
let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity")
fadeInAnimation.duration = 1
fadeInAnimation.fromValue = NSNumber(value: 0)
fadeInAnimation.toValue = NSNumber(value: 1)
fadeInAnimation.isRemovedOnCompletion = false
fadeInAnimation.beginTime = CFTimeInterval(showTime)
fadeInAnimation.fillMode = CAMediaTimingFillMode.backwards
self.add(fadeInAnimation, forKey: "fadeIn")
if endTime > 0 {
let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity")
fadeOutAnimation.duration = 1
fadeOutAnimation.fromValue = NSNumber(value: 1)
fadeOutAnimation.toValue = NSNumber(value: 0)
fadeOutAnimation.isRemovedOnCompletion = false
fadeOutAnimation.beginTime = CFTimeInterval(endTime)
fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards
self.add(fadeOutAnimation, forKey: "fadeOut")
}
}
}
I have an AVAsset with an AVAssetTrack that has a size (for example, (width = 1920, height = 1080)). What I need is to fit this asset into a given screen size (for example, (width = 320, height = 568)): if the asset is landscape, rotate it (90 degrees); if it is square, add black stripes on the top and bottom. I tried this:
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack)
{
if (handler)
handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
return;
}
CGFloat newHeight = [videoTrack naturalSize].height/3*4;
CGFloat newWidth = [videoTrack naturalSize].width/3*4;
const CGFloat videoAspectRatio = newWidth / newHeight;
const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
CGFloat scale = 1.f;
if (videoAspectRatio > toSizeAspectRatio)
{
scale = toSize.height / newHeight;
}
else
{
scale = toSize.width /newWidth;
}
CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
CGAffineTransform translateTrans = CGAffineTransformIdentity;
if (videoAspectRatio > toSizeAspectRatio)
{
if (offsetRatioPoint)
{
const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
else
{
const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
}
else
{
if (offsetRatioPoint)
{
const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
else
{
const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
}
CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
// Rotate transformation
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
videoComposition.renderSize = toSize;
int32_t frameRate = 30;
videoComposition.frameDuration = CMTimeMake(1, frameRate);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
exporter.videoComposition = videoComposition;
exporter.shouldOptimizeForNetworkUse = YES;
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
if (toURL)
exportPath = toURL.path;
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
__block NSURL* outURL = [NSURL fileURLWithPath: exportPath];
exporter.outputURL = outURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.timeRange = instruction.timeRange;
NSLog(#"%#", exportPath);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
if (!toURL)
{
if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
[[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
NSError *error;
if (![[NSFileManager defaultManager] moveItemAtPath: exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
NSLog(#"Error %#", error);
}
outURL = [NSURL fileURLWithPath: [GlobalConst fullMoviePath] ];
}
NSLog(#"%#", outURL);
handler(outURL, nil);
}];
}
It almost does the first step (rotating the landscape asset), but the resulting asset is slightly zoomed in. Thanks in advance for any advice.
I solved this problem using the code below. In this code I rotate a landscape video to portrait, make it square, and also add an image as a watermark.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
int videoDimention;
// make it square
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGSize videoSize;
NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
if(isPortrait) {
//videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
videoDimention=0;// for Portrait
} else {
videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
videoDimention=1;// for Landscape
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
videoDimention=2; // for square
double height=clipVideoTrack.naturalSize.height;
[userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
}
else{
videoComposition.renderSize =videoSize;
}
// videoComposition.renderScale=.5;
if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
videoComposition.frameDuration = CMTimeMake(1, 15);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
videoComposition.frameDuration = CMTimeMake(1, 20);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
videoComposition.frameDuration = CMTimeMake(1, 25);
}
else{
videoComposition.frameDuration = CMTimeMake(1, 30);
}
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
// rotate to portrait
if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
}
//for water mark
CGSize sizeOfVideo=[asset naturalSize];
//Image of watermark
UIImage *myImage=[UIImage imageNamed:@"watermark"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
}
else{
layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
}
// layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
// AVMutableComposition *composition = [AVMutableComposition composition];
// [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// e.g .mov type
exportSession.outputURL = fileURL;
exportSession.videoComposition = videoComposition;
// [exportSession addObserver: forKeyPath:#"progress" options:NSKeyValueObservingOptionNew context:NULL];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
AVAssetExportSessionStatus status = [exportSession status];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
VideoEditVC *controller=[[VideoEditVC alloc] init];
controller.isFirst=YES;
controller.videoSize=videoDimention;
[self.navigationController pushViewController:controller animated:YES];
self.delegate=controller;
});
}];
});
You also need to implement this method:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == 0)
return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == size.width)
return UIInterfaceOrientationPortraitUpsideDown;
else
return UIInterfaceOrientationPortrait;
}
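The tx/ty checks above are a heuristic and can misreport orientation for assets whose preferred transform includes a crop or an unusual translation. An alternative sketch (my own variation, not part of the original answer) reads the rotation from the transform's a/b/c/d components, the same comparisons used elsewhere on this page:
- (UIInterfaceOrientation)orientationFromTransform:(CGAffineTransform)t
{
    // a/b/c/d encode the 0/90/180/270-degree rotations the capture pipeline produces.
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
        return UIInterfaceOrientationPortrait;           // rotated 90 degrees
    if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)
        return UIInterfaceOrientationPortraitUpsideDown; // rotated -90 degrees
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
        return UIInterfaceOrientationLandscapeLeft;      // rotated 180 degrees
    return UIInterfaceOrientationLandscapeRight;         // identity transform
}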
I need to implement the following: there is a GIF overlaid on a video, and I want to composite the video and the GIF into a new video. I'm using the following code, but the result is only the video, without the GIF:
- (void)mixVideoAsset:(AVAsset *)videoAsset {
LLog(#"Begining");
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [paths objectAtIndex:0];
// NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
// [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
// NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
LLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
Add Gif Watermark
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[BBGifManager startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
Add CALayer Animations
+ (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
Create CAKeyFrameAnimation
+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// In kCGImagePropertyGIFDictionary, the values of kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
return animation;
}
You should adjust your animation settings for CoreAnimation:
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
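Concretely, the tail of the animationForGifWithURL: method from the question would end up like this (only the two timing settings above are new; everything else is unchanged):
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero; // start with the video, not at CACurrentMediaTime()
animation.removedOnCompletion = NO;                   // keep the animation alive for the whole export
return animation;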
Just in case, here's a Swift 3 example of how to do the same thing: insert animated frames/images into the video (not exactly a GIF, but an array of images). It uses AVAssetExportSession and AVMutableVideoComposition together with AVMutableVideoCompositionInstruction, and CAKeyframeAnimation to animate the frames.
I am using ELCImagePicker to select multiple videos from the library, and I get a "Connection to assetsd was interrupted or assetsd died" error when I try to export multiple recorded videos selected from the library. But it works fine if I select all downloaded videos using ELCImagePicker, or if I use UIImagePicker to select these recorded videos from the library. Is there any solution for this type of problem?
My Code:
-(void)elcImagePickerController:(ELCImagePickerController *)picker didFinishPickingMediaWithInfo:(NSArray *)info
{
[self dismissViewControllerAnimated:YES completion:nil];
for (NSDictionary *dict in info) {
if ([dict objectForKey:UIImagePickerControllerMediaType] == ALAssetTypeVideo){
if ([dict objectForKey:UIImagePickerControllerOriginalImage]){
videoUrl=[dict objectForKey:UIImagePickerControllerReferenceURL];
[self InsertVideoAsset];
}
}
}
[self GetMargedVideo];
}
Sometimes my merged composition plays only the audio, not the video, but sometimes both audio and video work fine. Is there any problem with the code below? Please help me...
-(void)GetMargedVideo{
LastTime=kCMTimeZero;
TotalTime=kCMTimeZero;
mixComposition=nil; // AVMutableComposition
mainCompositionInst=nil; // AVMutableVideoComposition
mixComposition=[AVMutableComposition composition];
mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
renderWidth=1280;
renderHeight=1280;
[Objects removeAllObjects];
//LayerInstruction used to get video layer Instructions
AVMutableVideoCompositionLayerInstruction *firstlayerInstruction;
self.stokeimage.hidden=YES;
for(int i=0; i<[VideoInfo count];i++)
{
self.stokeimage.hidden=NO;
TargetVideo=i;
VideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
VideoProperty *vp =[VideoInfo objectAtIndex:i];
STime=vp.startTime;
ETime=vp.endTime;
TimeDiff=CMTimeSubtract(ETime, STime);
LastTime=TotalTime;
TotalTime=CMTimeAdd(TotalTime, TimeDiff);
vp.appearTime=LastTime;
TargetTime=LastTime;
avasset=[AVAsset assetWithURL:vp.Url];
//Insert Video and Audio to the Composition "mixComposition"
[VideoTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
ofTrack:[[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:LastTime error:nil];
if([[avasset tracksWithMediaType:AVMediaTypeAudio] count])
{
if(!GetMusic)
{
[AudioTrack insertTimeRange:CMTimeRangeMake(STime, TimeDiff)
ofTrack:[[avasset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:LastTime error:nil];
}
}
// Add instructions
if(vp.GetInstuction)
{
// GET INSTRUCTION: if the video already has instructions
firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];
[firstlayerInstruction setTransform:vp.LayerInstruction atTime:LastTime];
[firstlayerInstruction setOpacity:0 atTime:TotalTime];
[Objects addObject:firstlayerInstruction];
}
else
{
// GET INSTRUCTION: when a video is added to the composition for the first time
AVAssetTrack *assetTrack = [[avasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:VideoTrack];
CGAffineTransform videoTransform = assetTrack.preferredTransform;
CGSize naturalSize = assetTrack.naturalSize;
BOOL bLandscape = NO;
CGSize renderSize = CGSizeMake(self.videoplayer.frame.size.width * [[UIScreen mainScreen] scale], self.videoplayer.frame.size.width * [[UIScreen mainScreen] scale]);
renderSize =CGSizeMake(renderWidth, renderHeight);
if(self.videoplayer.frame.size.width > self.videoplayer.frame.size.height && bIsVideoPortrait)
{
bLandscape = YES;
renderSize = CGSizeMake(renderSize.height, renderSize.width);
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
}
else if(self.videoplayer.frame.size.height > self.videoplayer.frame.size.width && !bIsVideoPortrait)
{
bLandscape = YES;
renderSize = CGSizeMake(renderSize.height, renderSize.width);
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
}
//Orientation Check
CGAffineTransform firstTransform = assetTrack.preferredTransform;
BOOL PotraitVideo=NO;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
PotraitVideo=YES;
// NSLog(#"Potratit Video");
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
PotraitVideo=YES;
// NSLog(#"Potratit Video");
}
//Orientation Check Finish
if(bIsVideoPortrait)
naturalSize = CGSizeMake(naturalSize.height, naturalSize.width);
scaleValue = 1;
translationPoint = CGPointMake(self.videoplayer.frame.origin.x, self.videoplayer.frame.origin.y);
CGFloat pointX = translationPoint.x * naturalSize.width / self.videoplayer.frame.size.width;
CGFloat pointY = translationPoint.y * naturalSize.height / self.videoplayer.frame.size.height;
pointY=0;
pointX=0;
CGAffineTransform new = CGAffineTransformConcat(videoTransform, CGAffineTransformMakeScale(scaleValue, scaleValue));
CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(pointX, pointY));
CGFloat rotateTranslateX = 0;
CGFloat rotateTranslateY = 0;
if(rotationValue - 0.0f > 0.01f && rotationValue - 180.f < 0.01)
rotateTranslateX = MIN((naturalSize.width * rotationValue) / 90.0f, naturalSize.width);
if(rotationValue - 90.0f > 0.01f && rotationValue < 360.0f)
rotateTranslateY = MIN((naturalSize.height * rotationValue) / 180.0f, naturalSize.height);
CGAffineTransform rotationT = CGAffineTransformConcat(newer, CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(rotationValue)));
CGAffineTransform rotateTranslate = CGAffineTransformConcat(rotationT, CGAffineTransformMakeTranslation(rotateTranslateX, rotateTranslateY));
CGSize temp = CGSizeApplyAffineTransform(assetTrack.naturalSize, videoTransform);
CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
if(bLandscape)
{
size = CGSizeMake(size.height, size.width);
}
float s1 = renderSize.width/size.width;
float s2 = renderSize.height/size.height;
float s = MIN(s1, s2);
CGAffineTransform new2 = CGAffineTransformConcat(rotateTranslate, CGAffineTransformMakeScale(s,s));
float x = (renderSize.width - size.width*s)/2;
float y = (renderSize.height - size.height*s)/2;
newer2 = CGAffineTransformIdentity;
if(bLandscape)
newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));
else
newer2 = CGAffineTransformConcat(new2, CGAffineTransformMakeTranslation(x, y));
//Store layerInstruction to an array "Objects"
[layerInstruction setTransform:newer2 atTime:LastTime];
[layerInstruction setOpacity:0.0 atTime: TotalTime];
[Objects addObject:layerInstruction];
vp.GetInstuction=YES;
vp.LayerInstruction=newer2;
vp.Portrait=PotraitVideo;
[VideoInfo replaceObjectAtIndex:i withObject:vp];
}
}
if(GetMusic)
{
OriginalAsset=mixComposition;
AudioTrack=nil;
AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, TotalTime)
ofTrack:[[MusicAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
//Apply all the instructions to the video composition "mainCompositionInst"
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, TotalTime);
mainInstruction.layerInstructions = Objects;
mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
[self PlayVideo];
}
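When a merged composition intermittently plays audio only, it can help to validate the video composition against the asset before playing or exporting; AVVideoComposition can check its instructions for gaps, overlaps, and missing track IDs. A minimal sketch, assuming the same mixComposition, mainCompositionInst, and TotalTime as above:
// Validate instruction time ranges and track IDs against the composition.
BOOL valid = [mainCompositionInst isValidForAsset:mixComposition
                                        timeRange:CMTimeRangeMake(kCMTimeZero, TotalTime)
                               validationDelegate:nil];
if (!valid) {
    // Typical causes: instruction time ranges that overlap or leave gaps,
    // or layer instructions referencing tracks not present in the asset.
    NSLog(@"video composition failed validation");
}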
Hi, I am trying to record video with an overlay.
I have written:
-(void)addOvelayViewToVideo:(NSURL *)videoURL
to add an overlay view to the recorded video, but it is not working.
I wrote the code to record the video in viewDidLoad using an AVCaptureSession.
//In ViewDidLoad
//CONFIGURE DISPLAY OUTPUT
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.previewLayer.frame = self.view.frame;
[self.view.layer addSublayer:self.previewLayer];
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if(error.code != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if(value)
{
isSuccess = [value boolValue];
}
}
if(isSuccess)
{
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[self addOverviewToVideo:outputFileURL];
}
else{
NSLog(#"could not saved to photos album.");
}
}
}
-(void)addOvelayViewToVideo:(NSURL *)videoURL
{
AVAsset *asset = [AVAsset assetWithURL:videoURL];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videoLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoLayerInstruction setOpacity:0.0 atTime:asset.duration];
compositionInstruction.layerInstructions = [NSArray arrayWithObject:videoLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComposition.instructions = [NSArray arrayWithObject:compositionInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"sampleHUD"];
[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSLog(#"renderSize:%f,%f", videoComposition.renderSize.width, videoComposition.renderSize.height);
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = videoURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = videoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//save the video in photos album
});
}];
}
I am still unable to figure out what is going wrong here and need guidance on it.
Can I add an overlay while recording video?
Any help will be appreciated.
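One thing worth checking in a setup like this: the export above writes to the same videoURL the asset was loaded from and never inspects the session status, so any failure is silent. A small diagnostic sketch (the output file name here is a hypothetical choice, not from the original code):
// Export to a fresh file instead of the source URL, then inspect the result.
NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"overlayedVideo.mov"];
exportSession.outputURL = [NSURL fileURLWithPath:outPath];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (exportSession.status != AVAssetExportSessionStatusCompleted) {
            NSLog(@"export failed: %@", exportSession.error);
        }
    });
}];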