Moving overlay on video with touch in iOS

I have been working on video editing and have successfully done the following:
1. Merge multiple videos.
2. Adjust the speed of a video.
3. Clip a video.
Now I need to show an image over a video and then move that image around on the video. I have two problems:
PROBLEM 1:
I have successfully done the overlay part, i.e. adding an image over the video. But how am I supposed to move this image?
PROBLEM 2:
I am also unable to preview the video with the image over it. I can only check the result if I save the video to some path and then play it from that path.
Below is the code I used to accomplish the overlay task:
composition_ = [[AVMutableComposition alloc]init];
AVMutableCompositionTrack *compositionTrack = [composition_ addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo]firstObject];
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.asset.duration) ofTrack:assetTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
videoCompositionInstruction.layerInstructions = @[videoCompositionLayerInstruction];
[videoCompositionLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoCompositionLayerInstruction setOpacity:0.0 atTime:self.asset.duration];
CGSize naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
videoComposition_ = [AVMutableVideoComposition videoComposition];
videoComposition_.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
videoComposition_.instructions = @[videoCompositionInstruction];
videoComposition_.frameDuration = CMTimeMake(1, 30);
//start image code from here
CALayer *overlayLayer = [CALayer layer];
overlayLayer.contents = (__bridge id _Nullable)([[UIImage imageNamed:@"overlay_icon.png"] CGImage]);
overlayLayer.frame = CGRectMake(naturalSize.width/2 , naturalSize.height/2, naturalSize.width/8, naturalSize.height/8);
overlayLayer.opacity = 0.5;
[overlayLayer setMasksToBounds:YES];
overlayLayer.backgroundColor = [[UIColor redColor]CGColor];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);
videoLayer.backgroundColor = [[UIColor redColor]CGColor];
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComposition_.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
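A note on PROBLEM 2 (an editorial aside, not from the original thread): AVVideoCompositionCoreAnimationTool is intended for offline rendering (export) only, which is why the overlay only shows up after saving the file. For a live preview, the documented route is AVSynchronizedLayer, which ties a layer tree's timing to an AVPlayerItem. A minimal Swift sketch, assuming the composition and video composition built above (with no animationTool set on the preview copy):
import AVFoundation
import UIKit

// Preview sketch: play the composition and host the overlay in an
// AVSynchronizedLayer so its animations follow the item's timeline.
func makePreviewPlayer(composition: AVComposition,
                       videoComposition: AVVideoComposition,
                       overlayLayer: CALayer,
                       in view: UIView) -> AVPlayer {
    let item = AVPlayerItem(asset: composition)
    item.videoComposition = videoComposition // must NOT carry the animationTool

    let player = AVPlayer(playerItem: item)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = view.bounds
    view.layer.addSublayer(playerLayer)

    let syncLayer = AVSynchronizedLayer(playerItem: item)
    syncLayer.frame = view.bounds
    syncLayer.addSublayer(overlayLayer)
    view.layer.addSublayer(syncLayer)
    return player
}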

-(void)prepareOverlayVideo{
composition_ = [[AVMutableComposition alloc]init];//correct
AVMutableCompositionTrack *compositionTrack = [composition_ addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];//correct
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.asset.duration) ofTrack:
[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];//correct
assetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo]firstObject];//correct
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];//correct
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,self.asset.duration);//correct
AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:assetTrack];//correct
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;//correct
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 &&
videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
//[videoCompositionLayerInstruction setOpacityRampFromStartOpacity:1.f toEndOpacity:0.5f timeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)];
// CGFloat FirstAssetScaleToFitRatio = 320.0/assetTrack.naturalSize.height;
// CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
// [videoCompositionLayerInstruction setTransform:CGAffineTransformConcat(assetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
[videoCompositionLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoCompositionLayerInstruction setOpacity:0.0 atTime:self.asset.duration];
videoCompositionInstruction.layerInstructions = [NSArray arrayWithObjects:videoCompositionLayerInstruction,nil];//correct
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
}
else {
naturalSize = assetTrack.naturalSize;
}
videoComposition_ = [AVMutableVideoComposition videoComposition];
videoComposition_.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
videoComposition_.instructions = @[videoCompositionInstruction];
videoComposition_.frameDuration = CMTimeMake(1, 30);
}
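As an aside (an illustration, not part of the original answer): the four transform comparisons above simply classify the track's preferredTransform into the four 90-degree orientations. A compact Swift equivalent:
// Portrait-recorded video carries its rotation in the b/c terms of the
// preferredTransform, so a == d == 0 and |b| == |c| == 1.
func isPortraitTransform(_ t: CGAffineTransform) -> Bool {
    return t.a == 0 && t.d == 0 && abs(t.b) == 1 && abs(t.c) == 1
}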
AnimationTool Part
CGRect newRect = [self.view convertRect:overlayView.frame toView:videoRectView];
CGFloat current_x = newRect.origin.x;
CGFloat current_y;
if(isSizeChanged && overlayView.frame.size.width>60)
current_y = newRect.origin.y+50;
else
current_y = newRect.origin.y;
overlayView.frame = newRect;
imageView_.frame = newRect;
CGPoint overlayPoint = [self mapSize:assetTrack.naturalSize withSize:playerLayer.videoRect.size forPoint:CGPointMake(current_x, current_y)];
//invert Y-axis
CGFloat finalOverlayPoint_y = CGRectGetMaxY(CGRectMake(0, 0, naturalSize.width, naturalSize.height)) - overlayPoint.y-50;
CGPoint newOverlayPoint = CGPointMake(overlayPoint.x, finalOverlayPoint_y);
overlayView.layer.frame = CGRectMake(overlayPoint.x, newOverlayPoint.y,finalOverlayRect.size.width, finalOverlayRect.size.height);
imageView_.layer.frame = CGRectMake(overlayPoint.x, newOverlayPoint.y,finalOverlayRect.size.width, finalOverlayRect.size.height);
if(isShapeACircle)
imageView_.layer.cornerRadius = finalOverlayRect.size.height/2;
else
imageView_.layer.cornerRadius = 0.f;
imageView_.layer.borderWidth = 5.f;
overlayView.layer.contentsScale = [UIScreen mainScreen].scale;
[overlayView.layer setMasksToBounds:YES];
//add animation
CABasicAnimation *animation1 = [CABasicAnimation animationWithKeyPath:@"opacity"];
[animation1 setDuration:0.5 ];
[animation1 setFromValue:[NSNumber numberWithFloat:1.0]];
[animation1 setToValue:[NSNumber numberWithFloat:1.0]];
//[animation1 setBeginTime:[self.timeLabel.text floatValue]];
[animation1 setBeginTime:_overlayTime];
[animation1 setRemovedOnCompletion:YES];
[animation1 setFillMode:kCAFillModeForwards];
[imageView_.layer addAnimation:animation1 forKey:@"animateOpacity"];
imageView_.layer.opacity = 0.0;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);
parentLayer.backgroundColor = [[UIColor redColor]CGColor];
videoLayer.frame = CGRectMake(0 , 0, naturalSize.width, naturalSize.height);
//CALayer *watermarkLayer = [self setWaterMarkInLayer:videoLayer];
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:imageView_.layer];
//[parentLayer addSublayer:watermarkLayer];
videoComposition_.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
//Note this method: this is the part that made it work
-(CGPoint)mapSize:(CGSize)assetTrack_size withSize:(CGSize)videoLayer_size forPoint:(CGPoint)overlay_point{
NSLog(#"%f",playerLayer.videoRect.size.width);
NSLog(#"%f",playerLayer.videoRect.size.height);
NSLog(#"%f",playerLayer.videoRect.origin.x);
NSLog(#"%f",playerLayer.videoRect.origin.y);
CGFloat assetTrack_width = assetTrack_size.width;
CGFloat assetTrack_height = assetTrack_size.height;
CGFloat videoLayer_width = videoLayer_size.width;
CGFloat videoLayer_height = videoLayer_size.height;
CGFloat ratio_width = assetTrack_width/videoLayer_width;
CGFloat ratio_height = assetTrack_height/videoLayer_height;
CGFloat new_width = overlay_point.x * ratio_width;
CGFloat new_height = overlay_point.y * ratio_height;
finalOverlayRect = CGRectMake(0, 0,overlayView.frame.size.width * ratio_width , overlayView.frame.size.height * ratio_height) ;
CGPoint overlayPoint = CGPointMake(new_width, new_height);
return overlayPoint;
}
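One caveat with this mapping (my observation, not from the original answer): playerLayer.videoRect has a non-zero origin whenever the video is letterboxed inside the player layer, so subtracting that origin from the touch point before scaling keeps the overlay aligned. A Swift sketch of the same mapping with the offset applied:
// Map a point from the on-screen video rect into render-size coordinates.
// Assumes aspect-fit playback; videoRect.origin is non-zero when letterboxed.
func mapToRenderCoordinates(_ point: CGPoint, videoRect: CGRect, renderSize: CGSize) -> CGPoint {
    let local = CGPoint(x: point.x - videoRect.origin.x,
                        y: point.y - videoRect.origin.y)
    return CGPoint(x: local.x * renderSize.width / videoRect.width,
                   y: local.y * renderSize.height / videoRect.height)
}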
@pigeon_39 This is the code for the overlay. I know it's a bit late, but I was on a break for a while and had no recollection of this question whatsoever. If it helps you now, you can refer to this answer. If you face any problems, we can continue the conversation in chat here. Ask me wherever you get stuck, but do some research on your own first. Note that this is not the complete code, as I would have to do a lot of editing to provide the actual code; my code involves a lot of conditions. All the best, mate :-)

I searched everywhere on the net, but I can't seem to find a solution. Even the Apple docs on video editing make no mention of this.
Finally I came up with my own logic. Here is what I will do:
1. Capture a video and move to the next page with an image on it.
2. Add a gesture to the image so it can be moved anywhere over the video (see the sketch below).
3. Then use the above code to merge that image over the video.
I think this is the right solution. I will post the code in 2 days, i.e. when I write it, but the logic is pretty straightforward.
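For step 2, a plain UIPanGestureRecognizer is enough to drag the image around the preview. A minimal Swift sketch, assuming a hypothetical overlayImageView sitting above the player view inside a view controller:
// Let the user drag the overlay image view by following the pan translation.
let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
overlayImageView.isUserInteractionEnabled = true
overlayImageView.addGestureRecognizer(pan)

@objc func handlePan(_ gesture: UIPanGestureRecognizer) {
    let translation = gesture.translation(in: view)
    overlayImageView.center = CGPoint(x: overlayImageView.center.x + translation.x,
                                      y: overlayImageView.center.y + translation.y)
    gesture.setTranslation(.zero, in: view)
    // On .ended, convert the final frame into render-size coordinates
    // (see the mapSize: method above) before building the export layer.
}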

Related

How to show different text over video based on time range using CATextLayer? [duplicate]

I have found a few examples that show how to add a text overlay on a video.
Ray's Tutorial - http://www.raywenderlich.com/30200/avfoundation-tutorial-adding-overlays-and-animations-to-videos
This SO answer - How can I add a watermark in a captured video on iOS
There were a few others that I referenced as well. My code looks almost identical to that answer and I've been trying to tweak it to get the text overlay to only show up for a second or two at the beginning of the video. Any help on how I could accomplish that?
Here is my code. This works as is to export the video with the overlay being shown for the entire duration.
if(vA) {
videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompositionTrack insertTimeRange:videoTimerange ofTrack:[[vA tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
[videoCompositionTrack setPreferredTransform:[[[vA tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
if(error)
NSLog(#"%#", error);
}
if(aA) {
audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompositionTrack insertTimeRange:audioTimerange ofTrack:[[aA tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if(error)
NSLog(#"%#", error);
}
CGSize videoSize = [videoCompositionTrack naturalSize];
UIImage *myImage = [UIImage imageNamed:@"cover.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(5, 25, 100, 56);
aLayer.opacity = 0.7;
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = titleText;
titleLayer.fontSize = 18;
titleLayer.foregroundColor = titleColor.CGColor;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(20, 10, videoSize.width - 40, 20);
[titleLayer displayIfNeeded];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
if(titleText && [titleText length] > 0) {
[parentLayer addSublayer:titleLayer];
}
AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
[instruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [mixComposition duration])];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject:instruction];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
AVAssetExportSessionStatus status = [_assetExport status];
switch (status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"Export Error: %@", [_assetExport.error localizedDescription]);
NSLog(@"Export Error Reason: %@", [_assetExport.error localizedFailureReason]);
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export Completed");
[self performSelectorOnMainThread:@selector(updateProgressIndicator:) withObject:[NSNumber numberWithFloat:2] waitUntilDone:YES];
break;
case AVAssetExportSessionStatusUnknown:
NSLog(@"Export Unknown");
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"Export Exporting");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"Export Waiting");
break;
}
}];
I figured out what I needed to do. It wasn't anything special really. It just required a better understanding of what was all possible.
Basically all I had to do was add a basic opacity animation to the layer with text in it.
// My original code for creating the text layer
CATextLayer *titleLayer = [CATextLayer layer];
.
.
.
[titleLayer displayIfNeeded];
// the code for the opacity animation which then removes the text
CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"opacity"];
[animation setDuration:0];
[animation setFromValue:[NSNumber numberWithFloat:1.0]];
[animation setToValue:[NSNumber numberWithFloat:0.0]];
[animation setBeginTime:1];
[animation setRemovedOnCompletion:NO];
[animation setFillMode:kCAFillModeForwards];
[titleLayer addAnimation:animation forKey:@"animateOpacity"];
We should add two opacity animations for a video overlay shown over a particular time range: a first animation to show the overlay at its start time, and a second animation to hide it at its end time. We should also take care to set the animations' fill mode. Sample code for a video overlay over a particular time range:
let textLayer = CATextLayer()
textLayer.opacity = 0.0
.
.
textLayer.frame = CGRect(x: 0.0, y: 0.0, width: 150.0, height:100.0)
textLayer.string = "Overlay Text"
let startVisible = CABasicAnimation.init(keyPath:"opacity")
startVisible.duration = 0.1 // for fade in duration
startVisible.repeatCount = 1
startVisible.fromValue = 0.0
startVisible.toValue = 1.0
startVisible.beginTime = overlay.startTime.seconds // overlay time range start duration
startVisible.isRemovedOnCompletion = false
startVisible.fillMode = kCAFillModeForwards
textLayer.add(startVisible, forKey: "startAnimation")
let endVisible = CABasicAnimation.init(keyPath:"opacity")
endVisible.duration = 0.1
endVisible.repeatCount = 1
endVisible.fromValue = 1.0
endVisible.toValue = 0.0
endVisible.beginTime = overlay.endTime.seconds
endVisible.fillMode = kCAFillModeForwards
endVisible.isRemovedOnCompletion = false
textLayer.add(endVisible, forKey: "endAnimation")
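One gotcha worth adding (my note, not from the original answer): Core Animation interprets a beginTime of exactly 0 as "start now", so an overlay that should be visible from the very first frame needs AVCoreAnimationBeginTimeAtZero instead:
// Avoid beginTime == 0; Core Animation treats it as CACurrentMediaTime().
startVisible.beginTime = overlay.startTime.seconds == 0
    ? AVCoreAnimationBeginTimeAtZero
    : overlay.startTime.seconds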

Add text to Video for specific time in iOS

I am creating a video-based application where I have to select a video from the local gallery and add a text over the video using CATextLayer. For this I am using the code below:
- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL
{
if (videoURL == nil)
return;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
// create the layer with the watermark image
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)image.CGImage;
aLayer.frame = CGRectMake(50, 100, image.size.width, image.size.height);
aLayer.opacity = 0.9;
//sorts the layer in proper order
AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = [videoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// create text Layer
CATextLayer* titleLayer = [CATextLayer layer];
titleLayer.backgroundColor = [UIColor clearColor].CGColor;
titleLayer.string = @"Dummy text";
titleLayer.font = CFBridgingRetain(@"Helvetica");
titleLayer.fontSize = 28;
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.frame = CGRectMake(0, 50, videoSize.width, videoSize.height / 6);
[parentLayer addSublayer:titleLayer];
//create the composition and add the instructions to insert the layer:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
// export video
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
NSLog (#"created exporter. supportedFileTypes: %#", _assetExport.supportedFileTypes);
NSString* videoName = #"NewWatermarkedVideo.mov";
NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//Final code here
switch (_assetExport.status)
{
case AVAssetExportSessionStatusUnknown:
NSLog(#"Unknown");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting");
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(#"Created new water mark image");
_playButton.hidden = NO;
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed- %#", _assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Cancelled");
break;
}
}
];
}
Using the above code, I am able to set a text over my video. But now I have to either give a fade-in/fade-out animation to the CATextLayer OR display moving text on the video. Please advise me. Thanks in advance!
Try using the CAKeyframeAnimation class declared in the Core Animation framework. For example:
//start from zero opacity
titleLayer.opacity = 0;
CAKeyframeAnimation *fadeInAndOut = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
fadeInAndOut.duration = 5.0; // your fade in/fade out animation will last 5 seconds
fadeInAndOut.autoreverses = NO;
// from 0.0 * animDuration time to 0.2 * animDuration time your animation will animate opacity from zero to 1.0.
// from 0.2 * animDuration time to 0.8 * animDuration time your opacity will be 1.0
// from 0.8 * animDuration time to 1.0 * animDuration time your animation will animate opacity from 1.0 opacity to zero.
fadeInAndOut.keyTimes = @[@(0.0), @(0.2), @(0.8), @(1.0)];
fadeInAndOut.values = @[@(0.0), @(1.0), @(1.0), @(0.0)];
fadeInAndOut.beginTime = 1.0;
fadeInAndOut.removedOnCompletion = NO;
fadeInAndOut.fillMode = kCAFillModeBoth;
[titleLayer addAnimation:fadeInAndOut forKey:nil];
I hope it's clear for you. Using the same approach, you can animate your label's position.
UPDATE
To animate the position, try this:
CAKeyframeAnimation *fadeInAndOut = [CAKeyframeAnimation animationWithKeyPath:@"position"];
fadeInAndOut.duration = 5.0;
fadeInAndOut.autoreverses = NO;
fadeInAndOut.keyTimes = @[@(0.0), @(0.2), @(0.8), @(1.0)];
fadeInAndOut.values = @[[NSValue valueWithCGPoint:CGPointMake(20, 40)],
[NSValue valueWithCGPoint:CGPointMake(200, 40)],
[NSValue valueWithCGPoint:CGPointMake(200, 40)],
[NSValue valueWithCGPoint:CGPointMake(400, 40)]];
fadeInAndOut.beginTime = 1.0;
fadeInAndOut.removedOnCompletion = NO;
fadeInAndOut.fillMode = kCAFillModeBoth;
[titleLayer addAnimation:fadeInAndOut forKey:nil];
I hardcoded the points for explanation purposes. You can calculate the points and the animation duration yourself; one way is sketched below.
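If the points come from a recorded drag, the key times can be derived from the cumulative distance between points, so the layer moves at constant speed along the path. A Swift sketch, assuming a hypothetical points: [CGPoint] array:
// Build keyTimes proportional to segment lengths along the recorded path.
func keyTimes(for points: [CGPoint]) -> [NSNumber] {
    guard !points.isEmpty else { return [] }
    var distances: [CGFloat] = [0]
    for i in 1..<points.count {
        let dx = points[i].x - points[i - 1].x
        let dy = points[i].y - points[i - 1].y
        distances.append(distances[i - 1] + hypot(dx, dy))
    }
    let total = max(distances.last ?? 1, .ulpOfOne)
    return distances.map { NSNumber(value: Double($0 / total)) }
}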
First, add a parent layer:
let parentLayer = CALayer()
After that, add an overlay layer for the fade animation:
let overlayLayer = CALayer()
Then call the extension to add the fade animation:
overlayLayer.addFadeAnimationWithTime(showTime: startTime, endTime: endTime)
And add it to the parent layer:
parentLayer.addSublayer(overlayLayer)
Here is the extension for the fade animation with startTime and endTime:
extension CALayer {
func addFadeAnimationWithTime(showTime:CGFloat, endTime:CGFloat) {
let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity")
fadeInAnimation.duration = 1
fadeInAnimation.fromValue = NSNumber(value: 0)
fadeInAnimation.toValue = NSNumber(value: 1)
fadeInAnimation.isRemovedOnCompletion = false
fadeInAnimation.beginTime = CFTimeInterval(showTime)
fadeInAnimation.fillMode = CAMediaTimingFillMode.backwards
self.add(fadeInAnimation, forKey: "fadeIn")
if endTime > 0 {
let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity")
fadeOutAnimation.duration = 1
fadeOutAnimation.fromValue = NSNumber(value: 1)
fadeOutAnimation.toValue = NSNumber(value: 0)
fadeOutAnimation.isRemovedOnCompletion = false
fadeOutAnimation.beginTime = CFTimeInterval(endTime)
fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards
self.add(fadeOutAnimation, forKey: "fadeOut")
}
}
}

Saving video with overlay of GIF image

I am working on an application in which I record a video. When recording is finished, I put a GIF image on it with the use of a library.
My code for playing the video and putting the GIF image as an overlay:
self.avPlayer = [AVPlayer playerWithURL:self.urlstring];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
videoLayer.frame = self.preview_view.bounds;
videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.preview_view.layer addSublayer:videoLayer];
NSURL *url = [[NSBundle mainBundle] URLForResource:@"02" withExtension:@"gif"];
self.img_gif.image = [UIImage animatedImageWithAnimatedGIFData:[NSData dataWithContentsOfURL:url]];
But now I want to merge and save the video with this GIF image overlaid on it. I googled it but didn't find what I want.
Thank you for your help.
This is the best answer I found for merging a video with a GIF image.
- (void)mixVideoAsset:(AVAsset *)videoAsset {
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
NSLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[self startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
- (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
- (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// within kCGImagePropertyGIFDictionary, the kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;
}
Here is the Swift version of @Jitendra Modi's answer, and it worked like a charm.
Swift 5.2:
func animationForGif(with url: URL) -> CAKeyframeAnimation? {
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contents))
var frames: [CGImage] = []
var delayTimes: [CGFloat] = []
var totalTime: CGFloat = 0.0
// var gifWidth: CGFloat, gifHeight: CGFloat
guard let gifSource = CGImageSourceCreateWithURL(url as CFURL, nil) else {
print("Can not get image source from the gif: \(url)")
return nil
}
// get frame
let frameCount = CGImageSourceGetCount(gifSource)
for i in 0..<frameCount {
guard let frame = CGImageSourceCreateImageAtIndex(gifSource, i, nil) else {
continue
}
guard let dic = CGImageSourceCopyPropertiesAtIndex(gifSource, i, nil) as? [AnyHashable: Any] else { continue }
// gifWidth = dic[kCGImagePropertyPixelWidth] as? CGFloat ?? 0
// gifHeight = dic[kCGImagePropertyPixelHeight] as? CGFloat ?? 0
guard let gifDic: [AnyHashable: Any] = dic[kCGImagePropertyGIFDictionary] as? [AnyHashable: Any] else { continue }
let delayTime = gifDic[kCGImagePropertyGIFDelayTime] as? CGFloat ?? 0
frames.append(frame)
delayTimes.append(delayTime)
totalTime += delayTime
}
if frames.count == 0 {
return nil
}
assert(frames.count == delayTimes.count)
var times: [NSNumber] = []
var currentTime: CGFloat = 0
for i in 0..<delayTimes.count {
times.append(NSNumber(value: Double(currentTime / totalTime)))
currentTime += delayTimes[i]
}
animation.keyTimes = times
animation.values = frames
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear)
animation.duration = Double(totalTime)
animation.repeatCount = .greatestFiniteMagnitude
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.isRemovedOnCompletion = false
return animation
}
And you can use this animation:
let gifLayer = CALayer()
gifLayer.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
if let animation = animationForGif(with: gifUrl) {
gifLayer.add(animation, forKey: "contents")
}
parentLayer.addSublayer(gifLayer)
You can try any of the code below for screen recording. It will merge your video and GIF.
You can download a sample provided by Apple from this link: https://developer.apple.com/library/mac/samplecode/AVScreenShack/Introduction/Intro.html
https://github.com/alskipp/ASScreenRecorder
http://codethink.no-ip.org/wordpress/archives/673
Hope this helps you.

Add GIF watermark on a video in iOS

I need to accomplish this function: there is a GIF overlay on a video, and I hope to composite the video and the GIF into a new video. I'm using the following code, but the result is only the video, without the GIF:
- (void)mixVideoAsset:(AVAsset *)videoAsset {
LLog(#"Begining");
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [paths objectAtIndex:0];
// NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
// [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
// NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
LLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
Add Gif Watermark
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[BBGifManager startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
Add CALayer Animations
+ (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
Create CAKeyFrameAnimation
+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// within kCGImagePropertyGIFDictionary, the kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values are the same
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
return animation;
}
You should adjust your animation settings for Core Animation. Core Animation treats a beginTime of 0 as "begin immediately", so use AVCoreAnimationBeginTimeAtZero for an animation that should start with the video, and keep removedOnCompletion set to NO so the animation isn't discarded before the export finishes:
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
Just in case, here's the example in Swift 3 of how to do the same: insert animated frames/images into the video (not exactly a GIF, but an array of images). It uses AVAssetExportSession and AVMutableVideoComposition together with AVMutableVideoCompositionInstruction, and a CAKeyframeAnimation to animate the frames.
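Since that example isn't reproduced here, below is a sketch of just the frame-animation part (my reconstruction under the stated assumptions): given a hypothetical images: [UIImage] array and a fixed per-frame delay, build the contents animation and attach it to the overlay layer before creating the animation tool:
// Step the layer's contents through an array of images at a fixed delay.
func frameAnimation(images: [UIImage], frameDelay: Double) -> CAKeyframeAnimation {
    let animation = CAKeyframeAnimation(keyPath: "contents")
    animation.values = images.compactMap { $0.cgImage }
    animation.duration = frameDelay * Double(images.count)
    animation.calculationMode = .discrete // hold each frame, no cross-fade
    animation.repeatCount = .greatestFiniteMagnitude
    animation.beginTime = AVCoreAnimationBeginTimeAtZero
    animation.isRemovedOnCompletion = false
    return animation
}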

How to record video with overlay view

Hi, I am trying to record video with an overlay.
I have written:
-(void)addOvelayViewToVideo:(NSURL *)videoURL
to add an overlay view on the recorded video, but it is not working.
I wrote the code to record video in viewDidLoad using AVCaptureSession.
//In ViewDidLoad
//CONFIGURE DISPLAY OUTPUT
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.previewLayer.frame = self.view.frame;
[self.view.layer addSublayer:self.previewLayer];
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if(error.code != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if(value)
{
isSuccess = [value boolValue];
}
}
if(isSuccess)
{
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[self addOverviewToVideo:outputFileURL];
}
else{
NSLog(#"could not saved to photos album.");
}
}
}
-(void)addOvelayViewToVideo:(NSURL *)videoURL
{
AVAsset *asset = [AVAsset assetWithURL:videoURL];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videoLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoLayerInstruction setOpacity:0.0 atTime:asset.duration];
compositionInstruction.layerInstructions = [NSArray arrayWithObject:videoLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComposition.instructions = [NSArray arrayWithObject:compositionInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"sampleHUD"];
[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSLog(#"renderSize:%f,%f", videoComposition.renderSize.width, videoComposition.renderSize.height);
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = videoURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = videoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//save the video in photos album
});
}];
}
I am still unable to figure out what is going wrong here and need guidance on it.
Can I add an overlay while recording video?
Any help will be appreciated.
