CGAffineTransform the AVAsset to fit device screen - iOS

I have an AVAsset with an AVAssetTrack, which has a size (for example, width = 1920, height = 1080). What I need is to fit this asset into a given screen size (for example, width = 320, height = 568): if the asset is landscape, rotate it 90 degrees; if it is square, add black bars on top and bottom. I tried this:
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack)
{
if (handler)
handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
return;
}
CGFloat newHeight = [videoTrack naturalSize].height/3*4;
CGFloat newWidth = [videoTrack naturalSize].width/3*4;
const CGFloat videoAspectRatio = newWidth / newHeight;
const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
CGFloat scale = 1.f;
if (videoAspectRatio > toSizeAspectRatio)
{
scale = toSize.height / newHeight;
}
else
{
scale = toSize.width /newWidth;
}
CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
CGAffineTransform translateTrans = CGAffineTransformIdentity;
if (videoAspectRatio > toSizeAspectRatio)
{
if (offsetRatioPoint)
{
const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
else
{
const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
}
else
{
if (offsetRatioPoint)
{
const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
else
{
const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
}
CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
// Rotate transformation
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
videoComposition.renderSize = toSize;
int32_t frameRate = 30;
videoComposition.frameDuration = CMTimeMake(1, frameRate);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
exporter.videoComposition = videoComposition;
exporter.shouldOptimizeForNetworkUse = YES;
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
if (toURL)
exportPath = toURL.path;
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
__block NSURL* outURL = [NSURL fileURLWithPath: exportPath];
exporter.outputURL = outURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.timeRange = instruction.timeRange;
NSLog(#"%#", exportPath);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
if (!toURL)
{
if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
[[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
NSError *error;
if (![[NSFileManager defaultManager] moveItemAtPath: exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
NSLog(#"Error %#", error);
}
outURL = [NSURL fileURLWithPath: [GlobalConst fullMoviePath] ];
}
NSLog(#"%#", outURL);
handler(outURL, nil);
}];
}
This almost does the first step - it rotates the landscape asset - but the resulting asset is slightly zoomed in. Thanks in advance for any advice.
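The zoom most likely comes from mixing scaled and unscaled coordinates in the translation step (t1 uses toSize.height but -scale*toSize.width). Here is a minimal sketch of a rotate-then-fit-then-center order that avoids this, assuming the track's preferredTransform is identity and a 90-degree rotation is wanted:
CGSize natural = videoTrack.naturalSize; // e.g. 1920 x 1080
// Rotate the landscape frame into a portrait frame anchored at the origin.
CGAffineTransform t = CGAffineTransformMakeTranslation(natural.height, 0);
t = CGAffineTransformRotate(t, M_PI_2); // frame is now natural.height x natural.width
// Aspect-fit the rotated frame into the render size.
CGFloat fit = MIN(toSize.width / natural.height, toSize.height / natural.width);
t = CGAffineTransformConcat(t, CGAffineTransformMakeScale(fit, fit));
// Center the result; the leftover space becomes the black bars.
t = CGAffineTransformConcat(t, CGAffineTransformMakeTranslation((toSize.width - natural.height * fit) / 2.0, (toSize.height - natural.width * fit) / 2.0));
[layerInstruction setTransform:t atTime:kCMTimeZero];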

I solved this problem using the code below. It rotates a landscape video to portrait, makes it square, and also adds an image as a watermark.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
int videoDimension;
// make it square
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGSize videoSize;
NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
if(isPortrait) {
//videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
videoDimension = 0; // for portrait
} else {
videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
videoDimension = 1; // for landscape
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
videoDimension = 2; // for square
double height=clipVideoTrack.naturalSize.height;
[userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
}
else{
videoComposition.renderSize =videoSize;
}
// videoComposition.renderScale=.5;
if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
videoComposition.frameDuration = CMTimeMake(1, 15);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
videoComposition.frameDuration = CMTimeMake(1, 20);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
videoComposition.frameDuration = CMTimeMake(1, 25);
}
else{
videoComposition.frameDuration = CMTimeMake(1, 30);
}
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
// rotate to portrait
if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
}
//for water mark
CGSize sizeOfVideo=[asset naturalSize];
//Image of watermark
UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
}
else{
layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
}
// layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
// AVMutableComposition *composition = [AVMutableComposition composition];
// [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// e.g .mov type
exportSession.outputURL = fileURL;
exportSession.videoComposition = videoComposition;
// [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
AVAssetExportSessionStatus status = [exportSession status];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
VideoEditVC *controller=[[VideoEditVC alloc] init];
controller.isFirst=YES;
controller.videoSize = videoDimension;
[self.navigationController pushViewController:controller animated:YES];
self.delegate=controller;
});
}];
});
You also need to implement this method:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == 0)
return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == size.width)
return UIInterfaceOrientationPortraitUpsideDown;
else
return UIInterfaceOrientationPortrait;
}
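A more compact variant (my own sketch, not from the original answer) reads the rotation angle straight from the preferredTransform instead of comparing tx/ty:
- (CGFloat)rotationAngleForTrack:(AVAssetTrack *)track
{
CGAffineTransform t = track.preferredTransform;
return atan2(t.b, t.a); // 0, M_PI_2, M_PI or -M_PI_2 for the four orientations
}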

Related

Video Orientation Issue

I am actually working with the AVFoundation framework.
I am trying to play two videos simultaneously, one with a filter and the other the normal video.
I have done the whole task; I'm just stuck on setting the orientation at the end, while saving the entire video to the gallery.
I have already tried a lot but could not get anything fruitful.
Please help me in setting the orientations.
//Save action
@IBAction func saveAction(_ sender: UIButton) {
HelperClass.shared().applyFilter(self.firstAsset, andSecondAsset:self.secondAsset, onviewController:self, andcompos:self.composition, completion: { (value,error,url) in
if(url != nil){
print("url",url!)
}
})
MBProgressHUD.showAdded(to:self.view, animated: true)
}
#pragma mark - Overlay task is happening here,filtered video that I am getting from applyfilter method is overlayed with foregroundvideo
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.8f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(40,0);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
//
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.5f,1.5f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 50);
MainCompositionInst.renderSize = CGSizeMake(firstTrack.naturalSize.width, firstTrack.naturalSize.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
#pragma mark - Saving the final video to photo gallery here
- (void)exportDidFinish:(AVAssetExportSession*)session onViewController:(UIViewController*)vc {
if(session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
dispatch_async(dispatch_get_main_queue(), ^{
[MBProgressHUD hideHUDForView:vc.view animated:true];
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc]
initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
// [self loadMoviePlayer:outputURL];
}
});
}];
}
}else{
[MBProgressHUD hideHUDForView:vc.view animated:true];
NSLog(#"found an issue %#",session.error);
}
}
You will have to play with the transform of the input video track. First, get the input video track's orientation and derive the corresponding transform for your AVMutableVideoCompositionLayerInstruction.
Here is the proper way to do it. I mimicked your saveVideo method with my own code base. I also changed the render size to 720 x 720 so it looks better for videos of different resolutions.
Declare the orientation and zoomOrientation variables in the .h file so they can be used globally in the .m file, like AVCaptureVideoOrientation orientation; AVCaptureVideoOrientation zoomOrientation;.
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
orientation = [self videoOrientation:origAsset];
zoomOrientation = [self videoOrientation:filteredasset];
BOOL isPortrait = NO;
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
isPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isPortrait = YES;
break;
default:
break;
}
CGSize naturalSize = firstTrack.naturalSize;
if(isPortrait){
naturalSize = CGSizeMake(naturalSize.height,naturalSize.width);
}
BOOL isZoomPortrait = NO;
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isZoomPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isZoomPortrait = YES;
break;
default:
break;
}
CGSize zoomNaturalSize = secondTrack.naturalSize;
if(isZoomPortrait){
zoomNaturalSize = CGSizeMake(zoomNaturalSize.height,zoomNaturalSize.width);
}
CGFloat aspectWidth = 720/naturalSize.width;
CGFloat aspectheight = 720/naturalSize.height;
CGFloat zoomAspectWidth = 720/zoomNaturalSize.width;
CGFloat zoomAspectheight = 720/zoomNaturalSize.height;
CGFloat scale = MIN(aspectWidth, aspectheight);
CGFloat zoomScale = MAX(zoomAspectWidth, zoomAspectheight);
CGAffineTransform transform = [self transformFromOrientationWithVideoSizeWithAspect:naturalSize scale1:scale];
CGAffineTransform zoomTransform = [self zoomTransformFromOrientationWithVideoSizeWithAspect:zoomNaturalSize scale1:zoomScale];
[FirstlayerInstruction setTransform:transform atTime:kCMTimeZero];
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
[SecondlayerInstruction setTransform:zoomTransform atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(720, 720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
- (CGAffineTransform)transformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
- (CGAffineTransform)zoomTransformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
-(AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
AVCaptureVideoOrientation result = 0;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
result = AVCaptureVideoOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
result = AVCaptureVideoOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
result = AVCaptureVideoOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
result = AVCaptureVideoOrientationLandscapeLeft;
}
}
return result;
}

capture the video showing in preview frame only iOS

Hi, I am working on a camera using the AVFoundation framework. Video records properly, but I want to record only the preview area shown on screen, something like Instagram. Please help if anyone has an idea.
I tried to do it with the following code but did not get the video as required:
AVAsset *asset = [AVAsset assetWithURL:inputURL];
AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] lastObject];
CGFloat sideLength = _previewView.frame.size.height;
CGSize originalSize = assetVideoTrack.naturalSize;
CGFloat scale;
if (originalSize.width < originalSize.height) {
scale = sideLength / originalSize.width;
} else {
scale = sideLength / originalSize.height;
}
CGSize scaledSize = CGSizeMake(originalSize.width * scale, originalSize.height * scale);
CGPoint topLeft = CGPointMake(sideLength * .5 - scaledSize.width * .5, sideLength * .5 - scaledSize.height * .5);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:assetVideoTrack];
CGAffineTransform orientationTransform = assetVideoTrack.preferredTransform;
if (orientationTransform.tx == originalSize.width || orientationTransform.tx == originalSize.height) {
orientationTransform.tx = sideLength;
}
if (orientationTransform.ty == originalSize.width || orientationTransform.ty == originalSize.height) {
orientationTransform.ty = sideLength;
}
CGAffineTransform transform = CGAffineTransformConcat(CGAffineTransformConcat(CGAffineTransformMakeScale(scale, scale), CGAffineTransformMakeTranslation(topLeft.x, topLeft.y)), orientationTransform);
[layerInstruction setTransform:transform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.layerInstructions = @[layerInstruction];
instruction.timeRange = assetVideoTrack.timeRange;
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(sideLength, sideLength);
videoComposition.renderScale = 1.0;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = @[instruction];
// export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
// exporter.videoComposition = videoComposition;
exporter.outputURL=outputURL;
exporter.outputFileType=AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
switch(exporter.status) {
case AVAssetExportSessionStatusCompleted:
completion(exporter);
NSLog(#"file exported successfully");
break;
default:
NSLog(#"file did not export successfully");
}
}];
Preview layer code:
@property (nonatomic, weak) IBOutlet PreviewView *previewView;
AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
connection.videoOrientation = previewLayer.connection.videoOrientation;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.bounds = self.previewView.bounds;
previewLayer.position=CGPointMake(CGRectGetMidX(self.previewView.bounds), CGRectGetMidY(self.previewView.bounds));
Please give me an idea so I can resolve the issue. Thanks.
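Two things stand out in the code above: exporter.videoComposition is commented out, so the crop/scale composition is never applied, and the crop has to reproduce what AVLayerVideoGravityResizeAspectFill shows, i.e. a centered crop of the natural size. A minimal sketch under those assumptions (square preview, identity preferredTransform):
// Assign the composition, otherwise the layer instruction is ignored:
exporter.videoComposition = videoComposition;
// Center-crop to the square region an aspect-fill preview actually shows:
CGSize natural = assetVideoTrack.naturalSize;
CGFloat side = MIN(natural.width, natural.height);
videoComposition.renderSize = CGSizeMake(side, side);
[layerInstruction setTransform:CGAffineTransformMakeTranslation(-(natural.width - side) / 2.0, -(natural.height - side) / 2.0) atTime:kCMTimeZero];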

Saving video with overlay of GIF image

I am working on an application in which I record a video. When recording finishes, I put a GIF image over it using a library.
My code for playing the video and putting the GIF image as an overlay:
self.avPlayer = [AVPlayer playerWithURL:self.urlstring];
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
videoLayer.frame = self.preview_view.bounds;
videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.preview_view.layer addSublayer:videoLayer];
NSURL *url = [[NSBundle mainBundle] URLForResource:@"02" withExtension:@"gif"];
self.img_gif.image = [UIImage animatedImageWithAnimatedGIFData:[NSData dataWithContentsOfURL:url]];
But now I want to merge and save the video with the GIF image overlaid. I googled it but didn't find what I want.
Thank you for your help.
This is the best answer for merging a video with a GIF image.
- (void)mixVideoAsset:(AVAsset *)videoAsset {
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
NSLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[self startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
- (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
- (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// Within kCGImagePropertyGIFDictionary, kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime hold the same value
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
return animation;
}
Here is the Swift version of @Jitendra Modi's answer, and it worked like a charm.
Swift 5.2:
func animationForGif(with url: URL) -> CAKeyframeAnimation? {
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contents))
var frames: [CGImage] = []
var delayTimes: [CGFloat] = []
var totalTime: CGFloat = 0.0
// var gifWidth: CGFloat, gifHeight: CGFloat
guard let gifSource = CGImageSourceCreateWithURL(url as CFURL, nil) else {
print("Can not get image source from the gif: \(url)")
return nil
}
// get frame
let frameCount = CGImageSourceGetCount(gifSource)
for i in 0..<frameCount {
guard let frame = CGImageSourceCreateImageAtIndex(gifSource, i, nil) else {
continue
}
guard let dic = CGImageSourceCopyPropertiesAtIndex(gifSource, i, nil) as? [AnyHashable: Any] else { continue }
// gifWidth = dic[kCGImagePropertyPixelWidth] as? CGFloat ?? 0
// gifHeight = dic[kCGImagePropertyPixelHeight] as? CGFloat ?? 0
guard let gifDic: [AnyHashable: Any] = dic[kCGImagePropertyGIFDictionary] as? [AnyHashable: Any] else { continue }
let delayTime = gifDic[kCGImagePropertyGIFDelayTime] as? CGFloat ?? 0
frames.append(frame)
delayTimes.append(delayTime)
totalTime += delayTime
}
if frames.count == 0 {
return nil
}
assert(frames.count == delayTimes.count)
var times: [NSNumber] = []
var currentTime: CGFloat = 0
for i in 0..<delayTimes.count {
times.append(NSNumber(value: Double(currentTime / totalTime)))
currentTime += delayTimes[i]
}
animation.keyTimes = times
animation.values = frames
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear)
animation.duration = Double(totalTime)
animation.repeatCount = .greatestFiniteMagnitude
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.isRemovedOnCompletion = false
return animation
}
And you can use this animation:
let gifLayer = CALayer()
gifLayer.frame = CGRect(x: 0, y: 0, width: 300, height: 300)
if let animation = animationForGif(with: gifUrl) {
gifLayer.add(animation, forKey: "contents")
}
parentLayer.addSublayer(gifLayer)
You can try any of the code below for screen recording. It will merge your video and GIF.
You can download a sample provided by Apple from the link below: https://developer.apple.com/library/mac/samplecode/AVScreenShack/Introduction/Intro.html
https://github.com/alskipp/ASScreenRecorder
http://codethink.no-ip.org/wordpress/archives/673
Hope this helps you.

Square video orientation won't change from landscape to portrait using AVAsset/AVCaptureSession

I am trying to create a square video using AVCaptureSession, and I can capture video successfully. The issue is that if my device is in portrait mode when I capture a video, its orientation is recorded correctly, but if my device is in landscape when I capture, I want to change the video's orientation to portrait. I use the following code to crop the video after capture:
-(void)cropView:(NSURL*)outputfile
{
AVAsset *asset = [AVAsset assetWithURL:outputfile];
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize =CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
// rotate to portrait
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
//Call when finished
[self exportDidFinish:exporter];
});
}];
}
I just fixed the issue using the following code and steps:
First, my device orientation is locked and my app supports only portrait orientation, so I assumed I would only get portrait orientation; however, I capture video in landscape mode as well, so I get the device orientation using Core Motion with the following code:
#import <CoreMotion/CoreMotion.h>
@interface ViewController ()
{
AVCaptureVideoOrientation orientationLast, orientationAfterProcess;
CMMotionManager *motionManager;
}
@implementation ViewController
- (void)initializeMotionManager{
motionManager = [[CMMotionManager alloc] init];
motionManager.accelerometerUpdateInterval = .2;
motionManager.gyroUpdateInterval = .2;
[motionManager startAccelerometerUpdatesToQueue:[NSOperationQueue currentQueue]
withHandler:^(CMAccelerometerData *accelerometerData, NSError *error) {
if (!error) {
[self outputAccelerationData:accelerometerData.acceleration];
}
else{
NSLog(#"%#", error);
}
}];
}
- (void)outputAccelerationData:(CMAcceleration)acceleration{
AVCaptureVideoOrientation orientationNew;
if (acceleration.x >= 0.75) {
orientationNew = AVCaptureVideoOrientationLandscapeLeft;
}
else if (acceleration.x <= -0.75) {
orientationNew =AVCaptureVideoOrientationLandscapeRight;
}
else if (acceleration.y <= -0.75) {
orientationNew =AVCaptureVideoOrientationPortrait;
}
else if (acceleration.y >= 0.75) {
orientationNew =AVCaptureVideoOrientationPortraitUpsideDown;
}
else {
// Consider same as last time
return;
}
if (orientationNew == orientationLast)
return;
orientationLast = orientationNew;
}
So, based on the device rotation, orientationLast always holds the current device orientation. Then, when I tap the button to record video, I set the AVCaptureConnection orientation:
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([CaptureConnection isVideoOrientationSupported])
{
[CaptureConnection setVideoOrientation:orientationLast];
}
Now, after capturing the video, I do the following at crop time, and it works perfectly:
-(void)cropView:(NSURL*)outputfile
{
AVAsset *asset = [AVAsset assetWithURL:outputfile];
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
float scaleFactor;
if (videoSize.width > videoSize.height) {
scaleFactor = videoSize.height/320;
}
else if (videoSize.width == videoSize.height){
scaleFactor = videoSize.height/320;
}
else{
scaleFactor = videoSize.width/320;
}
CGFloat cropOffX = 0;
CGFloat cropOffY = 0;
CGFloat cropWidth = 320 *scaleFactor;
CGFloat cropHeight = 320 *scaleFactor;
videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
CGAffineTransform t1 = CGAffineTransformIdentity;
CGAffineTransform t2 = CGAffineTransformIdentity;
switch (videoOrientation) {
case UIImageOrientationUp:
t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY );
t2 = CGAffineTransformRotate(t1, M_PI_2 );
break;
case UIImageOrientationDown:
t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY ); // not fixed width is the real height in upside down
t2 = CGAffineTransformRotate(t1, - M_PI_2 );
break;
case UIImageOrientationRight:
t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY );
t2 = CGAffineTransformRotate(t1, 0 );
break;
case UIImageOrientationLeft:
t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY );
t2 = CGAffineTransformRotate(t1, M_PI );
break;
default:
NSLog(#"no supported orientation has been found in this video");
break;
}
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
//Call when finished
[self exportDidFinish:exporter];
});
}];
}
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIImageOrientationLeft; //return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == 0)
return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == size.width)
return UIImageOrientationDown; //return UIInterfaceOrientationPortraitUpsideDown;
else
return UIImageOrientationUp; //return UIInterfaceOrientationPortrait;
}

Add GIF watermark on a video in iOS

I need to accomplish the following: there is a GIF overlaid on a video, and I want to composite the video and the GIF into a new video. I'm using the following code, but the result is only the video, without the GIF:
- (void)mixVideoAsset:(AVAsset *)videoAsset {
LLog(#"Begining");
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [paths objectAtIndex:0];
// NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
// [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
// NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
LLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
Add Gif Watermark
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[BBGifManager startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
Add CALayer Animations
+ (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
Create CAKeyFrameAnimation
+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// Within kCGImagePropertyGIFDictionary, kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime hold the same value
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
CFRelease((__bridge CFTypeRef)(dict));
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
return animation;
}
You should adjust your animation settings for CoreAnimation:
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
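In the question's animationForGifWithURL: method, those two lines go just before the return, alongside the duration and repeat count:
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero; // align the animation with the video timeline
animation.removedOnCompletion = NO; // keep the frames visible for the whole export
return animation;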
Just in case, here is a Swift 3 example of how to do the same thing - inserting animated frames/images into the video (not exactly a GIF, but an array of images). It uses AVAssetExportSession and AVMutableVideoComposition together with AVMutableVideoCompositionInstruction, and CAKeyframeAnimation to animate the frames.
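For reference, a minimal sketch of that idea in Objective-C (the method name and parameters are mine, not from the linked example): build a discrete keyframe animation from an array of images and add it to an overlay layer before export.
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
+ (CAKeyframeAnimation *)animationForImages:(NSArray<UIImage *> *)images frameDuration:(CFTimeInterval)frameDuration
{
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray *values = [NSMutableArray arrayWithCapacity:images.count];
for (UIImage *image in images) {
[values addObject:(__bridge id)image.CGImage]; // one keyframe per image
}
animation.values = values;
animation.calculationMode = kCAAnimationDiscrete; // hold each frame, no cross-fade
animation.duration = frameDuration * images.count;
animation.repeatCount = HUGE_VALF;
animation.beginTime = AVCoreAnimationBeginTimeAtZero; // required for AVVideoCompositionCoreAnimationTool
animation.removedOnCompletion = NO;
return animation;
}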
