Video recording issue in iOS

I am working on a video-making app.
I need to record a video in the first view and then display it in a second view. For recording the video I followed this tutorial.
I made some changes, as per my needs, in the didFinishRecordingToOutputFileAtURL method.
Here is my updated method.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: log it and find out if the recording was successful.
NSLog(@"didFinishRecordingToOutputFileAtURL error:%@",error);
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAsset *asset = [AVAsset assetWithURL:outputFileURL];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:CMTimeMake(0, 1) error:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"%@%d.mov",NSBundle.mainBundle.infoDictionary[@"CFBundleExecutable"],++videoCounter]];
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
if ([[[NSUserDefaults standardUserDefaults] stringForKey:@"orientation"] isEqualToString:@"landscape"]) {
videoAssetOrientation_ = UIImageOrientationUp;
}
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
if (![self.ratioLabel.text isEqualToString:@"16:9"]) {
renderWidth = naturalSize.width;
renderHeight = naturalSize.width;
NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
else {
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
NSLog(#"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
if (![self.ratioLabel.text isEqualToString:@"16:9"])
{
CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2);
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
else
{
CGAffineTransform t2 = CGAffineTransformMakeRotation( M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
AVCaptureDevicePosition position = [[VideoInputDevice device] position];
if (position == AVCaptureDevicePositionFront)
{
/* For front camera only */
CGAffineTransform t = CGAffineTransformMakeScale(-1.0f, 1.0f);
t = CGAffineTransformTranslate(t, -videoAssetTrack.naturalSize.width, 0);
t = CGAffineTransformRotate(t, (DEGREES_TO_RADIANS(90.0)));
t = CGAffineTransformTranslate(t, 0.0f, -videoAssetTrack.naturalSize.width);
[layerInstruction setTransform:t atTime:kCMTimeZero];
/* For front camera only */
}
[layerInstruction setOpacity:0.0 atTime:asset.duration];
instruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:instruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
AVAssetExportSession *exporter;
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.videoComposition = mainCompositionInst;
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
self.doneButton.userInteractionEnabled = YES;
if(videoAddr==nil)
{
videoAddr = [[NSMutableArray alloc] init];
}
[videoAddr addObject:exporter.outputURL];
[[PreviewLayer connection] setEnabled:YES];
AVAsset *asset = [AVAsset assetWithURL:exporter.outputURL];
NSLog(#"remaining seconds before:%f",lastSecond);
double assetDuration = CMTimeGetSeconds(asset.duration);
if (assetDuration>3.0)
assetDuration = 3.0;
lastSecond = lastSecond- assetDuration;
NSLog(#"remaining seconds after:%f",lastSecond);
self.secondsLabel.text = [NSString stringWithFormat:#"%0.1fs",lastSecond];
self.secondsLabel.hidden = NO;
NSData *data = [NSKeyedArchiver archivedDataWithRootObject:videoAddr];
[[NSUserDefaults standardUserDefaults] setObject:data forKey:@"videoAddr"];
[[NSUserDefaults standardUserDefaults] synchronize];
videoURL = outputFileURL;
flagAutorotate = NO;
self.cancelButton.hidden = self.doneButton.hidden = NO;
imgCancel.hidden = imgDone.hidden = NO;
if ([[NSUserDefaults standardUserDefaults] boolForKey:@"Vibration"])
AudioServicesPlayAlertSound(kSystemSoundID_Vibrate);
[[UIApplication sharedApplication] endIgnoringInteractionEvents];
});
}];
}
else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:[NSString stringWithFormat:@"Video can not be saved\nPlease free some storage space"] delegate:self cancelButtonTitle:nil otherButtonTitles:nil, nil];
[alert show];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[alert dismissWithClickedButtonIndex:0 animated:YES];
});
}
}
}
But here is the issue: the video is not recorded exactly as it is shown in the preview.
See these two screenshots.
Video recording preview
Video playing view

The reason is that your iPad's screen aspect ratio is not the same as the camera's aspect ratio.
You can modify the camera preview size by setting the videoGravity property of AVCaptureVideoPreviewLayer,
which influences how content is viewed relative to the layer bounds:
layer.videoGravity = AVLayerVideoGravityResizeAspect;
But in that case the preview won't be fullscreen.
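For reference, the three gravity options (a sketch; previewLayer is assumed to be your AVCaptureVideoPreviewLayer):
// Letterboxed, whole frame visible, but not fullscreen:
previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
// Fullscreen, aspect preserved, edges of the frame cropped (what most camera apps use):
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
// Fullscreen, aspect ratio distorted to fit:
previewLayer.videoGravity = AVLayerVideoGravityResize;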
If you want the video fullscreen with the same aspect ratio as the preview, you will have to crop it. The cropping process is explained here:
Exporting AVCaptureSession video in a size that matches the preview layer
Video capture with 1:1 aspect ratio in iOS
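A minimal sketch of that cropping approach, assuming a portrait-oriented source track and a fullscreen preview (the same translate-then-rotate idea used in the method above; asset and layerInstruction are as in your code):
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGRect screenBounds = [UIScreen mainScreen].bounds;
// For a portrait recording the buffer is stored rotated, so the on-screen width is naturalSize.height.
CGFloat visibleWidth = track.naturalSize.height;
CGFloat visibleHeight = visibleWidth * (screenBounds.size.height / screenBounds.size.width);
AVMutableVideoComposition *cropComposition = [AVMutableVideoComposition videoComposition];
cropComposition.renderSize = CGSizeMake(visibleWidth, visibleHeight);
cropComposition.frameDuration = CMTimeMake(1, 30);
// Rotate to portrait and shift so the vertically centered region that the preview showed lands inside the render rectangle.
CGAffineTransform t = CGAffineTransformMakeTranslation(track.naturalSize.height, -(track.naturalSize.width - visibleHeight) / 2.0);
t = CGAffineTransformRotate(t, M_PI_2);
[layerInstruction setTransform:t atTime:kCMTimeZero];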

Related

Video Orientation Issue

I am working with the AVFoundation framework.
I am trying to play two videos simultaneously, one with a filter and the other the normal video.
I have done the whole task; I'm just stuck on setting the orientation at the end, while saving the entire video to the gallery.
I have already tried a lot but could not get anything fruitful.
Please help me with setting the orientations.
//Save action
@IBAction func saveAction(_ sender: UIButton) {
HelperClass.shared().applyFilter(self.firstAsset, andSecondAsset:self.secondAsset, onviewController:self, andcompos:self.composition, completion: { (value,error,url) in
if(url != nil){
print("url",url!)
}
})
MBProgressHUD.showAdded(to:self.view, animated: true)
}
#pragma mark - Overlay task happens here; the filtered video returned by applyFilter is overlaid on the foreground video
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.8f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(40,0);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
//
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.5f,1.5f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 50);
MainCompositionInst.renderSize = CGSizeMake(firstTrack.naturalSize.width, firstTrack.naturalSize.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
#pragma mark - Saving the final video to photo gallery here
- (void)exportDidFinish:(AVAssetExportSession*)session onViewController:(UIViewController*)vc {
if(session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
dispatch_async(dispatch_get_main_queue(), ^{
[MBProgressHUD hideHUDForView:vc.view animated:true];
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles: nil, nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc]
initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles: nil];
[alert show];
// [self loadMoviePlayer:outputURL];
}
});
}];
}
}else{
[MBProgressHUD hideHUDForView:vc.view animated:true];
NSLog(#"found an issue %#",session.error);
}
}
You will have to play with the transformation of the input video track. First, get the input video track's orientation and derive the corresponding transform for your AVMutableVideoCompositionLayerInstruction.
Here is the proper way to do it. I mimicked your saveVideo method with my own code base. I also changed the render size to 720 x 720, so it looks better for videos of all different resolutions.
Declare the orientation and zoomOrientation variables in the .h file so they can be used globally in the .m file: AVCaptureVideoOrientation orientation; AVCaptureVideoOrientation zoomOrientation;.
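For reference, a minimal sketch of those declarations (the class name is hypothetical):
// In the .h file; ivars shared by the save method and the transform helpers below.
@interface MyViewController : UIViewController
{
AVCaptureVideoOrientation orientation;
AVCaptureVideoOrientation zoomOrientation;
}
@end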
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
orientation = [self videoOrientation:origAsset];
zoomOrientation = [self videoOrientation:filteredasset];
BOOL isPortrait = NO;
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
isPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isPortrait = YES;
break;
default:
break;
}
CGSize naturalSize = firstTrack.naturalSize;
if(isPortrait){
naturalSize = CGSizeMake(naturalSize.height,naturalSize.width);
}
BOOL isZoomPortrait = NO;
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isZoomPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isZoomPortrait = YES;
break;
default:
break;
}
CGSize zoomNaturalSize = secondTrack.naturalSize;
if(isZoomPortrait){
zoomNaturalSize = CGSizeMake(zoomNaturalSize.height,zoomNaturalSize.width);
}
CGFloat aspectWidth = 720/naturalSize.width;
CGFloat aspectheight = 720/naturalSize.height;
CGFloat zoomAspectWidth = 720/zoomNaturalSize.width;
CGFloat zoomAspectheight = 720/zoomNaturalSize.height;
CGFloat scale = MIN(aspectWidth, aspectheight);
CGFloat zoomScale = MAX(zoomAspectWidth, zoomAspectheight);
CGAffineTransform transform = [self transformFromOrientationWithVideoSizeWithAspect:naturalSize scale1:scale];
CGAffineTransform zoomTransform = [self zoomTransformFromOrientationWithVideoSizeWithAspect:zoomNaturalSize scale1:zoomScale];
[FirstlayerInstruction setTransform:transform atTime:kCMTimeZero];
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
[SecondlayerInstruction setTransform:zoomTransform atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(720, 720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
- (CGAffineTransform)transformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
- (CGAffineTransform)zoomTransformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
-(AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
AVCaptureVideoOrientation result = AVCaptureVideoOrientationPortrait; // default when the transform matches no known rotation
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
result = AVCaptureVideoOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
result = AVCaptureVideoOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
result = AVCaptureVideoOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
result = AVCaptureVideoOrientationLandscapeLeft;
}
}
return result;
}

CGAffineTransform the AVAsset to fit device screen

I have an AVAsset with an AVAssetTrack, which has a size (for example, width = 1920, height = 1080). What I need is to fit this asset into a given screen size (for example, width = 320, height = 568): if the asset is landscape, rotate it (90 degrees); if square, add black stripes on top and bottom. I tried this:
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack)
{
if (handler)
handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
return;
}
CGFloat newHeight = [videoTrack naturalSize].height/3*4;
CGFloat newWidth = [videoTrack naturalSize].width/3*4;
const CGFloat videoAspectRatio = newWidth / newHeight;
const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
CGFloat scale = 1.f;
if (videoAspectRatio > toSizeAspectRatio)
{
scale = toSize.height / newHeight;
}
else
{
scale = toSize.width /newWidth;
}
CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
CGAffineTransform translateTrans = CGAffineTransformIdentity;
if (videoAspectRatio > toSizeAspectRatio)
{
if (offsetRatioPoint)
{
const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
else
{
const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
}
else
{
if (offsetRatioPoint)
{
const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
else
{
const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
}
CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
// Rotate transformation
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
videoComposition.renderSize = toSize;
int32_t frameRate = 30;
videoComposition.frameDuration = CMTimeMake(1, frameRate);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
exporter.videoComposition = videoComposition;
exporter.shouldOptimizeForNetworkUse = YES;
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
if (toURL)
exportPath = toURL.path;
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
__block NSURL* outURL = [NSURL fileURLWithPath: exportPath];
exporter.outputURL = outURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.timeRange = instruction.timeRange;
NSLog(#"%#", exportPath);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
if (!toURL)
{
if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
[[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
NSError *error;
if (![[NSFileManager defaultManager] moveItemAtPath: exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
NSLog(#"Error %#", error);
}
outURL = [NSURL fileURLWithPath: [GlobalConst fullMoviePath] ];
}
NSLog(#"%#", outURL);
handler(outURL, nil);
}];
}
It almost does the first step (rotating the landscape asset), but the resulting asset is slightly zoomed. Thanks in advance for any advice.
I solved this problem using the code below. In it I rotate the landscape video to portrait, make it square, and also add an image as a watermark.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
int videoDimention;
// make it square
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGSize videoSize;
NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
if(isPortrait) {
//videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
videoDimention=0;// for Portrait
} else {
videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
videoDimention=1;// for Landscape
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
videoDimention=2; // for squre
double height=clipVideoTrack.naturalSize.height;
[userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
}
else{
videoComposition.renderSize =videoSize;
}
// videoComposition.renderScale=.5;
if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
videoComposition.frameDuration = CMTimeMake(1, 15);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
videoComposition.frameDuration = CMTimeMake(1, 20);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
videoComposition.frameDuration = CMTimeMake(1, 25);
}
else{
videoComposition.frameDuration = CMTimeMake(1, 30);
}
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
// rotate to portrait
if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
}
//for water mark
CGSize sizeOfVideo=[asset naturalSize];
//Image of watermark
UIImage *myImage=[UIImage imageNamed:@"watermark"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
}
else{
layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
}
// layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
// AVMutableComposition *composition = [AVMutableComposition composition];
// [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// e.g .mov type
exportSession.outputURL = fileURL;
exportSession.videoComposition = videoComposition;
// [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
AVAssetExportSessionStatus status = [exportSession status];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
VideoEditVC *controller=[[VideoEditVC alloc] init];
controller.isFirst=YES;
controller.videoSize=videoDimention;
[self.navigationController pushViewController:controller animated:YES];
self.delegate=controller;
});
}];
});
You also need to implement this method:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == 0)
return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == size.width)
return UIInterfaceOrientationPortraitUpsideDown;
else
return UIInterfaceOrientationPortrait;
}

Add GIF watermark on a video in iOS

I need to accomplish the following: there is a GIF overlay on a video, and I want to compose the video and the GIF into a new video. I'm using the following code, but the result is only the video, without the GIF:
- (void)mixVideoAsset:(AVAsset *)videoAsset {
LLog(#"Begining");
NSDate * begin = [NSDate date];
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// - Audio
AVMutableCompositionTrack *audioCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTrack.timeRange.duration) ofTrack:audioTrack atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Watermark Layers
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Get path
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [paths objectAtIndex:0];
// NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
// [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
// NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSURL * url = TempVideoURL();
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
NSDate * endDate = [NSDate date];
NSTimeInterval interval = [endDate timeIntervalSinceDate:begin];
LLog(#"completed %f senconds",interval);
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
}
});
}];
}
Add Gif Watermark
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size
{
// - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
size.width = 100;
size.height = 100;
// - set up the overlay
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 100, size.width, size.height);
NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"jiafei" withExtension:@"gif"];
[BBGifManager startGifAnimationWithURL:fileUrl inLayer:overlayLayer];
// UIImage * image = [UIImage imageNamed:@"gifImage.gif"];
// [overlayLayer setContents:(id)[image CGImage]];
// [overlayLayer setMasksToBounds:YES];
[parentLayer addSublayer:overlayLayer];
// - apply magic
composition.animationTool = [AVVideoCompositionCoreAnimationTool
videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
Add CALayer Animations
+ (void)startGifAnimationWithURL:(NSURL *)url inLayer:(CALayer *)layer {
CAKeyframeAnimation * animation = [self animationForGifWithURL:url];
[layer addAnimation:animation forKey:@"contents"];
}
Create CAKeyFrameAnimation
+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
NSMutableArray * frames = [NSMutableArray new];
NSMutableArray *delayTimes = [NSMutableArray new];
CGFloat totalTime = 0.0;
CGFloat gifWidth;
CGFloat gifHeight;
CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
// get frame count
size_t frameCount = CGImageSourceGetCount(gifSource);
for (size_t i = 0; i < frameCount; ++i) {
// get each frame
CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
[frames addObject:(__bridge id)frame];
CGImageRelease(frame);
// get gif info with each frame
NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
NSLog(#"kCGImagePropertyGIFDictionary %#", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
// get gif size
gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
// The kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime values inside kCGImagePropertyGIFDictionary are identical
NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
[delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime]];
totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFDelayTime] floatValue];
}
if (gifSource) {
CFRelease(gifSource);
}
NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
CGFloat currentTime = 0;
NSInteger count = delayTimes.count;
for (int i = 0; i < count; ++i) {
[times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
currentTime += [[delayTimes objectAtIndex:i] floatValue];
}
NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
for (int i = 0; i < count; ++i) {
[images addObject:[frames objectAtIndex:i]];
}
animation.keyTimes = times;
animation.values = images;
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
return animation;
}
You should adjust your animation settings for CoreAnimation:
animation.beginTime = AVCoreAnimationBeginTimeAtZero;
animation.removedOnCompletion = NO;
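Applied to the animationForGifWithURL: method above, the end of that method would then look like this (a sketch, not your exact code):
animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
animation.duration = totalTime;
animation.repeatCount = HUGE_VALF;
// Both settings are needed for AVVideoCompositionCoreAnimationTool to render the animation:
animation.beginTime = AVCoreAnimationBeginTimeAtZero; // start at the video's time zero
animation.removedOnCompletion = NO; // keep the animation alive for the whole export
return animation;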
Just in case, here's an example in Swift 3 of how to do the same thing: inserting animated frames/images into the video (not exactly a GIF, but an array of images). It uses AVAssetExportSession and AVMutableVideoComposition together with AVMutableVideoCompositionInstruction, and CAKeyframeAnimation to animate the frames.

How to record video with overlay view

Hi, I am trying to record video with an overlay.
I have written
-(void)addOvelayViewToVideo:(NSURL *)videoURL
to add an overlay view to the recorded video, but it is not working.
I wrote the code to record video in viewDidLoad using AVCaptureSession.
//In ViewDidLoad
//CONFIGURE DISPLAY OUTPUT
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.previewLayer.frame = self.view.frame;
[self.view.layer addSublayer:self.previewLayer];
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if(error.code != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if(value)
{
isSuccess = [value boolValue];
}
}
if(isSuccess)
{
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
if([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[self addOverviewToVideo:outputFileURL];
}
else{
NSLog(#"could not saved to photos album.");
}
}
}
-(void)addOvelayViewToVideo:(NSURL *)videoURL
{
AVAsset *asset = [AVAsset assetWithURL:videoURL];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videoLayerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
[videoLayerInstruction setOpacity:0.0 atTime:asset.duration];
compositionInstruction.layerInstructions = [NSArray arrayWithObject:videoLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComposition.instructions = [NSArray arrayWithObject:compositionInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"sampleHUD"];
[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[overlayLayer setMasksToBounds:YES];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSLog(#"renderSize:%f,%f", videoComposition.renderSize.width, videoComposition.renderSize.height);
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = videoURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = videoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//save the video in photos album
});
}];
}
I am still unable to figure out what is going wrong here and need guidance.
Can I add an overlay while recording video?
Any help would be appreciated.

Cropping AVAsset video with AVFoundation

I am using AVCaptureMovieFileOutput to record some video. I display the preview layer using AVLayerVideoGravityResizeAspectFill, which zooms in slightly. The problem I have is that the final video is larger, containing extra image data that didn't fit on the screen during the preview.
This is the preview and resulting video
Is there a way I can specify a CGRect that I want to cut from the video using AVAssetExportSession?
EDIT ----
When I apply a CGAffineTransformScale to the AVAssetTrack, it zooms into the video, and with the AVMutableVideoComposition renderSize set to view.bounds it crops off the ends. Great, there's just one problem left: the width of the video does not stretch to the correct width; it just gets filled with black.
EDIT 2 ----
The suggested question/answer is incomplete.
Some of my code:
In my - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error method I have this to crop and resize the video.
- (void)flipAndSave:(NSURL *)videoURL withCompletionBlock:(void(^)(NSURL *returnURL))completionBlock
{
AVURLAsset *firstAsset = [AVURLAsset assetWithURL:videoURL];
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 2 - Video track
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 2.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 600), firstAsset.duration);
// 2.2 - Create an AVMutableVideoCompositionLayerInstruction for the first track
AVMutableVideoCompositionLayerInstruction *firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation firstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = firstAssetTrack.preferredTransform;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationRight;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationLeft;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
firstAssetOrientation_ = UIImageOrientationUp;
}
if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
firstAssetOrientation_ = UIImageOrientationDown;
}
// [firstlayerInstruction setTransform:firstAssetTrack.preferredTransform atTime:kCMTimeZero];
// [firstlayerInstruction setCropRectangle:self.view.bounds atTime:kCMTimeZero];
CGFloat scale = [self getScaleFromAsset:firstAssetTrack];
firstTransform = CGAffineTransformScale(firstTransform, scale, scale);
[firstlayerInstruction setTransform:firstTransform atTime:kCMTimeZero];
// 2.4 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// CGSize videoSize = firstAssetTrack.naturalSize;
CGSize videoSize = self.view.bounds.size;
BOOL isPortrait_ = [self isVideoPortrait:firstAsset];
if(isPortrait_) {
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
NSLog(#"%#", NSStringFromCGSize(videoSize));
mainCompositionInst.renderSize = videoSize;
// 3 - Audio track
AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 4 - Get path
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"cutoutput.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=outputURL;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
switch ([exporter status])
{
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %# : %#", [[exporter error] localizedDescription], [exporter error]);
completionBlock(nil);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
completionBlock(nil);
break;
default: {
NSURL *outputURL = exporter.outputURL;
dispatch_async(dispatch_get_main_queue(), ^{
completionBlock(outputURL);
});
break;
}
}
}];
}
Here is my interpretation of your question: you are capturing video on a device with a screen ratio of 4:3, so your AVCaptureVideoPreviewLayer is 4:3, but the video input device captures video in 16:9, and thus the resulting video is 'larger' than seen in the preview.
If you are simply looking to crop the extra pixels not caught by the preview, then check out this article: http://www.netwalk.be/article/record-square-video-ios. It shows how to crop the video into a square, and you'll only need a few modifications to crop to 4:3 instead. I've gone and tested this; here are the changes I made:
Once you have the AVAssetTrack for the video you will need to calculate a new height.
// we convert the captured height i.e. 1080 to a 4:3 screen ratio and get the new height
CGFloat newHeight = clipVideoTrack.naturalSize.height/3*4;
Then modify these two lines, using newHeight.
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, newHeight);
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - newHeight)/2 );
So what we've done here is set the renderSize to a 4:3 ratio; the exact dimensions are based on the input device. We then use a CGAffineTransform to translate the video position so that what we saw in the AVCaptureVideoPreviewLayer is what is rendered to our file.
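For example, assuming a 1920 x 1080 capture: newHeight = 1080 / 3 * 4 = 1440, so the renderSize becomes 1080 x 1440, and the translation shifts the frame by -(1920 - 1440) / 2 = -240 pixels so the cropped region stays centered.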
Edit: If you want to put it all together and crop a video based on the device's screen ratio (3:2, 4:3, 16:9), taking the video orientation into account, we need to add a few things.
First here is the modified sample code with a few critical alterations:
// output file
NSString* docFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
NSString* outputPath = [docFolder stringByAppendingPathComponent:@"output2.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
// input file
AVAsset* asset = [AVAsset assetWithURL:outputFileURL];
AVMutableComposition *composition = [AVMutableComposition composition];
[composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// input clip
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// crop clip to screen ratio
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGFloat complimentSize = [self getComplimentSize:videoTrack.naturalSize.height];
CGSize videoSize;
if(isPortrait) {
videoSize = CGSizeMake(videoTrack.naturalSize.height, complimentSize);
} else {
videoSize = CGSizeMake(complimentSize, videoTrack.naturalSize.height);
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );
// rotate and position video
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGFloat tx = (videoTrack.naturalSize.width-complimentSize)/2;
if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationLandscapeRight) {
// invert translation
tx *= -1;
}
// t1: rotate and position video since it may have been cropped to screen ratio
CGAffineTransform t1 = CGAffineTransformTranslate(videoTrack.preferredTransform, tx, 0);
// t2/t3: mirror video horizontally
CGAffineTransform t2 = CGAffineTransformTranslate(t1, isPortrait?0:videoTrack.naturalSize.width, isPortrait?videoTrack.naturalSize.height:0);
CGAffineTransform t3 = CGAffineTransformScale(t2, isPortrait?1:-1, isPortrait?-1:1);
[transformer setTransform:t3 atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject: transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
// export
exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL=[NSURL fileURLWithPath:outputPath];
exporter.outputFileType=AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
NSLog(#"Exporting done!");
// added export to library for testing
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:[NSURL fileURLWithPath:outputPath]]) {
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputPath]
completionBlock:^(NSURL *assetURL, NSError *error) {
NSLog(#"Saved to album");
if (error) {
}
}];
}
}];
What we added here is a call to get the new render size of the video based on cropping its dimensions to the screen ratio. Once we crop the size down, we need to translate the position to recenter the video, so we grab its orientation to move it in the proper direction. This fixes the off-center issue we saw with UIInterfaceOrientationLandscapeLeft. Finally, the transforms t2 and t3 mirror the video horizontally.
And here are the two new methods that make this happen:
- (CGFloat)getComplimentSize:(CGFloat)size {
CGRect screenRect = [[UIScreen mainScreen] bounds];
CGFloat ratio = screenRect.size.height / screenRect.size.width;
// we have to adjust the ratio for 16:9 screens
if (ratio == 1.775) ratio = 1.77777777777778;
return size * ratio;
}
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
UIInterfaceOrientation orientation = UIInterfaceOrientationPortrait;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
orientation = UIInterfaceOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
orientation = UIInterfaceOrientationLandscapeLeft;
}
}
return orientation;
}
These are pretty straightforward. The only thing to note is that in the getComplimentSize: method we have to manually adjust the ratio for 16:9 screens, since the iPhone 5 and later are mathematically shy of true 16:9 (568 / 320 = 1.775, while 16 / 9 ≈ 1.77778).
AVCaptureVideoDataOutput is a concrete subclass of AVCaptureOutput that you use to process uncompressed frames from the video being captured, or to access compressed frames.
An instance of AVCaptureVideoDataOutput produces video frames you can process using other media APIs. You can access the frames with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
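A minimal sketch of that setup (it assumes self adopts AVCaptureVideoDataOutputSampleBufferDelegate and that captureSession already has a video input):
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// BGRA frames are convenient for Core Graphics / Core Image overlay drawing.
videoDataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
[videoDataOutput setSampleBufferDelegate:self queue:dispatch_queue_create("videoDataQueue", DISPATCH_QUEUE_SERIAL)];
if ([captureSession canAddOutput:videoDataOutput]) {
[captureSession addOutput:videoDataOutput];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// Each captured frame arrives here; draw the overlay into the frame (or hand it to an AVAssetWriter) before saving it.
}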
Configuring a Session
You use a preset on the session to specify the image quality and resolution you want. A preset is a constant that identifies one of a number of possible configurations; in some cases the actual configuration is device-specific:
https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html
For the actual values these presets represent on various devices, see "Saving to a Movie File" and "Capturing Still Images."
If you want to set a size-specific configuration, you should check whether it is supported before setting it:
if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
session.sessionPreset = AVCaptureSessionPreset1280x720;
}
else {
// Handle the failure.
}
