AVAssetExportSession exportAsynchronouslyWithCompletionHandler doesn't work sometimes - iOS

I want to export and compress a video from the iPod library, but exportAsynchronouslyWithCompletionHandler works correctly for some videos and not for others; when it fails it simply does nothing and throws no exception.
Strangely, if I comment out the setVideoComposition: call, exportAsynchronouslyWithCompletionHandler works normally.
Here is my code:
AVAsset *_videoAsset = [AVAsset assetWithURL:[NSURL URLWithString:filmElementModel.alassetUrl]];
CMTime assetTime = [_videoAsset duration];
AVAssetTrack *avAssetTrack = [[_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
Float64 duration = CMTimeGetSeconds(assetTime);
AVMutableComposition *avMutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *avMutableCompositionTrack = [avMutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error = nil;
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.0f, 30), CMTimeMakeWithSeconds(duration>8.0f?8.0f:duration, 30))
ofTrack:avAssetTrack
atTime:kCMTimeZero
error:&error];
AVMutableVideoComposition *avMutableVideoComposition = [AVMutableVideoComposition videoComposition];
avMutableVideoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:avMutableComposition.tracks[0]];
[layerInstruction setTransform:[[[_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform] atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0f atTime:[_videoAsset duration]];
AVMutableVideoCompositionInstruction *avMutableVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
[avMutableVideoCompositionInstruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [avMutableComposition duration])];
avMutableVideoCompositionInstruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
if (avAssetTrack.preferredTransform.a) {
    NSLog(@"landscape");
    avMutableVideoComposition.renderSize = CGSizeMake(avAssetTrack.naturalSize.width, avAssetTrack.naturalSize.height);
} else {
    avMutableVideoComposition.renderSize = CGSizeMake(avAssetTrack.naturalSize.height, avAssetTrack.naturalSize.width);
}
avMutableVideoComposition.instructions = [NSArray arrayWithObject:avMutableVideoCompositionInstruction];
// the url for saving the video
NSString *outUrlString = ITTPathForBabyShotResource([NSString stringWithFormat:@"%@/%@.mp4", DATA_ENV.userModel.userId, filmElementModel.filmElementId]);
NSFileManager *fm = [[NSFileManager alloc] init];
if ([fm fileExistsAtPath:outUrlString]) {
    NSLog(@"video already exists; deleting it");
    if ([fm removeItemAtPath:outUrlString error:&error]) {
        NSLog(@"delete succeeded");
    } else {
        NSLog(@"delete failed, error = %@", error.description);
    }
}
CGSize renderSize = CGSizeMake(1280, 720);
if (MIN(avAssetTrack.naturalSize.width, avAssetTrack.naturalSize.height) < 720) {
    renderSize = avAssetTrack.naturalSize;
}
// rough file-size cap: half a byte per pixel per second of the (at most 8 s) clip
long long fileLimit = renderSize.width * renderSize.height * (duration > 8.0f ? 8.0f : duration) / 2;
_avAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:avMutableComposition presetName:AVAssetExportPreset1280x720];
[_avAssetExportSession setVideoComposition:avMutableVideoComposition];
[_avAssetExportSession setOutputURL:[NSURL fileURLWithPath:outUrlString]];
[_avAssetExportSession setOutputFileType:AVFileTypeQuickTimeMovie];
[_avAssetExportSession setFileLengthLimit:fileLimit];
[_avAssetExportSession setShouldOptimizeForNetworkUse:YES];
[_avAssetExportSession exportAsynchronouslyWithCompletionHandler:^(void){
    switch (_avAssetExportSession.status) {
        case AVAssetExportSessionStatusFailed:
        {
            // inspect _avAssetExportSession.error here to see why the export failed
        }
            break;
        case AVAssetExportSessionStatusCompleted:
        {
            // the compressed file at outputURL is ready
        }
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"export cancelled");
            break;
        case AVAssetExportSessionStatusExporting:
            NSLog(@"AVAssetExportSessionStatusExporting");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"AVAssetExportSessionStatusWaiting");
            break;
    }
}];
// note: this check runs immediately after the export is started, so the status
// will almost never be Completed here; the check belongs in the completion handler
if (_avAssetExportSession.status != AVAssetExportSessionStatusCompleted){
    NSLog(@"Retry export");
}

I solved this problem!
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.0f, 30), CMTimeMakeWithSeconds(duration > 8.0f ? 8.0f : duration, 30))
                                   ofTrack:avAssetTrack
                                    atTime:kCMTimeZero
                                     error:&error];
I replaced the CMTimeMakeWithSeconds(0.0f, 30) start time with CMTimeMakeWithSeconds(0.1f, 30).
But I don't know why that makes it work properly. Presumably the first video sample of some assets does not start exactly at time zero, so a range starting at 0.0 leaves an empty segment at the head of the composition track that the video composition then fails to render.
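For reference, a sketch of the fixed call (same variables as above; the 0.1 s start is the only change):
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.1f, 30), CMTimeMakeWithSeconds(duration > 8.0f ? 8.0f : duration, 30)) // skip the first 0.1 s
                                   ofTrack:avAssetTrack
                                    atTime:kCMTimeZero
                                     error:&error];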

Related

Video Orientation Issue

I am working with the AVFoundation framework.
I am trying to play two videos simultaneously, one with a filter and the other the normal video.
I have done the whole task; I'm just stuck on setting the orientation at the end, while saving the entire video to the gallery.
I have already tried a lot but could not get anything fruitful.
Please help me set the orientations.
//Save action
@IBAction func saveAction(_ sender: UIButton) {
    HelperClass.shared().applyFilter(self.firstAsset, andSecondAsset: self.secondAsset, onviewController: self, andcompos: self.composition, completion: { (value, error, url) in
        if url != nil {
            print("url", url!)
        }
    })
    MBProgressHUD.showAdded(to: self.view, animated: true)
}
#pragma mark - Overlay task happens here; the filtered video returned by the applyFilter method is overlaid on the foreground video
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.8f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(40,0);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
//----SecondlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
//
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.5f,1.5f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 50);
MainCompositionInst.renderSize = CGSizeMake(firstTrack.naturalSize.width, firstTrack.naturalSize.height);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
#pragma mark - Saving the final video to photo gallery here
- (void)exportDidFinish:(AVAssetExportSession*)session onViewController:(UIViewController*)vc {
if(session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error) {
dispatch_async(dispatch_get_main_queue(), ^{
[MBProgressHUD hideHUDForView:vc.view animated:true];
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc]
    initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alert show];
// [self loadMoviePlayer:outputURL];
}
});
}];
}
}else{
[MBProgressHUD hideHUDForView:vc.view animated:true];
NSLog(@"found an issue %@", session.error);
}
}
You will have to play with the transform of the input video track. First, get the input video track's orientation and derive the corresponding transform for your AVMutableVideoCompositionLayerInstruction.
Here is the proper way to do it. I mimicked your save method with my own code base. I also changed the render size to 720 x 720 to make it look better for videos of all different resolutions.
Declare the orientation and zoomOrientation objects in the .h file so they can be used globally in the .m file, e.g. AVCaptureVideoOrientation orientation; AVCaptureVideoOrientation zoomOrientation;.
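A minimal sketch of those declarations (the class name is illustrative; a class extension in the .m works just as well as the .h):
@interface MyViewController () {
    // shared by the save method and the transform helpers below
    AVCaptureVideoOrientation orientation;
    AVCaptureVideoOrientation zoomOrientation;
}
@end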
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//----first track---//
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//----second track---//
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
//----FirstlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
orientation = [self videoOrientation:origAsset];
zoomOrientation = [self videoOrientation:filteredasset];
BOOL isPortrait = NO;
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
isPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isPortrait = YES;
break;
default:
break;
}
CGSize naturalSize = firstTrack.naturalSize;
if(isPortrait){
naturalSize = CGSizeMake(naturalSize.height,naturalSize.width);
}
BOOL isZoomPortrait = NO;
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationLandscapeLeft:
isZoomPortrait = NO;
break;
case AVCaptureVideoOrientationPortrait:
isZoomPortrait = YES;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
isZoomPortrait = YES;
break;
default:
break;
}
CGSize zoomNaturalSize = secondTrack.naturalSize;
if(isZoomPortrait){
zoomNaturalSize = CGSizeMake(zoomNaturalSize.height,zoomNaturalSize.width);
}
CGFloat aspectWidth = 720/naturalSize.width;
CGFloat aspectheight = 720/naturalSize.height;
CGFloat zoomAspectWidth = 720/zoomNaturalSize.width;
CGFloat zoomAspectheight = 720/zoomNaturalSize.height;
CGFloat scale = MIN(aspectWidth, aspectheight);
CGFloat zoomScale = MAX(zoomAspectWidth, zoomAspectheight);
CGAffineTransform transform = [self transformFromOrientationWithVideoSizeWithAspect:naturalSize scale1:scale];
CGAffineTransform zoomTransform = [self zoomTransformFromOrientationWithVideoSizeWithAspect:zoomNaturalSize scale1:zoomScale];
[FirstlayerInstruction setTransform:transform atTime:kCMTimeZero];
//----SecondlayerInstruction---//
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
[SecondlayerInstruction setTransform:zoomTransform atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(720, 720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter onViewController:vc];
});
}];
}
- (CGAffineTransform)transformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (orientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
- (CGAffineTransform)zoomTransformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{
CGAffineTransform transform = CGAffineTransformIdentity;
transform = CGAffineTransformScale(transform,scale,scale);
CGSize size = CGSizeMake(720.0, 720.0);
switch (zoomOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);
break;
case AVCaptureVideoOrientationLandscapeLeft:
transform = CGAffineTransformTranslate(transform,
naturalSize.width,
naturalSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
if (naturalSize.width > naturalSize.height){
transform = CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
}else{
//transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
transform = CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
}
break;
case AVCaptureVideoOrientationPortrait:
transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
//transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))
transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
// transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
return transform;
}
-(AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
AVCaptureVideoOrientation result = 0;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
result = AVCaptureVideoOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
result = AVCaptureVideoOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
result = AVCaptureVideoOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
result = AVCaptureVideoOrientationLandscapeLeft;
}
}
return result;
}

CGAffineTransform the AVAsset to fit device screen

I have an AVAsset with an AVAssetTrack, which has a size (for example, width = 1920, height = 1080). What I need is to fit this asset into a given screen size (for example, width = 320, height = 568): if the asset is landscape, rotate it 90 degrees; if it is square, add black stripes on the top and bottom. I tried this:
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack)
{
if (handler)
handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
return;
}
CGFloat newHeight = [videoTrack naturalSize].height/3*4;
CGFloat newWidth = [videoTrack naturalSize].width/3*4;
const CGFloat videoAspectRatio = newWidth / newHeight;
const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
CGFloat scale = 1.f;
if (videoAspectRatio > toSizeAspectRatio)
{
scale = toSize.height / newHeight;
}
else
{
scale = toSize.width /newWidth;
}
CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
CGAffineTransform translateTrans = CGAffineTransformIdentity;
if (videoAspectRatio > toSizeAspectRatio)
{
if (offsetRatioPoint)
{
const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
else
{
const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
}
}
else
{
if (offsetRatioPoint)
{
const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
else
{
const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
}
}
CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
// Rotate transformation
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
videoComposition.renderSize = toSize;
int32_t frameRate = 30;
videoComposition.frameDuration = CMTimeMake(1, frameRate);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
exporter.videoComposition = videoComposition;
exporter.shouldOptimizeForNetworkUse = YES;
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
if (toURL)
exportPath = toURL.path;
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
__block NSURL* outURL = [NSURL fileURLWithPath: exportPath];
exporter.outputURL = outURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.timeRange = instruction.timeRange;
NSLog(@"%@", exportPath);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
if (!toURL)
{
if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
[[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
NSError *error;
if (![[NSFileManager defaultManager] moveItemAtPath: exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
NSLog(@"Error %@", error);
}
outURL = [NSURL fileURLWithPath: [GlobalConst fullMoviePath] ];
}
NSLog(@"%@", outURL);
handler(outURL, nil);
}];
}
It almost does the first step - it rotates a landscape asset - but the resulting asset is a little bit zoomed in. Thanks in advance for any advice.
I solved this problem using the code below. In it I rotate a landscape video to portrait, make it square, and also add an image as a watermark.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
int videoDimention;
// make it square
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGSize videoSize;
NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
if(isPortrait) {
//videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
videoDimention=0;// for Portrait
} else {
videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
videoDimention=1;// for Landscape
[userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
videoDimention=2; // for squre
double height=clipVideoTrack.naturalSize.height;
[userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
[userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
}
else{
videoComposition.renderSize =videoSize;
}
// videoComposition.renderScale=.5;
if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
videoComposition.frameDuration = CMTimeMake(1, 15);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
videoComposition.frameDuration = CMTimeMake(1, 20);
}
else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
videoComposition.frameDuration = CMTimeMake(1, 25);
}
else{
videoComposition.frameDuration = CMTimeMake(1, 30);
}
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
// rotate to portrait
if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
}
//for water mark
CGSize sizeOfVideo=[asset naturalSize];
//Image of watermark
UIImage *myImage=[UIImage imageNamed:@"watermark"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
}
else{
layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
}
// layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
// AVMutableComposition *composition = [AVMutableComposition composition];
// [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// e.g .mov type
exportSession.outputURL = fileURL;
exportSession.videoComposition = videoComposition;
// [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
AVAssetExportSessionStatus status = [exportSession status];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
VideoEditVC *controller=[[VideoEditVC alloc] init];
controller.isFirst=YES;
controller.videoSize=videoDimention;
[self.navigationController pushViewController:controller animated:YES];
self.delegate=controller;
});
}];
});
You also need to implement this one:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == 0)
return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == size.width)
return UIInterfaceOrientationPortraitUpsideDown;
else
return UIInterfaceOrientationPortrait;
}

Square video orientation won't change from landscape to portrait using AVAsset/AVCaptureSession

I am trying to create a square video using AVCaptureSession, and I can capture video successfully. The issue: if my device is in portrait mode when I capture video, its orientation is recorded correctly, but if the device is in landscape when I capture, I want to change the video's orientation to portrait. I use the following code to crop a video after capture:
-(void)cropView:(NSURL*)outputfile
{
AVAsset *asset = [AVAsset assetWithURL:outputfile];
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize =CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
// rotate to portrait
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
//Call when finished
[self exportDidFinish:exporter];
});
}];
}
I fixed the issue using the following code and steps:
First, my device orientation was locked and my app only supports portrait, so I assumed I would only ever get portrait orientation; however, I was capturing video in landscape mode. So, using Core Motion, I get the actual device orientation with the following code:
#import <CoreMotion/CoreMotion.h>
@interface ViewController ()
{
AVCaptureVideoOrientation orientationLast, orientationAfterProcess;
CMMotionManager *motionManager;
}
@implementation ViewController
- (void)initializeMotionManager{
motionManager = [[CMMotionManager alloc] init];
motionManager.accelerometerUpdateInterval = .2;
motionManager.gyroUpdateInterval = .2;
[motionManager startAccelerometerUpdatesToQueue:[NSOperationQueue currentQueue]
withHandler:^(CMAccelerometerData *accelerometerData, NSError *error) {
if (!error) {
[self outputAccelertionData:accelerometerData.acceleration];
}
else{
NSLog(@"%@", error);
}
}];
}
- (void)outputAccelertionData:(CMAcceleration)acceleration{
AVCaptureVideoOrientation orientationNew;
if (acceleration.x >= 0.75) {
orientationNew = AVCaptureVideoOrientationLandscapeLeft;
}
else if (acceleration.x <= -0.75) {
orientationNew =AVCaptureVideoOrientationLandscapeRight;
}
else if (acceleration.y <= -0.75) {
orientationNew =AVCaptureVideoOrientationPortrait;
}
else if (acceleration.y >= 0.75) {
orientationNew =AVCaptureVideoOrientationPortraitUpsideDown;
}
else {
// Consider same as last time
return;
}
if (orientationNew == orientationLast)
return;
orientationLast = orientationNew;
}
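For completeness, a sketch of where the motion manager would be started (the viewDidLoad placement is an assumption; any point before recording begins works):
- (void)viewDidLoad {
    [super viewDidLoad];
    // start tracking orientationLast before any recording can happen
    [self initializeMotionManager];
}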
So, based on device rotation, orientationLast tracks the device orientation. After that, when I tap the button to record video, I set the AVCaptureConnection orientation:
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([CaptureConnection isVideoOrientationSupported])
{
[CaptureConnection setVideoOrientation:orientationLast];
}
Now, after capturing the video, at crop time I use the following code, and it works perfectly.
-(void)cropView:(NSURL*)outputfile
{
AVAsset *asset = [AVAsset assetWithURL:outputfile];
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
float scaleFactor;
if (videoSize.width > videoSize.height) {
scaleFactor = videoSize.height/320;
}
else if (videoSize.width == videoSize.height){
scaleFactor = videoSize.height/320;
}
else{
scaleFactor = videoSize.width/320;
}
CGFloat cropOffX = 0;
CGFloat cropOffY = 0;
CGFloat cropWidth = 320 *scaleFactor;
CGFloat cropHeight = 320 *scaleFactor;
videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
CGAffineTransform t1 = CGAffineTransformIdentity;
CGAffineTransform t2 = CGAffineTransformIdentity;
switch (videoOrientation) {
case UIImageOrientationUp:
t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY );
t2 = CGAffineTransformRotate(t1, M_PI_2 );
break;
case UIImageOrientationDown:
t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY ); // not fixed width is the real height in upside down
t2 = CGAffineTransformRotate(t1, - M_PI_2 );
break;
case UIImageOrientationRight:
t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY );
t2 = CGAffineTransformRotate(t1, 0 );
break;
case UIImageOrientationLeft:
t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY );
t2 = CGAffineTransformRotate(t1, M_PI );
break;
default:
NSLog(@"no supported orientation has been found in this video");
break;
}
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
//Call when finished
[self exportDidFinish:exporter];
});
}];
}
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize size = [videoTrack naturalSize];
CGAffineTransform txf = [videoTrack preferredTransform];
if (size.width == txf.tx && size.height == txf.ty)
return UIImageOrientationLeft; //return UIInterfaceOrientationLandscapeLeft;
else if (txf.tx == 0 && txf.ty == 0)
return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
else if (txf.tx == 0 && txf.ty == size.width)
return UIImageOrientationDown; //return UIInterfaceOrientationPortraitUpsideDown;
else
return UIImageOrientationUp; //return UIInterfaceOrientationPortrait;
}

Change camera and device orientation

I have an application that is basically one view, and when the user clicks a button, the camera visualization starts.
I want to allow all orientations when the camera isn't shown, but when the camera is shown, I need to force the application into portrait mode, because otherwise the video is rotated. When the camera is closed, the app may rotate again.
Do you know if I can solve the problem with the video orientation?
Or how can I force the app into portrait mode? I know that on earlier iOS versions you could use [UIDevice setOrientation:], but it's deprecated in the latest iOS.
How can I do this for iOS 5 and iOS 6?
I've tried with:
[self presentViewController:[UIViewController new] animated:NO
completion:^{ [self dismissViewControllerAnimated:NO completion:nil]; }];
And in the shouldAutorotateToInterfaceOrientation method:
if (state == CAMERA) {
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}else{
return YES;
}
It works and forces the app to portrait, but when the camera is closed the rotation doesn't work properly.
I mean, when the camera is closed, this is what happens:
The app is in portrait.
If I try to rotate the app, the device rotates but the application does not. I can see that the iPad status bar (time, battery, etc.) rotates, but the app does not.
If I go back to portrait and then rotate the device, it works fine.
Do you know what the problem could be?
Thanks in advance.
I think I've found a solution to the problem of changing the camera and device orientation at the same time.
I call this code when I initialize the camera, and also in the shouldAutorotateToInterfaceOrientation method, where I allow all orientations.
AVCaptureVideoOrientation newOrientation;
UIInterfaceOrientation deviceOrientation = [UIApplication sharedApplication].statusBarOrientation;
NSLog(@"deviceOrientation %ld", (long)deviceOrientation);
switch (deviceOrientation)
{
    case UIInterfaceOrientationPortrait:
        NSLog(@"UIInterfaceOrientationPortrait");
        newOrientation = AVCaptureVideoOrientationPortrait;
        break;
    case UIInterfaceOrientationLandscapeRight:
        NSLog(@"UIInterfaceOrientationLandscapeRight");
        newOrientation = AVCaptureVideoOrientationLandscapeRight;
        break;
    case UIInterfaceOrientationLandscapeLeft:
        NSLog(@"UIInterfaceOrientationLandscapeLeft");
        newOrientation = AVCaptureVideoOrientationLandscapeLeft;
        break;
    default:
        NSLog(@"default");
        newOrientation = AVCaptureVideoOrientationPortrait;
        break;
}
if ([self.prevLayer respondsToSelector:@selector(connection)]){
    if ([self.prevLayer.connection isVideoOrientationSupported]){
        self.prevLayer.connection.videoOrientation = newOrientation;
    }else{
        NSLog(@"connection does not support video orientation");
    }
}else{
    if ([self.prevLayer isOrientationSupported]){
        self.prevLayer.orientation = newOrientation;
    }else{
        NSLog(@"orientation is not supported");
    }
}
Please try the following code for the orientation; I think it may solve your problem.
- (void)encodeVideoOrientation:(NSURL *)anOutputFileURL
{
CGAffineTransform rotationTransform;
CGAffineTransform rotateTranslate;
CGSize renderSize;
switch (self.recordingOrientation)
{
// set these 3 values based on orientation
}
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:anOutputFileURL options:nil];
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
videoComposition.renderSize = renderSize;
instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
videoComposition.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetMediumQuality];
NSString* videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL * exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = videoComposition;
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(@"Export Complete");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
Hope this helps you!

iOS AVFoundation: Setting Orientation of Video

I've been struggling with several dimensions to the problem of controlling video orientation during and after capture on an iOS device. Thanks to previous answers and documentation from Apple I've been able to figure it out. However, now that I want to push some video to a web site, I'm running into particular problems. I've outlined this problem in particular in this question, and the proposed solution turns out to require orientation options to be set during video encoding.
That may be, but I have no clue how to go about doing this. The documentation around setting orientation is with respect to setting it correctly for display on the device, and I've implemented the advice found here. However, this advice does not address setting the orientation properly for non-Apple software, such as VLC or the Chrome browser.
Can anyone provide insight into how to set orientation properly on the device such that it displays correctly for all viewing software?
Finally, based on the answers of @Aaron Vegh and @Prince, I figured out my solution:
//Converting video
+(void)convertMOVToMp4:(NSString *)movFilePath completion:(void (^)(NSString *mp4FilePath))block{
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:movFilePath] options:nil];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetMediumQuality];
NSString *exportPath = [movFilePath stringByReplacingOccurrencesOfString:@".MOV" withString:@".mp4"];
NSURL * exportUrl = [NSURL fileURLWithPath:exportPath];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = [self getVideoComposition:videoAsset composition:composition];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
if (block) {
block(exportPath);
}
break;
case AVAssetExportSessionStatusFailed:
block(nil);
break;
case AVAssetExportSessionStatusCancelled:
block(nil);
break;
}
}];
}
//get current orientation
+(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset composition:( AVMutableComposition*)composition{
BOOL isPortrait_ = [self isVideoPortrait:asset];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGAffineTransform transform = videoTrack.preferredTransform;
[layerInst setTransform:transform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
CGSize videoSize = videoTrack.naturalSize;
if(isPortrait_) {
NSLog(@"video is portrait");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
//determine whether the video is portrait
+(BOOL) isVideoPortrait:(AVAsset *)asset{
BOOL isPortrait = FALSE;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = YES;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = YES;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = FALSE;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = FALSE;
}
}
return isPortrait;
}
In Apple's documentation here it states:
Clients may now receive physically rotated CVPixelBuffers in their AVCaptureVideoDataOutput -captureOutput:didOutputSampleBuffer:fromConnection: delegate callback. In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight. All 4 AVCaptureVideoOrientations are supported, and rotation is hardware accelerated. To request buffer rotation, a client calls -setVideoOrientation: on the AVCaptureVideoDataOutput's video AVCaptureConnection. Note that physically rotating buffers does come with a performance cost, so only request rotation if it's necessary. If, for instance, you want rotated video written to a QuickTime movie file using AVAssetWriter, it is preferable to set the -transform property on the AVAssetWriterInput rather than physically rotate the buffers in AVCaptureVideoDataOutput.
So the solution posted by Aaron Vegh that uses an AVAssetExportSession works, but is not needed. As the Apple docs say, if you'd like the orientation set correctly so that the video plays in non-Apple players like VLC or on the web in Chrome, you must set the video orientation on the AVCaptureConnection for the AVCaptureVideoDataOutput. If you try to set it on the AVAssetWriterInput, you will get an incorrect orientation for players like VLC and Chrome.
Here is my code where I set it during setting up the capture session:
// DECLARED AS PROPERTIES ABOVE
@property (strong,nonatomic) AVCaptureDeviceInput *audioIn;
@property (strong,nonatomic) AVCaptureAudioDataOutput *audioOut;
@property (strong,nonatomic) AVCaptureDeviceInput *videoIn;
@property (strong,nonatomic) AVCaptureVideoDataOutput *videoOut;
@property (strong,nonatomic) AVCaptureConnection *audioConnection;
@property (strong,nonatomic) AVCaptureConnection *videoConnection;
------------------------------------------------------------------
------------------------------------------------------------------
-(void)setupCaptureSession{
// Setup Session
self.session = [[AVCaptureSession alloc]init];
[self.session setSessionPreset:AVCaptureSessionPreset640x480];
// Create Audio connection ----------------------------------------
self.audioIn = [[AVCaptureDeviceInput alloc]initWithDevice:[self getAudioDevice] error:nil];
if ([self.session canAddInput:self.audioIn]) {
[self.session addInput:self.audioIn];
}
self.audioOut = [[AVCaptureAudioDataOutput alloc]init];
dispatch_queue_t audioCaptureQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
[self.audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
if ([self.session canAddOutput:self.audioOut]) {
[self.session addOutput:self.audioOut];
}
self.audioConnection = [self.audioOut connectionWithMediaType:AVMediaTypeAudio];
// Create Video connection ----------------------------------------
self.videoIn = [[AVCaptureDeviceInput alloc]initWithDevice:[self videoDeviceWithPosition:AVCaptureDevicePositionBack] error:nil];
if ([self.session canAddInput:self.videoIn]) {
[self.session addInput:self.videoIn];
}
self.videoOut = [[AVCaptureVideoDataOutput alloc]init];
[self.videoOut setAlwaysDiscardsLateVideoFrames:NO];
[self.videoOut setVideoSettings:nil];
dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
[self.videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
if ([self.session canAddOutput:self.videoOut]) {
[self.session addOutput:self.videoOut];
}
self.videoConnection = [self.videoOut connectionWithMediaType:AVMediaTypeVideo];
// SET THE ORIENTATION HERE -------------------------------------------------
[self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
// --------------------------------------------------------------------------
// Create Preview Layer -------------------------------------------
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:self.session];
CGRect bounds = self.videoView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.bounds = bounds;
previewLayer.position=CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.videoView.layer addSublayer:previewLayer];
// Start session
[self.session startRunning];
}
In case anyone else is looking for this answer as well, this is the method that I cooked up (modified a bit to simplify):
- (void)encodeVideoOrientation:(NSURL *)anOutputFileURL
{
CGAffineTransform rotationTransform;
CGAffineTransform rotateTranslate;
CGSize renderSize;
switch (self.recordingOrientation)
{
// set these 3 values based on orientation
}
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:anOutputFileURL options:nil];
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
videoComposition.renderSize = renderSize;
instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
videoComposition.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetMediumQuality];
NSString* videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL * exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = videoComposition;
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(@"Export Complete");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
This stuff is poorly documented, unfortunately, but by stringing examples together from other SO questions and reading the header files, I was able to get this working. Hope this helps anyone else!
Use the methods below to set the correct orientation in an AVMutableVideoComposition according to the video asset's orientation:
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize videoSize = videoTrack.naturalSize;
BOOL isPortrait_ = [self isVideoPortrait:asset];
if(isPortrait_) {
NSLog(@"video is portrait");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
composition.naturalSize = videoSize;
videoComposition.renderSize = videoSize;
// videoComposition.renderSize = videoTrack.naturalSize; //
videoComposition.frameDuration = CMTimeMakeWithSeconds( 1 / videoTrack.nominalFrameRate, 600);
AVMutableCompositionTrack *compositionVideoTrack;
compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst;
layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
videoComposition.instructions = [NSArray arrayWithObject:inst];
return videoComposition;
}
-(BOOL) isVideoPortrait:(AVAsset *)asset
{
BOOL isPortrait = FALSE;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = YES;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = YES;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = FALSE;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = FALSE;
}
}
return isPortrait;
}
Since iOS 5 you can request rotated CVPixelBuffers using AVCaptureVideoDataOutput documented here. This gives you the correct orientation without having to reprocess the video again with AVAssetExportSession.
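A minimal sketch of that request (assuming a configured capture session whose video data output is videoOut; Portrait is just an example):
AVCaptureConnection *connection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoOrientationSupported]) {
    // hardware-accelerated rotation of the delivered pixel buffers
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}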
Here is the latest Swift version of @Jagie's code.
extension AVURLAsset
{
func exportVideo(presetName: String = AVAssetExportPresetHighestQuality, outputFileType: AVFileType = .mp4, fileExtension: String = "mp4", then completion: @escaping (URL?) -> Void)
{
let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
do { // delete old video, if already exists
try FileManager.default.removeItem(at: outputURL)
} catch {
print(error.localizedDescription)
}
guard let sourceAudioTrack = self.tracks(withMediaType: .audio).first else { return }
let composition = AVMutableComposition()
let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
do{
try compositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: self.duration), of: sourceAudioTrack, at: CMTime.zero)
}catch(let e){
print("error: \(e)")
}
if let session = AVAssetExportSession(asset: composition, presetName: presetName) {
session.outputURL = outputURL
session.outputFileType = outputFileType
let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 0)
let range = CMTimeRangeMake(start: start, duration: duration)
session.timeRange = range
session.shouldOptimizeForNetworkUse = true
session.videoComposition = getVideoComposition(asset: self, composition: composition)
session.exportAsynchronously {
switch session.status {
case .completed:
completion(outputURL)
case .cancelled:
debugPrint("Video export cancelled.")
completion(nil)
case .failed:
let errorMessage = session.error?.localizedDescription ?? "n/a"
debugPrint("Video export failed with error: \(errorMessage)")
completion(nil)
default:
break
}
}
} else {
completion(nil)
}
}
private func getVideoComposition(asset: AVAsset, composition: AVMutableComposition) -> AVMutableVideoComposition{
let isPortrait = isVideoPortrait()
let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID:kCMPersistentTrackID_Invalid)!
let videoTrack:AVAssetTrack = asset.tracks(withMediaType: .video).first!
do{
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: asset.duration), of: videoTrack, at: CMTime.zero)
}catch(let e){
print("Error: \(e)")
}
let layerInst:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
let transform = videoTrack.preferredTransform
layerInst.setTransform(transform, at: CMTime.zero)
let inst:AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
inst.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration);
inst.layerInstructions = [layerInst]
let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
videoComposition.instructions = [inst]
var videoSize:CGSize = videoTrack.naturalSize;
if(isPortrait) {
print("video is portrait")
videoSize = CGSize(width: videoSize.height, height: videoSize.width)
}else{
print("video is landscape")
}
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(value: 1,timescale: 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
func isVideoPortrait() -> Bool{
var isPortrait = false
let tracks = self.tracks(withMediaType: .video)
if(tracks.count > 0) {
let videoTrack:AVAssetTrack = tracks.first!;
let t:CGAffineTransform = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = true;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = true;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = false;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = false;
}
}
return isPortrait;
}
}
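A quick usage sketch (movURL is a hypothetical local file URL pointing at a video):
let asset = AVURLAsset(url: movURL)
asset.exportVideo { exportedURL in
    // exportedURL is nil if the export failed or was cancelled
    print("exported to:", exportedURL?.path ?? "n/a")
}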
