Video orientation issues when exporting with AVMutableVideoComposition - iOS

Here is the function I used to export video:
- (void) videoOutput
{
//1 - Early exit if there's no video file selected
if (!self.videoAsset) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Please Load a Video Asset First"
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
return;
}
// 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
isVideoAssetPortrait_ = NO;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
isVideoAssetPortrait_ = NO;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
mainCompositionInst.renderSize = naturalSize;
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
The problem is that the first time I use this function to export a portrait video, the values of videoTransform (videoAssetTrack.preferredTransform) are:
videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0
and the variable isVideoAssetPortrait_ equals YES. Everything is right. However, after the export completed and the file was saved to the Camera Roll, I used the same function to reload the exported video. This time, videoTransform had changed:
videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0
and isVideoAssetPortrait_ equals NO. In other words, after a single export the videoTransform has changed its values (the orientation appears to go from portrait to landscape).
I have googled many questions about video orientation in AVFoundation, but I haven't found a solution yet.
Thank you for reading my long explanation. If you have any questions, please let me know.

There is no portrait/landscape concept for a video track. A track just has its dimensions and a transform that is applied to present it properly. By default, a "portrait" video is encoded exactly as the camera produces it (i.e. landscape), and a 90-degree rotation is stored so that it is presented upright.
When you export it, its orientation is not changed; it is simply re-encoded physically rotated, so no rotation is needed to present it properly.
This is why you get the identity matrix the second time (it does not mean the video was changed from portrait to landscape). This time the natural size is also swapped, so everything should be okay based on your code.
Please specify what's wrong in the latter case.
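As a quick sanity check, here is a minimal sketch (not part of the original answer) for inspecting the exported file; it assumes url is the output URL passed to the exporter above:
AVAsset *exportedAsset = [AVAsset assetWithURL:url];
AVAssetTrack *exportedTrack = [[exportedAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
// After export the rotation is baked into the pixels, so the transform should be identity
// and the natural size of a portrait source should already be taller than it is wide.
BOOL isIdentity = CGAffineTransformIsIdentity(exportedTrack.preferredTransform);
NSLog(@"identity transform: %d, naturalSize: %@", isIdentity, NSStringFromCGSize(exportedTrack.naturalSize));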

Related

Portrait Video plays in black screen

Update
Don't use the simulator to test video transforms. But I still have a codec issue,
asked in another question here. Any help is appreciated.
I am using AVURLAsset to create my videos, and they work fine as long as the videos picked from the gallery are in landscape mode. But when I use a portrait video, it either plays on a black screen (the audio plays) or the frames are twisted (see image).
Update:
I tried using a CGAffineTransform, still no luck.
Here's the code:
-(void) createVideo{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey: @YES};
_videoAsset = [[AVURLAsset alloc]initWithURL:video_url options:options];
CMTime startTimeV=CMTimeMakeWithSeconds(videoStartTime.floatValue, 1);
CMTime endTimeV=CMTimeMakeWithSeconds(videoEndTime.floatValue, 1);
CMTimeRange video_timeRange = CMTimeRangeMake(startTimeV,endTimeV);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:a_compositionVideoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
trackDimensions = [videoAssetTrack naturalSize];
int width = trackDimensions.width;
int height = trackDimensions.height;
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
// CGAffineTransform transformToApply=CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(90.0));
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
And the export:
-(void)export{
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString* fileName=[NSString stringWithFormat:@"myvideo%lld.mp4",[@(floor([[NSDate date] timeIntervalSince1970])) longLongValue]+1];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",fileName]];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = mainCompositionInst;
ShareViewController *newViewController = [self.storyboard instantiateViewControllerWithIdentifier:@"vidShare"];
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
dispatch_async(dispatch_get_main_queue(), ^{
newViewController.videoFilePath=outputFileUrl;
[self.navigationController pushViewController:newViewController animated:YES];
});
}
];
}
Landscape = OK
Portrait = FAIL
I've had this issue before. Setting the videolayerInstruction's transform to videoAssetTrack.preferredTransform works in most cases. But sometimes preferredTransform lacks a value for tx (or ty) (check CGAffineTransform in the Apple API Reference if you don't know what tx and ty are), or tx (ty) has an inaccurate value, which results in wrong video positioning (such as playing on a black screen).
So the point is: you should use preferredTransform only to determine the original video orientation, and then build the transform yourself.
Here is the code for getting the orientation of the track:
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
UIInterfaceOrientation orientation = UIInterfaceOrientationPortrait;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
orientation = UIInterfaceOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
orientation = UIInterfaceOrientationLandscapeLeft;
}
}
return orientation;
}
And the code for building the transform to apply to the videolayerInstruction:
- (CGAffineTransform)transformBasedOnAsset:(AVAsset *)asset {
UIInterfaceOrientation orientation = [AVUtilities orientationForTrack:asset];
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
CGSize naturalSize = assetTrack.naturalSize;
CGAffineTransform finalTransform = CGAffineTransformIdentity;
switch (orientation) {
case UIInterfaceOrientationLandscapeLeft:
finalTransform = CGAffineTransformMake(-1, 0, 0, -1, naturalSize.width, naturalSize.height);
break;
case UIInterfaceOrientationLandscapeRight:
finalTransform = CGAffineTransformMake(1, 0, 0, 1, 0, 0);
break;
case UIInterfaceOrientationPortrait:
finalTransform = CGAffineTransformMake(0, 1, -1, 0, naturalSize.height, 0);
break;
case UIInterfaceOrientationPortraitUpsideDown:
finalTransform = CGAffineTransformMake(0, -1, 1, 0, 0, naturalSize.width);
break;
default:
break;
}
return finalTransform;
}
Hope it works for you.
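To show how the two helpers above could be wired into the question's code, here is a rough sketch (assumptions: both helpers live on the same class as the export method, and videolayerInstruction, videoAssetTrack and mainCompositionInst are the variables from the question):
// Sketch only: use the computed transform instead of preferredTransform directly.
CGAffineTransform fixedTransform = [self transformBasedOnAsset:self.videoAsset];
[videolayerInstruction setTransform:fixedTransform atTime:kCMTimeZero];
// Choose a render size that matches the detected orientation.
UIInterfaceOrientation orientation = [self orientationForTrack:self.videoAsset];
CGSize trackSize = videoAssetTrack.naturalSize;
if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) {
mainCompositionInst.renderSize = CGSizeMake(trackSize.height, trackSize.width);
} else {
mainCompositionInst.renderSize = trackSize;
}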

Video recording issue in iOS

I am working on a video-making app.
I need to record a video in the first view and then display it in the second view. For recording the video I followed this tutorial.
I made some changes as per my needs in the didFinishRecordingToOutputFileAtURL method.
Here is my updated method:
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
else {
NSLog(#"didFinishRecordingToOutputFileAtURL error:%#",error);
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAsset *asset = [AVAsset assetWithURL:outputFileURL];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:CMTimeMake(0, 1) error:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"%#%d.mov",NSBundle.mainBundle.infoDictionary[#"CFBundleExecutable"],++videoCounter]];
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
if ([[[NSUserDefaults standardUserDefaults] stringForKey:@"orientation"] isEqualToString:@"landscape"]) {
videoAssetOrientation_ = UIImageOrientationUp;
}
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
if (![self.ratioLabel.text isEqualToString:@"16:9"]) {
renderWidth = naturalSize.width;
renderHeight = naturalSize.width;
NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
else {
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height);
}
if (![self.ratioLabel.text isEqualToString:@"16:9"])
{
CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2);
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
else
{
CGAffineTransform t2 = CGAffineTransformMakeRotation( M_PI_2);
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
}
AVCaptureDevicePosition position = [[VideoInputDevice device] position];
if (position == AVCaptureDevicePositionFront)
{
/* For front camera only */
CGAffineTransform t = CGAffineTransformMakeScale(-1.0f, 1.0f);
t = CGAffineTransformTranslate(t, -videoAssetTrack.naturalSize.width, 0);
t = CGAffineTransformRotate(t, (DEGREES_TO_RADIANS(90.0)));
t = CGAffineTransformTranslate(t, 0.0f, -videoAssetTrack.naturalSize.width);
[layerInstruction setTransform:t atTime:kCMTimeZero];
/* For front camera only */
}
[layerInstruction setOpacity:0.0 atTime:asset.duration];
instruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:instruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
AVAssetExportSession *exporter;
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
exporter.videoComposition = mainCompositionInst;
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
self.doneButton.userInteractionEnabled = YES;
if(videoAddr==nil)
{
videoAddr = [[NSMutableArray alloc] init];
}
[videoAddr addObject:exporter.outputURL];
[[PreviewLayer connection] setEnabled:YES];
AVAsset *asset = [AVAsset assetWithURL:exporter.outputURL];
NSLog(#"remaining seconds before:%f",lastSecond);
double assetDuration = CMTimeGetSeconds(asset.duration);
if (assetDuration>3.0)
assetDuration = 3.0;
lastSecond = lastSecond- assetDuration;
NSLog(#"remaining seconds after:%f",lastSecond);
self.secondsLabel.text = [NSString stringWithFormat:#"%0.1fs",lastSecond];
self.secondsLabel.hidden = NO;
NSData *data = [NSKeyedArchiver archivedDataWithRootObject:videoAddr];
[[NSUserDefaults standardUserDefaults] setObject:data forKey:#"videoAddr"];
[[NSUserDefaults standardUserDefaults] synchronize];
videoURL = outputFileURL;
flagAutorotate = NO;
self.cancelButton.hidden = self.doneButton.hidden = NO;
imgCancel.hidden = imgDone.hidden = NO;
if ([[NSUserDefaults standardUserDefaults] boolForKey:@"Vibration"])
AudioServicesPlayAlertSound(kSystemSoundID_Vibrate);
[[UIApplication sharedApplication] endIgnoringInteractionEvents];
});
}];
}
else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:#"Error" message:[NSString stringWithFormat:#"Video can not be saved\nPlease free some storage space"] delegate:self cancelButtonTitle:nil otherButtonTitles:nil, nil];
[alert show];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[alert dismissWithClickedButtonIndex:0 animated:YES];
});
}
}
}
But here is the issue: the video is not being recorded exactly as shown in the preview.
See these two screenshots:
Video recording preview
Video playing view
The reason is that your iPad's screen aspect ratio is not the same as the camera's aspect ratio.
You can modify the camera preview size by setting the videoGravity property of AVCaptureVideoPreviewLayer,
which controls how the content is displayed relative to the layer bounds:
layer.videoGravity = AVLayerVideoGravityResizeAspect;
But in that case the preview won't be fullscreen.
If you want the video to have the same aspect ratio as the fullscreen preview, you will have to crop it. The cropping process is explained here:
Exporting AVCaptureSession video in a size that matches the preview layer
Video capture with 1:1 aspect ratio in iOS
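For reference, a minimal sketch of a preview layer configured this way (the session and previewView names are placeholders, not from the original answer):
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.frame = previewView.bounds;
// Show the whole camera frame letterboxed instead of cropping it to fill the screen.
previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
// previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fullscreen, but crops
[previewView.layer addSublayer:previewLayer];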

Issue in iOS8 while merging two videos

My application merges two videos.
I am using the following code to merge two videos using an AVMutableVideoComposition:
- (void)buildSequenceComposition:(AVMutableComposition *)mixComposition andVideoComposition:(AVMutableVideoComposition *)videoComposition withAudioMix:(AVMutableAudioMix *)audioMix
{
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
// No transitions: place clips into one video track and one audio track in composition.
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
NSMutableArray*arrLayerInstruction = [NSMutableArray array];
for (i = 0; i < [_clips count]; i++ )
{
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVURLAsset *asset = [[_clips objectAtIndex:i] objectForKey:@"videoURL"];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
NSError*err = nil;
[compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&err];
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0)
{
AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]];
[exportAudioMixInputParameters setVolume:[[[_clips objectAtIndex:i] objectForKey:@"videoSoundLevel"] floatValue] atTime:nextClipStartTime];
exportAudioMixInputParameters.trackID = compositionAudioTrack.trackID;
audioMix.inputParameters=[NSArray arrayWithObject:exportAudioMixInputParameters];
}
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = clipVideoTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_= UIImageOrientationRight;
isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_ = UIImageOrientationLeft;
isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)
{
FirstAssetOrientation_ = UIImageOrientationUp;
}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0)
{
FirstAssetOrientation_ = UIImageOrientationDown;
}
CGFloat tHeight = [clipVideoTrack naturalSize].height;
CGFloat tWidth = [clipVideoTrack naturalSize].width;
if(isFirstAssetPortrait_)
{
tHeight = [clipVideoTrack naturalSize].height;
tWidth = [clipVideoTrack naturalSize].width;
CGFloat temp = tHeight;
tHeight = tWidth;
tWidth = temp;
}
CGFloat FirstAssetScaleToFitRatioWidth = [mixComposition naturalSize].width/tWidth;
CGFloat FirstAssetScaleToFitRatioHeight = [mixComposition naturalSize].height/tHeight;
CGFloat FirstAssetScaleToFitRatio = FirstAssetScaleToFitRatioWidth>FirstAssetScaleToFitRatioHeight?FirstAssetScaleToFitRatioHeight:FirstAssetScaleToFitRatioWidth;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
CGSize naturalSize = CGSizeApplyAffineTransform(CGSizeMake(tWidth, tHeight), FirstAssetScaleFactor);
CGAffineTransform transform = CGAffineTransformIdentity;
CGSize translateSize = CGSizeMake(0, 0);
if (FirstAssetScaleToFitRatioWidth<FirstAssetScaleToFitRatioHeight)
{
transform = CGAffineTransformMakeTranslation(0, ([mixComposition naturalSize].height-naturalSize.height)/2);
translateSize.height = ([mixComposition naturalSize].height-naturalSize.height)/2;
}
else if (FirstAssetScaleToFitRatioWidth==FirstAssetScaleToFitRatioHeight)
{
}
else
{
transform = CGAffineTransformMakeTranslation(([mixComposition naturalSize].width-naturalSize.width)/2, 0);
translateSize.width = ([mixComposition naturalSize].width-naturalSize.width)/2;
}
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(clipVideoTrack.preferredTransform, FirstAssetScaleFactor),transform) atTime:kCMTimeZero];
[FirstlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration)];
[FirstlayerInstruction setOpacity:1.0 atTime:nextClipStartTime];
[arrLayerInstruction addObject:FirstlayerInstruction];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, nextClipStartTime);
MainInstruction.layerInstructions = arrLayerInstruction;
videoComposition.instructions = [NSArray arrayWithObject:MainInstruction];
}
Although it works fine on iOS 7, when exporting the video with an AVVideoComposition on iOS 8 it gives me the following error:
Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" {NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}
It works fine on iOS 7 and earlier versions, but not on iOS 8.
I have also tried Apple's sample code from AVSampleEditor and it gives me the same error when exporting video on iOS 8.
Kindly help me to solve the problem. Thanks.
Check this demo code.
It's working for me.

Cropping AVAsset video with AVFoundation

I am using AVCaptureMovieFileOutput to record some video. I have the preview layer displayed using AVLayerVideoGravityResizeAspectFill which zooms in slightly. The problem I have is that the final video is larger, containing extra image that didn't fit on the screen during preview.
This is the preview and resulting video
Is there a way I can specify a CGRect that I want to cut from the video using AVAssetExportSession?
EDIT ----
When I apply a CGAffineTransformScale to the AVAssetTrack it zooms into the video, and with the AVMutableVideoComposition renderSize set to view.bounds it crops off the ends. Great, there's just one problem left: the width of the video does not stretch to the correct width, it just gets filled with black.
EDIT 2 ----
The suggested question/answer is incomplete.
Some of my code:
In my - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error method I have this to crop and resize the video.
- (void)flipAndSave:(NSURL *)videoURL withCompletionBlock:(void(^)(NSURL *returnURL))completionBlock
{
AVURLAsset *firstAsset = [AVURLAsset assetWithURL:videoURL];
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 2 - Video track
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 2.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 600), firstAsset.duration);
// 2.2 - Create an AVMutableVideoCompositionLayerInstruction for the first track
AVMutableVideoCompositionLayerInstruction *firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation firstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = firstAssetTrack.preferredTransform;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationRight;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
firstAssetOrientation_ = UIImageOrientationLeft;
isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
firstAssetOrientation_ = UIImageOrientationUp;
}
if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
firstAssetOrientation_ = UIImageOrientationDown;
}
// [firstlayerInstruction setTransform:firstAssetTrack.preferredTransform atTime:kCMTimeZero];
// [firstlayerInstruction setCropRectangle:self.view.bounds atTime:kCMTimeZero];
CGFloat scale = [self getScaleFromAsset:firstAssetTrack];
firstTransform = CGAffineTransformScale(firstTransform, scale, scale);
[firstlayerInstruction setTransform:firstTransform atTime:kCMTimeZero];
// 2.4 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// CGSize videoSize = firstAssetTrack.naturalSize;
CGSize videoSize = self.view.bounds.size;
BOOL isPortrait_ = [self isVideoPortrait:firstAsset];
if(isPortrait_) {
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
NSLog(#"%#", NSStringFromCGSize(videoSize));
mainCompositionInst.renderSize = videoSize;
// 3 - Audio track
AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// 4 - Get path
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"cutoutput.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=outputURL;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
switch ([exporter status])
{
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %# : %#", [[exporter error] localizedDescription], [exporter error]);
completionBlock(nil);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
completionBlock(nil);
break;
default: {
NSURL *outputURL = exporter.outputURL;
dispatch_async(dispatch_get_main_queue(), ^{
completionBlock(outputURL);
});
break;
}
}
}];
}
Here is my interpretation of your question: You are capturing video on a device with a screen ratio of 4:3, thus your AVCaptureVideoPreviewLayer is 4:3, but the video input device captures video in 16:9 so the resulting video is 'larger' than seen in the preview.
If you are simply looking to crop off the extra pixels not shown in the preview, check out http://www.netwalk.be/article/record-square-video-ios. This article shows how to crop the video into a square; you'll only need a few modifications to crop to 4:3 instead. I've tested this; here are the changes I made:
Once you have the AVAssetTrack for the video you will need to calculate a new height.
// we convert the captured height i.e. 1080 to a 4:3 screen ratio and get the new height
CGFloat newHeight = clipVideoTrack.naturalSize.height/3*4;
Then modify these two lines, using newHeight.
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, newHeight);
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - newHeight)/2 );
So what we've done here is set the renderSize to a 4:3 ratio - the exact dimensions are based on the input device. We then use a CGAffineTransform to translate the video position so that what we saw in the AVCaptureVideoPreviewLayer is what is rendered to our file.
Edit: If you want to put it all together and crop a video based on the device's screen ratio (3:2, 4:3, 16:9) and take the video orientation into account, we need to add a few things.
First here is the modified sample code with a few critical alterations:
// output file
NSString* docFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
NSString* outputPath = [docFolder stringByAppendingPathComponent:@"output2.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
// input file
AVAsset* asset = [AVAsset assetWithURL:outputFileURL];
AVMutableComposition *composition = [AVMutableComposition composition];
[composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// input clip
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// crop clip to screen ratio
UIInterfaceOrientation orientation = [self orientationForTrack:asset];
BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
CGFloat complimentSize = [self getComplimentSize:videoTrack.naturalSize.height];
CGSize videoSize;
if(isPortrait) {
videoSize = CGSizeMake(videoTrack.naturalSize.height, complimentSize);
} else {
videoSize = CGSizeMake(complimentSize, videoTrack.naturalSize.height);
}
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );
// rotate and position video
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGFloat tx = (videoTrack.naturalSize.width-complimentSize)/2;
if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationLandscapeRight) {
// invert translation
tx *= -1;
}
// t1: rotate and position video since it may have been cropped to screen ratio
CGAffineTransform t1 = CGAffineTransformTranslate(videoTrack.preferredTransform, tx, 0);
// t2/t3: mirror video horizontally
CGAffineTransform t2 = CGAffineTransformTranslate(t1, isPortrait?0:videoTrack.naturalSize.width, isPortrait?videoTrack.naturalSize.height:0);
CGAffineTransform t3 = CGAffineTransformScale(t2, isPortrait?1:-1, isPortrait?-1:1);
[transformer setTransform:t3 atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject: transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
// export
exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL=[NSURL fileURLWithPath:outputPath];
exporter.outputFileType=AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
NSLog(#"Exporting done!");
// added export to library for testing
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:[NSURL fileURLWithPath:outputPath]]) {
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputPath]
completionBlock:^(NSURL *assetURL, NSError *error) {
NSLog(#"Saved to album");
if (error) {
}
}];
}
}];
What we added here is a call to get the new render size of the video based on cropping its dimensions to the screen ratio. Once we crop the size down, we need to translate the position to recenter the video, so we grab its orientation to move it in the proper direction. This fixes the off-center issue we saw with UIInterfaceOrientationLandscapeLeft. Finally, the CGAffineTransforms t2 and t3 mirror the video horizontally.
And here are the two new methods that make this happen:
- (CGFloat)getComplimentSize:(CGFloat)size {
CGRect screenRect = [[UIScreen mainScreen] bounds];
CGFloat ratio = screenRect.size.height / screenRect.size.width;
// we have to adjust the ratio for 16:9 screens
if (ratio == 1.775) ratio = 1.77777777777778;
return size * ratio;
}
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
UIInterfaceOrientation orientation = UIInterfaceOrientationPortrait;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortrait;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
orientation = UIInterfaceOrientationPortraitUpsideDown;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
orientation = UIInterfaceOrientationLandscapeRight;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
orientation = UIInterfaceOrientationLandscapeLeft;
}
}
return orientation;
}
These are pretty straightforward. The only thing to note is that in the getComplimentSize: method we have to manually adjust the ratio for 16:9 screens, since the iPhone 5 (and later) resolution is mathematically just shy of true 16:9.
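To make that concrete, the numbers below are my own illustration (the iPhone 5/5s screen is 568 x 320 points), not something from the original answer:
// Illustration only: 568.0 / 320.0 = 1.775, while true 16:9 is 16.0 / 9.0 = 1.77777...,
// so getComplimentSize: bumps a 1.775 screen ratio up to 16:9 before multiplying.
CGFloat ratio = 568.0 / 320.0;                   // 1.775
if (ratio == 1.775) ratio = 1.77777777777778;    // same correction as in getComplimentSize:
CGFloat complement = 1080.0 * ratio;             // e.g. a 1080-pixel-high track -> ~1920-wide crop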
AVCaptureVideoDataOutput is a concrete subclass of AVCaptureOutput that you use to process uncompressed frames from the video being captured, or to access compressed frames.
An instance of AVCaptureVideoDataOutput produces video frames you can process using other media APIs. You can access the frames with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
Configuring a Session
You use a preset on the session to specify the image quality and resolution you want. A preset is a constant that identifies one of a number of possible configurations; in some cases the actual configuration is device-specific:
https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html
For the actual values these presets represent on various devices, see "Saving to a Movie File" and "Capturing Still Images."
If you want to set a size-specific configuration, you should check whether it is supported before setting it:
if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
session.sessionPreset = AVCaptureSessionPreset1280x720;
}
else {
// Handle the failure.
}

iOS AVFoundation: Setting Orientation of Video

I've been struggling with several dimensions of the problem of controlling video orientation during and after capture on an iOS device. Thanks to previous answers and documentation from Apple I've been able to figure it out. However, now that I want to push some video to a web site, I'm running into particular problems. I've outlined this problem in this question, and the proposed solution turns out to require orientation options to be set during video encoding.
That may be, but I have no clue how to go about doing this. The documentation around setting orientation is about setting it correctly for display on the device, and I've implemented the advice found here. However, this advice does not address setting the orientation properly for non-Apple software, such as VLC or the Chrome browser.
Can anyone provide insight into how to set orientation properly on the device such that it displays correctly for all viewing software?
Finally, based on the answers of @Aaron Vegh and @Prince, I figured out my solution:
//Converting video
+(void)convertMOVToMp4:(NSString *)movFilePath completion:(void (^)(NSString *mp4FilePath))block{
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:movFilePath] options:nil];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetMediumQuality];
NSString *exportPath = [movFilePath stringByReplacingOccurrencesOfString:@".MOV" withString:@".mp4"];
NSURL * exportUrl = [NSURL fileURLWithPath:exportPath];
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = [self getVideoComposition:videoAsset composition:composition];
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
if (block) {
block(exportPath);
}
break;
case AVAssetExportSessionStatusFailed:
block(nil);
break;
case AVAssetExportSessionStatusCancelled:
block(nil);
break;
}
}];
}
//build the video composition using the asset's orientation
+(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset composition:( AVMutableComposition*)composition{
BOOL isPortrait_ = [self isVideoPortrait:asset];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGAffineTransform transform = videoTrack.preferredTransform;
[layerInst setTransform:transform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:inst];
CGSize videoSize = videoTrack.naturalSize;
if(isPortrait_) {
NSLog(#"video is portrait ");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
//determine whether the video is portrait
+(BOOL) isVideoPortrait:(AVAsset *)asset{
BOOL isPortrait = FALSE;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = YES;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = YES;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = FALSE;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = FALSE;
}
}
return isPortrait;
}
In Apple's documentation here it states:
Clients may now receive physically rotated CVPixelBuffers in their AVCaptureVideoDataOutput -captureOutput:didOutputSampleBuffer:fromConnection: delegate callback. In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight. All 4 AVCaptureVideoOrientations are supported, and rotation is hardware accelerated. To request buffer rotation, a client calls -setVideoOrientation: on the AVCaptureVideoDataOutput's video AVCaptureConnection. Note that physically rotating buffers does come with a performance cost, so only request rotation if it's necessary. If, for instance, you want rotated video written to a QuickTime movie file using AVAssetWriter, it is preferable to set the -transform property on the AVAssetWriterInput rather than physically rotate the buffers in AVCaptureVideoDataOutput.
So the posted solution by Aaron Vegh that uses an AVAssetExportSession works, but is not needed. As the Apple docs say, if you'd like to have the orientation set correctly so that it plays in non-Apple QuickTime players like VLC, or on the web using Chrome, you must set the video orientation on the AVCaptureConnection for the AVCaptureVideoDataOutput. If you try to set it on the AVAssetWriterInput, you will get an incorrect orientation for players like VLC and Chrome.
Here is my code where I set it while setting up the capture session:
// DECLARED AS PROPERTIES ABOVE
@property (strong,nonatomic) AVCaptureDeviceInput *audioIn;
@property (strong,nonatomic) AVCaptureAudioDataOutput *audioOut;
@property (strong,nonatomic) AVCaptureDeviceInput *videoIn;
@property (strong,nonatomic) AVCaptureVideoDataOutput *videoOut;
@property (strong,nonatomic) AVCaptureConnection *audioConnection;
@property (strong,nonatomic) AVCaptureConnection *videoConnection;
------------------------------------------------------------------
------------------------------------------------------------------
-(void)setupCaptureSession{
// Setup Session
self.session = [[AVCaptureSession alloc]init];
[self.session setSessionPreset:AVCaptureSessionPreset640x480];
// Create Audio connection ----------------------------------------
self.audioIn = [[AVCaptureDeviceInput alloc]initWithDevice:[self getAudioDevice] error:nil];
if ([self.session canAddInput:self.audioIn]) {
[self.session addInput:self.audioIn];
}
self.audioOut = [[AVCaptureAudioDataOutput alloc]init];
dispatch_queue_t audioCaptureQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
[self.audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
if ([self.session canAddOutput:self.audioOut]) {
[self.session addOutput:self.audioOut];
}
self.audioConnection = [self.audioOut connectionWithMediaType:AVMediaTypeAudio];
// Create Video connection ----------------------------------------
self.videoIn = [[AVCaptureDeviceInput alloc]initWithDevice:[self videoDeviceWithPosition:AVCaptureDevicePositionBack] error:nil];
if ([self.session canAddInput:self.videoIn]) {
[self.session addInput:self.videoIn];
}
self.videoOut = [[AVCaptureVideoDataOutput alloc]init];
[self.videoOut setAlwaysDiscardsLateVideoFrames:NO];
[self.videoOut setVideoSettings:nil];
dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
[self.videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
if ([self.session canAddOutput:self.videoOut]) {
[self.session addOutput:self.videoOut];
}
self.videoConnection = [self.videoOut connectionWithMediaType:AVMediaTypeVideo];
// SET THE ORIENTATION HERE -------------------------------------------------
[self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
// --------------------------------------------------------------------------
// Create Preview Layer -------------------------------------------
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:self.session];
CGRect bounds = self.videoView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.bounds = bounds;
previewLayer.position=CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.videoView.layer addSublayer:previewLayer];
// Start session
[self.session startRunning];
}
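For completeness, the Apple note quoted above also mentions the alternative of tagging the rotation on the writer input instead of rotating buffers. Here is a rough sketch of that approach (videoSettings is a hypothetical output-settings dictionary), keeping in mind that this answer found the metadata-only route insufficient for VLC/Chrome playback:
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = YES;
// Record landscape buffers but mark them as rotated 90 degrees in the file's metadata.
writerInput.transform = CGAffineTransformMakeRotation(M_PI_2);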
In case anyone else is looking for this answer as well, this is the method that I cooked up (modified a bit to simplify):
- (void)encodeVideoOrientation:(NSURL *)anOutputFileURL
{
CGAffineTransform rotationTransform;
CGAffineTransform rotateTranslate;
CGSize renderSize;
switch (self.recordingOrientation)
{
// set these 3 values based on orientation
}
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:anOutputFileURL options:nil];
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
[layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
videoComposition.renderSize = renderSize;
instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
videoComposition.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetMediumQuality];
NSString* videoName = #"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL * exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
assetExport.outputFileType = AVFileTypeMPEG4;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = videoComposition;
[assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
// export complete
NSLog(#"Export Complete");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Failed");
NSLog(#"ExportSessionError: %#", [assetExport.error localizedDescription]);
// export cancelled
break;
}
}];
}
This stuff is poorly documented, unfortunately, but by stringing examples together from other SO questions and reading the header files, I was able to get this working. Hope this helps anyone else!
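The switch in encodeVideoOrientation: was intentionally stripped down by its author. Purely as an illustration, the three values for a portrait recording could be filled in like this; it assumes the source track's naturalSize is read before the switch, and mirrors the portrait matrix used elsewhere on this page:
// Illustration only - one plausible portrait case for the elided switch above.
CGSize naturalSize = sourceVideoTrack.naturalSize; // landscape buffer dimensions
rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
// Rotate 90 degrees, then shift right by the buffer height so the frame starts at the origin.
rotateTranslate = CGAffineTransformConcat(rotationTransform, CGAffineTransformMakeTranslation(naturalSize.height, 0));
renderSize = CGSizeMake(naturalSize.height, naturalSize.width);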
Use the methods below to set the correct orientation according to the video asset's orientation in an AVMutableVideoComposition:
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize videoSize = videoTrack.naturalSize;
BOOL isPortrait_ = [self isVideoPortrait:asset];
if(isPortrait_) {
NSLog(#"video is portrait ");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
composition.naturalSize = videoSize;
videoComposition.renderSize = videoSize;
// videoComposition.renderSize = videoTrack.naturalSize; //
videoComposition.frameDuration = CMTimeMakeWithSeconds( 1 / videoTrack.nominalFrameRate, 600);
AVMutableCompositionTrack *compositionVideoTrack;
compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst;
layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
videoComposition.instructions = [NSArray arrayWithObject:inst];
return videoComposition;
}
-(BOOL) isVideoPortrait:(AVAsset *)asset
{
BOOL isPortrait = FALSE;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = YES;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = YES;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = FALSE;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = FALSE;
}
}
return isPortrait;
}
Since iOS 5 you can request rotated CVPixelBuffers using AVCaptureVideoDataOutput, documented here. This gives you the correct orientation without having to reprocess the video with AVAssetExportSession.
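A minimal sketch of what that looks like (videoDataOutput stands in for your configured AVCaptureVideoDataOutput, not a name from the original answer):
AVCaptureConnection *connection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
if (connection.isVideoOrientationSupported) {
// Ask the capture pipeline to hand you already-rotated buffers (hardware accelerated).
connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}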
Here is the latest Swift version of @Jagie's code.
extension AVURLAsset
{
func exportVideo(presetName: String = AVAssetExportPresetHighestQuality, outputFileType: AVFileType = .mp4, fileExtension: String = "mp4", then completion: @escaping (URL?) -> Void)
{
let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
do { // delete old video, if already exists
try FileManager.default.removeItem(at: outputURL)
} catch {
print(error.localizedDescription)
}
guard let sourceAudioTrack = self.tracks(withMediaType: .audio).first else { return }
let composition = AVMutableComposition()
let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
do{
try compositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: self.duration), of: sourceAudioTrack, at: CMTime.zero)
}catch(let e){
print("error: \(e)")
}
if let session = AVAssetExportSession(asset: composition, presetName: presetName) {
session.outputURL = outputURL
session.outputFileType = outputFileType
let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 0)
let range = CMTimeRangeMake(start: start, duration: duration)
session.timeRange = range
session.shouldOptimizeForNetworkUse = true
session.videoComposition = getVideoComposition(asset: self, composition: composition)
session.exportAsynchronously {
switch session.status {
case .completed:
completion(outputURL)
case .cancelled:
debugPrint("Video export cancelled.")
completion(nil)
case .failed:
let errorMessage = session.error?.localizedDescription ?? "n/a"
debugPrint("Video export failed with error: \(errorMessage)")
completion(nil)
default:
break
}
}
} else {
completion(nil)
}
}
private func getVideoComposition(asset: AVAsset, composition: AVMutableComposition) -> AVMutableVideoComposition{
let isPortrait = isVideoPortrait()
let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID:kCMPersistentTrackID_Invalid)!
let videoTrack:AVAssetTrack = asset.tracks(withMediaType: .video).first!
do{
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: asset.duration), of: videoTrack, at: CMTime.zero)
}catch(let e){
print("Error: \(e)")
}
let layerInst:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
let transform = videoTrack.preferredTransform
layerInst.setTransform(transform, at: CMTime.zero)
let inst:AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
inst.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration);
inst.layerInstructions = [layerInst]
let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
videoComposition.instructions = [inst]
var videoSize:CGSize = videoTrack.naturalSize;
if(isPortrait) {
print("video is portrait")
videoSize = CGSize(width: videoSize.height, height: videoSize.width)
}else{
print("video is landscape")
}
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(value: 1,timescale: 30);
videoComposition.renderScale = 1.0;
return videoComposition;
}
func isVideoPortrait() -> Bool{
var isPortrait = false
let tracks = self.tracks(withMediaType: .video)
if(tracks.count > 0) {
let videoTrack:AVAssetTrack = tracks.first!;
let t:CGAffineTransform = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = true;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = true;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = false;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = false;
}
}
return isPortrait;
}
}
