iOS: rotate every frame of video

I need to rotate a video because the iPhone back camera records as if it were always in landscape-left orientation.
I need to physically rotate the video because the orientation metadata isn't honored by every browser (e.g. Chrome).
So I set up some code that reads the recorded video and saves it again after rotating it.
I used AVAssetExportSession, which seems to work quite well apart from two problems:
- the exported video has some black frames at the beginning
- some videos won't export at all.
Can anyone offer some insight into these problems?
Thanks!
Code:
NSError *error = nil;
AVAsset *asset = [AVURLAsset URLAssetWithURL:videoUrl options:nil];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:&error];
// the audio track belongs in the audio composition track (the original inserted it into the video track)
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioTrack atTime:kCMTimeZero error:&error];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGAffineTransform transformToApply = ///code to setup transformation
[layerInstruction setTransform:transformToApply atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0 atTime:asset.duration];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
videoComposition.frameDuration = CMTimeMake(1, 600); // one frame per 1/600 s
videoComposition.renderScale = 1.0;
videoComposition.renderSize = videoOutputSize;
// NB: the session is created from the original asset, while the layer instruction
// references a composition track; exporting `composition` may be what was intended.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
exportSession.outputURL = self.outputUrl;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = videoComposition;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
__block BOOL finishedExporting = NO;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    //
}];
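For reference, since the transform setup is elided above, here is one hedged sketch of how it might be built for back-camera footage recorded landscape-left and exported as portrait (it assumes `videoTrack` and a portrait `videoOutputSize` as in the question; the exact angle and offsets depend on the source orientation):
CGSize naturalSize = videoTrack.naturalSize;
// rotate 90° clockwise, then translate the rotated frame back into the render rectangle
CGAffineTransform rotate = CGAffineTransformMakeRotation(M_PI_2); // radians, not degrees
CGAffineTransform translate = CGAffineTransformMakeTranslation(naturalSize.height, 0.0);
CGAffineTransform transformToApply = CGAffineTransformConcat(rotate, translate);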

Related

AVMutableVideoComposition stops video from rendering

I am trying to get a composition of 2 videos playing simultaneously in different positions based on this tutorial: https://abdulazeem.wordpress.com/2012/04/02/video-manipulation-in-ios-resizingmerging-and-overlapping-videos-in-ios/
Here is my code:
-(AVPlayer*)create{
// assets
AVAsset* videoAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video_mov" ofType:@"mov"]]];
AVAsset* videoAsset2 = [AVAsset assetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video_mov" ofType:@"mov"]]];
// create a mutable composition and 2 tracks
AVMutableComposition* mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack* videoTrack1 = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack* videoTrack2 = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// add tracks from the video asset into the videoTrack1 (MutableCompositionTrack)
[videoTrack1 insertTimeRange: CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
[videoTrack2 insertTimeRange: CMTimeRangeMake(kCMTimeZero, videoAsset2.duration)
ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
// Add audio tracks from the audio asset to our audioCompositionTrack
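// (note: audioAsset and audioCompositionTrack are not declared anywhere in this snippet)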
[audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
// Now create the composition instructions
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// Track 1
AVMutableVideoCompositionLayerInstruction* FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack1];
CGAffineTransform Scale = videoTrack1.preferredTransform;
CGAffineTransform Move = CGAffineTransformMakeTranslation(300,200);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
// Track 2
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack2];
CGAffineTransform SecondScale = videoTrack1.preferredTransform;
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
//Now we add our created AVMutableVideoCompositionLayerInstruction objects to our AVMutableVideoCompositionInstruction in form of an array
// (note that, as written, only FirstlayerInstruction is actually included).
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
// now create a video composition with the instruction, and set our instructions
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:MainInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = mutableComposition.naturalSize;
// Create the player item
AVPlayerItem* avPlayerItem =[[AVPlayerItem alloc]initWithAsset:mutableComposition];
avPlayerItem.videoComposition = videoComposition;
// Create the player and return it
AVPlayer* avPlayer = [[AVPlayer alloc]initWithPlayerItem:avPlayerItem];
return avPlayer;
}
The video does not render.
If I remove the line:
avPlayerItem.videoComposition = videoComposition;
the video renders again, since it no longer has a custom videoComposition set on it. Several other examples I have seen appear to do this the same way.
I can only assume my transform data is incorrect, although I have tried various different numbers.
Try creating an immutable composition; Apple doesn't recommend playing a mutable composition directly:
AVComposition *immutableSnapshotOfMyComposition = [myMutableComposition copy];
// Create a player to inspect and play the composition.
AVPlayerItem *playerItemForSnapshottedComposition =
[[AVPlayerItem alloc] initWithAsset:immutableSnapshotOfMyComposition];

AVAssetExportSession sporadically causing black frame

I am attempting to crop a video I take from within my iOS app, and export it as an mp4. The original video records perfectly, but 50% of the time when I crop the video, there is a black frame at the beginning.
Here is my code.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:@{AVURLAssetPreferPreciseDurationAndTimingKey: @YES}];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CMTime duration = assetTrack.timeRange.duration;
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration) ofTrack:assetTrack atTime:kCMTimeZero error:nil];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.height);
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2); // t1 is defined earlier in the asker's code (omitted here)
[transformer setTransform:t2 atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.layerInstructions = @[transformer];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
videoComposition.instructions = @[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.videoComposition = videoComposition;
exporter.outputURL = outputURL;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    //Stuff
}];
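No answer is recorded for this one in the thread. One thing worth checking (an assumption on my part, not confirmed by the question) is that the source track's timeRange may not start at zero; inserting from kCMTimeZero would then leave an empty, black gap at the head of the composition. A sketch of that variation:
// Hypothetical fix: insert the track's own timeRange so the composition
// starts exactly where the source video does, with no empty lead-in.
NSError *insertError = nil;
[videoTrack insertTimeRange:assetTrack.timeRange
                    ofTrack:assetTrack
                     atTime:kCMTimeZero
                      error:&insertError];
if (insertError) NSLog(@"Insert failed: %@", insertError);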

AVAssetExportSession with instructions make black screen

I'm trying to combine 2 audio files and 1 video file into a single .mov file. I do it with the following code:
-(void)combineData{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVAsset *audioAsset = [AVAsset assetWithURL:_songURL];
AVAsset *audioAsset2 = [AVAsset assetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"speechRecord" stringByAppendingPathExtension:@"caf"]]]];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]] options:nil];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
/*CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(90));
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);*/
[layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[mainInstruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]];
mixComposition.naturalSize = videoTrack.naturalSize;
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderScale = 1.0;
mainCompositionInst.renderSize = videoTrack.naturalSize;
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"mergeVideo.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSLog(@"Removing old mergeVideo");
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
My first task is to deal with the 90° rotation of my new video. When I include mainCompositionInst in my code, the new video has all the sounds, but it has a black screen and is still not rotated. If I don't use any instructions it works fine. Maybe I have mistakes in my code, or maybe not. What would you advise?
I would try applying the rotation on top of the preferredTransform because it's possible that the preferredTransform translates your video so that it displays correctly on the screen.
I would try the following:
CGAffineTransform transform = videoTrack.preferredTransform;
transform = CGAffineTransformRotate(transform, DEGREES_TO_RADIANS(90));
//transform = CGAffineTransformTranslate(transform, 320, 0);
[layerInstruction setTransform:transform atTime:kCMTimeZero];
A proper working sample can be found here:
https://github.com/robovm/apple-ios-samples/blob/master/AVSimpleEditoriOS/AVSimpleEditor/AVSERotateCommand.m#L98
The thing is that you need to translate the composition to compensate for the movement caused by the rotation (since the rotation would push it out of frame), as stated in the comments of the Apple sample source code.
The correct translation to apply will be:
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
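Putting it together (a minimal sketch following the Apple sample; `layerInstruction` and `mainCompositionInst` stand for the layer instruction and video composition from the question):
// Translate by the track height first, then rotate 90°; the combined
// transform keeps the rotated video inside the render rectangle.
CGAffineTransform t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2); // 90° in radians
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
// swap width and height so the portrait result isn't cropped
mainCompositionInst.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,
                                            assetVideoTrack.naturalSize.width);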

AVAssetExportSession rotation error's when video is from camera roll

I am trying to convert a .mov video to .mp4 and correct the orientation at the same time.
The code I use below works great when recording a video with UIImagePickerController; however, if the video is selected from the camera roll, I get this error and I don't see why:
Export failed: Operation Stopped : Error
Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped"
UserInfo=0x1815ca50 {NSLocalizedDescription=Operation Stopped,
NSLocalizedFailureReason=The video could not be composed.}
I have tried first saving the video to another file, but it made no difference.
Here is the code I am using to convert the video:
- (void)convertVideoToLowQuailtyAndFixRotationWithInputURL:(NSURL*)inputURL handler:(void (^)(NSURL *outURL))handler
{
if ([[inputURL pathExtension] isEqualToString:@"MOV"])
{
NSURL *outputURL = [inputURL URLByDeletingPathExtension];
outputURL = [outputURL URLByAppendingPathExtension:#"mp4"];
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetTrack *sourceVideoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration)
ofTrack:sourceAudioTrack
atTime:kCMTimeZero error:nil];
AVMutableVideoComposition *videoComposition = [self getVideoComposition:avAsset];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality])
{
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = videoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %# : %#", [[exportSession error] localizedDescription], [exportSession error]);
handler(nil);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
handler(nil);
break;
default:
handler(outputURL);
break;
}
}];
}
} else {
handler(inputURL);
}
}
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize videoSize = videoTrack.naturalSize;
BOOL isPortrait_ = [self isVideoPortrait:asset];
if(isPortrait_) {
// NSLog(#"video is portrait ");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
composition.naturalSize = videoSize;
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMakeWithSeconds( 1 / videoTrack.nominalFrameRate, 600);
AVMutableCompositionTrack *compositionVideoTrack;
compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst;
layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
videoComposition.instructions = [NSArray arrayWithObject:inst];
return videoComposition;
}
AVFoundation Error Constant -11841 means that you have an invalid video composition. See this link if you'd like more info on the error constants:
https://developer.apple.com/library/ios/documentation/AVFoundation/Reference/AVFoundation_ErrorConstants/Reference/reference.html
While no major errors pop out at me immediately, I can suggest the following ways to narrow down the source of your problem.
First, instead of passing nil for the error parameter in these calls:
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:nil];
create an NSError object and pass the reference to it like so:
NSError *error = nil;
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration)
ofTrack:sourceVideoTrack
atTime:kCMTimeZero error:&error];
Examine the error to make sure your video and audio tracks get inserted in the composition track correctly. The error should be nil if all goes well.
if (error)
    NSLog(@"Insertion error: %@", error);
You may also want to check your AVAsset's composable, exportable, and hasProtectedContent properties. If these are not YES, YES, and NO, respectively, you may have a problem creating your new video file.
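For example (a minimal sketch, using the question's avAsset):
// Sanity-check the source asset before building the composition.
if (!avAsset.isComposable || !avAsset.isExportable || avAsset.hasProtectedContent) {
    NSLog(@"Asset not usable: composable=%d exportable=%d protected=%d",
          avAsset.isComposable, avAsset.isExportable, avAsset.hasProtectedContent);
}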
Occasionally I've seen an issue where creating a time range for an audio track does not like the 600 timescale when used in a composition with a video track. You may want to create a new CMTime for the duration (avAsset.duration) in
CMTimeRangeMake(kCMTimeZero, avAsset.duration)
only for inserting the audio track. In the new CMTime, use a timescale of 44100 (or whatever the sample rate of the audio track is). The same goes for your videoComposition.frameDuration: depending on the nominalFrameRate of your video track, your time may not be represented correctly with a 600 timescale.
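A sketch of that suggestion for the audio insert (44100 is an assumption; use the audio track's actual sample rate):
// Re-express the duration on the audio timescale before inserting the audio track.
CMTime audioDuration = CMTimeConvertScale(avAsset.duration, 44100, kCMTimeRoundingMethod_QuickTime);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioDuration)
                               ofTrack:sourceAudioTrack
                                atTime:kCMTimeZero
                                 error:&error];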
Finally, there's a helpful tool provided by Apple to debug video compositions:
https://developer.apple.com/library/mac/samplecode/AVCompositionDebugViewer/Introduction/Intro.html
It gives a visual representation of your composition and you can see where things don't look like they should.
You should definitely use the isValidForAsset:timeRange:validationDelegate: method of AVVideoComposition; it will diagnose any issues with your video composition.
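For example (a sketch against the composition from the question; passing nil as the delegate simply runs the built-in checks):
// Validate the video composition before export; returns NO when something is
// wrong (instruction gaps, bad time ranges, references to missing tracks, ...).
BOOL valid = [videoComposition isValidForAsset:composition
                                     timeRange:CMTimeRangeMake(kCMTimeZero, composition.duration)
                            validationDelegate:nil];
if (!valid) NSLog(@"videoComposition failed validation");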
I had the same problem and the solution for me was to create the layerInstruction with the AVMutableCompositionTrack instead of the original track:
layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
Try commenting out the line below and running your project:
exportSession.videoComposition = videoComposition;

Why aren't AVMutableVideoCompositionInstructions being applied?

I am using AVAssetExportSession to concatenate several video recordings into a single entry.
As the user can switch between the front and rear camera during recording, the orientation of the recordings switches, such that if a recording comprises one clip using the front-facing camera and a second using the rear, the concatenated video will show the first clip with the expected orientation and the second upside down (or vice versa).
I know I need to set the videoComposition instructions on the exporter, but I have had no joy in getting any rotations to be applied to the exported video.
In the example below, I am setting a dummy rotation transform of 90 degrees, just to try to force a change across the board, but my exported video is unaffected... I'm guessing I'm missing something obvious, but I can't see what. Can anyone see my mistake?
AVMutableComposition *composition = [AVMutableComposition composition];
AVAssetExportSession *assetExport = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetPassthrough];
NSMutableArray *instructions = [NSMutableArray new];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CGAffineTransform transformToApply = CGAffineTransformMakeRotation(90.0);
NSError *error = nil;
for (NSURL *path in filePaths) {
NSLog(@"%@", path);
AVURLAsset *asset =[AVURLAsset assetWithURL:path];
CMTime atTime = composition.duration;
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:atTime error:&error];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
[layerInstruction setTransform:transformToApply atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction * videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoTrackInstruction.timeRange = CMTimeRangeMake(atTime, asset.duration);
videoTrackInstruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
[instructions addObject:videoTrackInstruction];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:atTime error:&error];
}
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = instructions;
videoComposition.frameDuration = CMTimeMake(1, compositionVideoTrack.naturalTimeScale);
videoComposition.renderSize = compositionVideoTrack.naturalSize;
assetExport.videoComposition = videoComposition;
Schoolboy error: the transform should be in radians, not degrees - https://developer.apple.com/library/mac/#documentation/graphicsimaging/reference/CGAffineTransform/Reference/reference.html
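So the dummy 90° transform should be written as (sketch):
// CGAffineTransformMakeRotation expects radians; M_PI_2 is 90°.
CGAffineTransform transformToApply = CGAffineTransformMakeRotation(M_PI_2);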
