Record video while playing another video and export - iOS

I am using UIImagePickerController to record a video, and AVPlayer to play a video picked from the library, adding an AVPlayerLayer to cameraOverlayView so I can see the video while recording.
But I need to export a video that merges the two videos (the recorded video and the library video). The resulting video should look the same as the view I see while recording (both videos included).
Please help me find a way to do that.

Finally, I found the solution. It was simpler than I thought; AVFoundation does everything my requirements need.
//Load videos using AVURLAsset
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"file1" ofType:@"mp4"]] options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"file2" ofType:@"mp4"]] options:nil];
//Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
//Here we create the first AVMutableCompositionTrack. See how we add a new track to our AVMutableComposition.
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
//Now we set the length of firstTrack equal to the length of firstAsset, and add firstAsset to our newly created track at kCMTimeZero so the video plays from the start of the track.
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Now we repeat the same process for the second track. Note that the new track also starts at kCMTimeZero, meaning both tracks will play simultaneously.
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Create the instruction. The time range must span the longer of the two assets, otherwise the export can fail (see the related answer below).
AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
//Create a layer instruction for the first video
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform Scale = CGAffineTransformMakeScale(0.7f, 0.7f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(200, 120);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale, Move) atTime:kCMTimeZero];
//Create a layer instruction for the second video
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.2f, 1.2f);
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0, 0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale, SecondMove) atTime:kCMTimeZero];
//Add the layer instructions to the composition instruction
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];
//Add the composition instruction to a video composition
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(640, 480);
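Note that the composition above carries only the two video tracks, so the exported file will be silent. If you also want sound, add an AVMediaTypeAudio composition track the same way. A minimal sketch, assuming firstAsset actually contains an audio track:
//Hedged sketch: carry over the first asset's audio (assumes firstAsset has an audio track)
NSArray *audioAssetTracks = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
if (audioAssetTracks.count > 0) {
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[audioAssetTracks objectAtIndex:0] atTime:kCMTimeZero error:nil];
}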
And if you want to play the video composition:
AVPlayerItem * newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = MainCompositionInst;
self.mPlayer = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
[self.mPlayer addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:nil];
AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:self.mPlayer];
self.mPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
layer.frame = CGRectMake(0, 0, 640, 480);
[self.view.layer addSublayer: layer];
[self.mPlayer play];
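Since the code above registers a KVO observer on the player's status, you also need the matching callback; NSObject's default implementation of -observeValueForKeyPath:ofObject:change:context: throws if a change notification arrives unhandled. A minimal sketch (the logging is an assumption):
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if (object == self.mPlayer && [keyPath isEqualToString:@"status"]) {
        //React to the player becoming ready or failing
        if (self.mPlayer.status == AVPlayerStatusFailed) {
            NSLog(@"Player failed: %@", self.mPlayer.error);
        }
    }
}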
And if you want to export the video composition to the Documents directory:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"outputFile_%@.mp4", @"Main"]];
NSURL *outputFileUrl = [NSURL fileURLWithPath:path];
exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
exportSession.videoComposition = MainCompositionInst;
exportSession.outputFileType = AVFileTypeMPEG4; //i.e. @"public.mpeg-4"
exportSession.outputURL = outputFileUrl;
NSLog(@"duration = %f", CMTimeGetSeconds(mixComposition.duration));
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        case AVAssetExportSessionStatusExporting:
            NSLog(@"Exporting...");
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export completed, wohooo!! \n Check %@", path);
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"Waiting...");
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Cancelled");
            break;
        case AVAssetExportSessionStatusUnknown:
            NSLog(@"Unknown");
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed with error: %@, tried to save to %@", exportSession.error, path);
            break;
    }
}];
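One pitfall with the export code above: AVAssetExportSession fails if a file already exists at outputURL. Remove any previous export before kicking off the session (assuming overwriting is what you want):
//Delete a leftover file at the output path, otherwise the export will fail
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}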
Finally, if you want to track the progress:
//Add a NSTimer for refresh checking the progress of AVAssetExportSession
exportProgressBarTimer = [NSTimer scheduledTimerWithTimeInterval:.1 target:self selector:@selector(updateExportDisplay) userInfo:nil repeats:YES];
And show the progress
- (void)updateExportDisplay {
NSLog(#"Exporting: %f", exportSession.progress);
if (exportSession.progress > .99) {
[exportProgressBarTimer invalidate];
}
}
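The polling timer is needed because the progress property of AVAssetExportSession is documented as not key-value observable. If you want a visible indicator rather than a log line, the same callback can drive a UIProgressView; a sketch, assuming a progressView property (hypothetical) exists on the controller:
- (void)updateExportDisplay {
    //progress runs from 0.0 to 1.0; progressView is a hypothetical UIProgressView outlet
    self.progressView.progress = exportSession.progress;
    if (exportSession.progress > .99) {
        [exportProgressBarTimer invalidate];
    }
}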

Related

Video Could Not Be Composed when using AVAssetExportPresetHighestQuality

I am trying to create an app that splices together a number of videos. The issue seems to be that when I combine instructions with AVAssetExportPresetHighestQuality, I get an error stating that:
Export failed -> Reason: The video could not be composed., User Info:
{
NSLocalizedDescription = "Operation Stopped";
NSLocalizedFailureReason = "The video could not be composed.";
NSUnderlyingError = "Error Domain=NSOSStatusErrorDomain Code=-17390 \"(null)\""; }
If I change it to AVAssetExportPresetPassthrough it works OK, but the instructions are ignored. Does anyone know what the issue might be with the following code? I'm nearly there, but this issue is holding me up.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime insertTime = kCMTimeZero;
NSMutableArray *arrayInstructions = [[NSMutableArray alloc] init];
int i = 0;
for (NSMutableDictionary * dict in self.arraySelectedAssets) {
AVAsset *asset = [dict objectForKey:@"avasset"];
//[self orientationForTrack:asset];
AVAssetTrack* videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack* audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:insertTime error:nil];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssetTrack atTime:insertTime error:nil];
AVMutableVideoCompositionInstruction *firstVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
// Set the time range of the first instruction to span the duration of the first video track.
firstVideoCompositionInstruction.timeRange = CMTimeRangeMake(insertTime, videoAssetTrack.timeRange.duration);
AVMutableVideoCompositionLayerInstruction* firstVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
CGAffineTransform translateToCenter = CGAffineTransformMakeTranslation( 0,-1334);
CGAffineTransform rotateBy90Degrees = CGAffineTransformMakeRotation( M_PI_2);
CGAffineTransform shrinkWidth = CGAffineTransformMakeScale(0.1, 0.1); // needed because Apple does a "stretch" by default - really, we should find and undo apple's stretch - I suspect it'll be a CALayer defaultTransform, or UIView property causing this
CGAffineTransform finalTransform = CGAffineTransformConcat( shrinkWidth, CGAffineTransformConcat(translateToCenter, rotateBy90Degrees) );
[firstVideoLayerInstruction setTransform:finalTransform atTime:kCMTimeZero];
firstVideoCompositionInstruction.layerInstructions = @[firstVideoLayerInstruction];
[arrayInstructions addObject:firstVideoCompositionInstruction];
insertTime = CMTimeAdd(insertTime, videoAssetTrack.timeRange.duration);
i = i + 1;
}
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = arrayInstructions;
mutableVideoComposition.renderSize = CGSizeMake(1334, 750);
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
self.combinedVideoURL = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create progress timer
self.timerExporter = [NSTimer scheduledTimerWithTimeInterval:0.01f
target:self
selector:@selector(exporterProgress)
userInfo:nil
repeats:YES];
// 6 - Create exporter
self.exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
self.exporter.outputURL = self.combinedVideoURL;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.shouldOptimizeForNetworkUse = YES;
self.exporter.videoComposition = mutableVideoComposition;
self.exporter.videoComposition = mutableVideoComposition;
[self.exporter exportAsynchronouslyWithCompletionHandler:^{
[self.timerExporter invalidate];
switch (self.exporter.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed -> Reason: %@, User Info: %@",
self.exporter.error.localizedFailureReason,
self.exporter.error.userInfo.description);
[self showError:self.exporter.error.localizedFailureReason];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export cancelled");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export finished");
dispatch_async(dispatch_get_main_queue(), ^{
self.labelProgressText.text = [NSString stringWithFormat:@"%@ (100%%)", NSLocalizedString(@"Combining The Videos", nil)];
[self applyTheFilter];
});
break;
default:
break;
}
}];
This is not the answer you're looking for, I'm afraid. I had the same problem transforming and exporting a single video - AVAssetExportPresetHighestQuality would work for some assets and not for others.
My guess at the time was that the assets that didn't work weren't of a high enough size/framerate/quality to render using AVAssetExportPresetHighestQuality.
As you did, I ended up using AVAssetExportPresetPassthrough. In your case the end result will presumably be that all the assets you're splicing together will be rendered in their original format.
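One way to guard against this at runtime is to ask AVFoundation up front whether a given preset/asset/output-type combination can work, and fall back to passthrough when it can't. A sketch using an API available since iOS 6 (the fallback choice, and reusing mixComposition from the question, are assumptions):
//Check preset compatibility before building the export session
[AVAssetExportSession determineCompatibilityOfExportPreset:AVAssetExportPresetHighestQuality
                                                 withAsset:mixComposition
                                            outputFileType:AVFileTypeQuickTimeMovie
                                         completionHandler:^(BOOL compatible) {
    NSString *preset = compatible ? AVAssetExportPresetHighestQuality
                                  : AVAssetExportPresetPassthrough;
    //Build the AVAssetExportSession with `preset` here
}];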

Is it possible to merge two video files to one file, one screen in iOS?

I'm new to video programming. I'm trying to practice it, but I'm having trouble merging two video files into one.
The merge I mean is as follows:
I have a first video and a second video, and I want them merged so both play on one screen at the same time (the images illustrating this are omitted here).
I didn't want to use two video players, because I want to send the merged video file to someone. I searched all day to solve this, but I couldn't find out how.
I wrote code referencing this link, but it shows the first video only, not the merged result.
My Code:
NSURL *firstURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video1" ofType:@"mp4"]];
AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:firstURL options:nil];
NSURL *secondURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video2" ofType:@"mp4"]];
AVURLAsset *secondAsset = [[AVURLAsset alloc] initWithURL:secondURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
[secondTrack setPreferredTransform:CGAffineTransformMakeScale(0.25f,0.25f)];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
NSLog(@"%@", outputFilePath);
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = AVFileTypeQuickTimeMovie; //i.e. @"com.apple.quicktime-movie"
assetExport.outputURL = outputFileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"AVAssetExportSessionStatusCompleted");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"AVAssetExportSessionStatusWaiting");
break;
default:
break;
}
}
];
What am I missing? I don't know how I can approach this to solve the problem.
Appreciate any ideas.
Thanks.
Edit:
I wrote new code based on a link matt posted (thanks, matt), but when I tried to export it, only the first video was exported, not both together. :(
My new code is:
NSURL *originalVideoURL1 = [[NSBundle mainBundle] URLForResource:@"video1" withExtension:@"mov"];
NSURL *originalVideoURL2 = [[NSBundle mainBundle] URLForResource:@"video2" withExtension:@"mov"];
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:originalVideoURL1 options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:originalVideoURL2 options:nil];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; //[AVMutableComposition composition];
NSError *error = nil;
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if(error) {
NSLog(#"firstTrack error!!!. %#", error.localizedDescription);
}
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if(error) {
NSLog(#"secondTrack error!!!. %#", error.localizedDescription);
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.7, 0.7);
CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
[firstLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2, 1.5);
CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
[secondLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[firstLayerInstruction, secondLayerInstruction];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(640, 480);
AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = mainCompositionInst;
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
AVPlayerLayer *playerLayer =[AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];
[player seekToTime:kCMTimeZero];
[player play]; // play is Good!!
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempS2 = [documentsDirectory stringByAppendingPathComponent:@"FinalVideo.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:tempS2])
{
[[NSFileManager defaultManager] removeItemAtPath:tempS2 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: tempS2];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
NSLog(#"%#", [exportSession supportedFileTypes]);
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status == AVAssetExportSessionStatusFailed) {
NSLog(@"failed");
}
else {
NSLog(@"AudioLocation : %@", tempS2);
}
}];
How can I export my mixComposition and layerInstruction together?
Please give me some more ideas.
Thanks.
With reference to the code in your second edit, just as you've told the AVPlayerItem about your AVMutableVideoComposition, you need to tell the AVAssetExportSession too:
exportSession.videoComposition = mainCompositionInst;
// exportAsynchronouslyWithCompletionHandler etc
N.B. make sure you choose the longer of the two track durations when setting your instruction duration:
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
AVPlayer doesn't mind if you get this wrong, but AVAssetExportSession does and will return an AVErrorInvalidVideoComposition (-11841) error.
N.B. 2 Your AVPlayer isn't actually going out of scope, but it makes me nervous when I look at it. I'd assign it to a property if I were you.
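For example (a sketch; the property name is an assumption):
@property (nonatomic, strong) AVPlayer *player; //retained for the controller's lifetime
// ...then, in the method:
self.player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];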

AVMutableVideoComposition stops video from rendering

I am trying to get a composition of 2 videos playing simultaneously in different positions based on this tutorial: https://abdulazeem.wordpress.com/2012/04/02/video-manipulation-in-ios-resizingmerging-and-overlapping-videos-in-ios/
Here is my code:
-(AVPlayer*)create{
// assets
AVAsset* videoAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video_mov" ofType:@"mov"]]];
AVAsset* videoAsset2 = [AVAsset assetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video_mov" ofType:@"mov"]]];
// create a mutable composition and 2 tracks
AVMutableComposition* mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack* videoTrack1 = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack* videoTrack2 = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// add tracks from the video asset into the videoTrack1 (MutableCompositionTrack)
[videoTrack1 insertTimeRange: CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
[videoTrack2 insertTimeRange: CMTimeRangeMake(kCMTimeZero, videoAsset2.duration)
ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
// Add audio tracks from the audio asset to our audioCompositionTrack
[audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
// Now create the composition instruction
AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// Track 1
AVMutableVideoCompositionLayerInstruction* FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack1];
CGAffineTransform Scale = videoTrack1.preferredTransform;
CGAffineTransform Move = CGAffineTransformMakeTranslation(300,200);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
// Track 2
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack2];
CGAffineTransform SecondScale = videoTrack1.preferredTransform;
CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];
//Now we add our 2 created AVMutableVideoCompositionLayerInstruction objects to our AVMutableVideoCompositionInstruction in form of an array.
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
// now create a video composition with the instruction, and set our instructions
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject:MainInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = mutableComposition.naturalSize;
// Create the player item
AVPlayerItem* avPlayerItem =[[AVPlayerItem alloc]initWithAsset:mutableComposition];
avPlayerItem.videoComposition = videoComposition;
// Create the player and return it
AVPlayer* avPlayer = [[AVPlayer alloc]initWithPlayerItem:avPlayerItem];
return avPlayer;
}
The video does not render.
If I remove the line:
avPlayerItem.videoComposition = videoComposition;
The video renders again, since it has no custom videoComposition set on it. Several other examples I have seen appear to be doing this in the same way.
I can only assume my transform data is incorrect, although I have tried various different numbers.
Try creating an immutable composition; Apple doesn't recommend playing a mutable composition directly:
AVComposition *immutableSnapshotOfMyComposition = [myMutableComposition copy];
// Create a player to inspect and play the composition.
AVPlayerItem *playerItemForSnapshottedComposition =
[[AVPlayerItem alloc] initWithAsset:immutableSnapshotOfMyComposition];
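The same snapshot pattern can be applied if you go on to export: hand the immutable copy to the export session rather than the mutable original (a sketch under the same assumptions):
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:[myMutableComposition copy]
presetName:AVAssetExportPresetHighestQuality];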

Build AVMutableComposition from AVURLAssets in loop

I'm working on an app that will need to concat a group of videos recorded from camera. Ultimately I'll have an array of URL's to work with but I can't figure out how to get two movie assets to concat properly. Here's some standalone code:
- (void)buildComposition {
NSString *path1 = [[NSBundle mainBundle] pathForResource:@"IMG_1049" ofType:@"MOV"];
NSString *path2 = [[NSBundle mainBundle] pathForResource:@"IMG_1431" ofType:@"MOV"];
NSURL *url1 = [NSURL fileURLWithPath:path1];
NSURL *url2 = [NSURL fileURLWithPath:path2];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
NSMutableArray *layerInstructions = [NSMutableArray array];
CGSize renderSize = CGSizeZero;
NSUInteger count = 0;
for (NSURL *url in @[url1, url2]) {
NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey: @(YES) };
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
CMTimeRange editRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(1.0, 600));
NSError *error = nil;
CMTime insertionTime = composition.duration;
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack = videoTracks.firstObject;
AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompositionTrack insertTimeRange:editRange ofTrack:videoTrack atTime:insertionTime error:&error];
if (count == 0) {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.6, 0.6);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
else {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.9, 0.9);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
// set the render size
CGRect transformed = CGRectApplyAffineTransform(CGRectMakeWithCGSize(videoTrack.naturalSize), videoTrack.preferredTransform);
renderSize = CGSizeUnion(renderSize, transformed.size);
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *audioTrack = audioTracks.firstObject;
AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompositionTrack insertTimeRange:editRange ofTrack:audioTrack atTime:insertionTime error:&error];
++count;
}
// set the composition instructions
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
compositionInstruction.layerInstructions = layerInstructions;
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:composition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = @[compositionInstruction];
videoComposition.renderSize = renderSize;
// export the composition
NSTimeInterval time = [NSDate timeIntervalSinceReferenceDate];
NSString *filename = [[NSString stringWithFormat:@"video-export-%f", time] stringByAppendingPathExtension:@"mov"];
NSString *pathTo = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@", filename]];
NSURL *fileUrl = [NSURL fileURLWithPath:pathTo];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.videoComposition = videoComposition;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.outputURL = fileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"\n\nFailed: %@\n\n", assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"\n\nCancelled: %@\n\n", assetExport.error);
break;
default:
NSLog(@"\n\nExported: %@\n\n", fileUrl);
break;
}
}];
}
What I expect to happen is the first video plays for 1 second at 60% scale, and then the second video plays for 1 second at 90% scale.
What actually happens is the first video plays at both 60% and 90% at the start of the video. After 1 second, the video goes black but the audio plays correctly.
Any ideas? Thanks!
Figured it out, for anyone who is curious: in my layer instructions, I was mistakenly building them from the AVURLAsset's videoTrack, not the AVMutableComposition's compositionTrack!
This line:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
Should be:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];

Cropping a video with AVAsset/AVCaptureSession

I'm trying to crop a video into a square. I already have an AVCaptureSession set up that produces a rectangular-resolution .mov file, so I'm just trying to crop it with some code I found here. Here is my code:
-(void)cropVideo:(NSURL*)videoURL{
// input file
AVAsset* asset = [AVAsset assetWithURL:videoURL];
AVMutableComposition *composition = [AVMutableComposition composition];
[composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// make it square
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );
// rotate to portrait
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
// export
exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
NSURL *tmpDirURL = [NSURL fileURLWithPath:NSTemporaryDirectory() isDirectory:YES];
NSURL *fileURL = [[tmpDirURL URLByAppendingPathComponent:@"final"] URLByAppendingPathExtension:@"mov"];
exporter.outputURL=fileURL;
exporter.outputFileType=AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
NSLog(#"Exporting done!");
NSLog(#"exporter's outputURL is %#", exporter.outputURL);
self.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:fileURL];
[self.view addSubview:self.moviePlayer.view];
self.moviePlayer.fullscreen = YES;
[self.moviePlayer play];
}];}
However, after running the exporter, the movie doesn't play in self.moviePlayer. It just shows "Loading...". Note that I tested moviePlayer with the original videoURL parameter, and that plays fine. Any ideas on what I'm doing wrong?
Thanks
You need:
dispatch_async(dispatch_get_main_queue(), ^{
//play movie here
});
in the completion handler.
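Put together, the completion handler might look like this (a sketch; the status check is an addition to the original code):
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        //UIKit (and MPMoviePlayerController) must be driven from the main thread
        dispatch_async(dispatch_get_main_queue(), ^{
            self.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:fileURL];
            [self.view addSubview:self.moviePlayer.view];
            self.moviePlayer.fullscreen = YES;
            [self.moviePlayer play];
        });
    } else {
        NSLog(@"Export failed: %@", exporter.error);
    }
}];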
