I'm trying to create a video by combining two videos. It works properly until I try to add a transition between them; then it gives me an unplayable video file.
The code:
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString* a_inputFileName = @"v1.mp4";
NSString* a_inputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, a_inputFileName];
NSURL* a_inputFileUrl = [NSURL fileURLWithPath:a_inputFilePath];
NSString* b_inputFileName = @"v2.mp4";
NSString* b_inputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, b_inputFileName];
NSURL* b_inputFileUrl = [NSURL fileURLWithPath:b_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* a_videoAsset = [[AVURLAsset alloc]initWithURL:a_inputFileUrl options:nil];
CMTimeRange a_timeRange = CMTimeRangeMake(kCMTimeZero,a_videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:a_timeRange ofTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
// Overlap the second clip by one second for the transition
nextClipStartTime = CMTimeSubtract(nextClipStartTime, CMTimeMake(1, 1));
AVURLAsset* b_videoAsset = [[AVURLAsset alloc]initWithURL:b_inputFileUrl options:nil];
CMTimeRange b_timeRange = CMTimeRangeMake(kCMTimeZero, b_videoAsset.duration);
[a_compositionVideoTrack insertTimeRange:b_timeRange ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVMutableVideoCompositionInstruction * instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
AVMutableVideoCompositionLayerInstruction * firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
[firstlayerInstruction setOpacityRampFromStartOpacity:1.0f toEndOpacity:0.0f timeRange:CMTimeRangeMake(CMTimeMake(3,1), CMTimeMake(1, 1))];
AVMutableVideoCompositionLayerInstruction * secondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
[secondlayerInstruction setOpacity:1.0f atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction,secondlayerInstruction, nil];
AVMutableVideoComposition * videoComp = [AVMutableVideoComposition videoComposition];
videoComp.instructions = [NSArray arrayWithObject:instruction];
[videoComp setRenderSize:CGSizeMake(320, 480)];
[videoComp setFrameDuration:CMTimeMake(1, 30)];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.videoComposition = videoComp;
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
[self saveVideoToAlbum:outputFilePath];
}];
If I remove this line: _assetExport.videoComposition = videoComp;
it works properly, but there is no transition between the videos.
Edit:
I found out that the export session's status is stuck on "exporting" and it never finishes, and I don't know why.
Thanks in advance.
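For reference, the usual cross-fade arrangement uses two composition tracks that overlap in time, and builds the layer instructions from those composition tracks rather than from the source asset tracks; referencing the asset tracks is a likely cause of the stalled export here. A minimal sketch under those assumptions (reusing a_videoAsset, b_videoAsset, mixComposition, and instruction from the code above):
AVMutableCompositionTrack *trackA = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *trackB = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[trackA insertTimeRange:CMTimeRangeMake(kCMTimeZero, a_videoAsset.duration) ofTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] atTime:kCMTimeZero error:nil];
// Start the second clip one second before the first clip ends.
CMTime overlapStart = CMTimeSubtract(a_videoAsset.duration, CMTimeMake(1, 1));
[trackB insertTimeRange:CMTimeRangeMake(kCMTimeZero, b_videoAsset.duration) ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] atTime:overlapStart error:nil];
// Layer instructions must reference the composition tracks, not the asset tracks.
AVMutableVideoCompositionLayerInstruction *layerA = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trackA];
[layerA setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:CMTimeRangeMake(overlapStart, CMTimeMake(1, 1))];
AVMutableVideoCompositionLayerInstruction *layerB = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trackB];
instruction.layerInstructions = @[layerA, layerB];
It's also worth switching on _assetExport.status in the completion handler so a failure (and _assetExport.error) actually surfaces instead of unconditionally calling saveVideoToAlbum:.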
Related
When I blend two videos with AVAssetExportSession on iOS 9, it works perfectly, but when I blend with AVAssetExportSession on iOS 10, it does not work. Please help me if anyone knows the reason. Thank you.
The code actually works on iPhone 6s and earlier, but not on iPhone 7.
For example:
-(void) blendVideoOverVideo:(NSURL*)mainVideoUrl andBlendVideoUrl:(NSURL*)liveEffectUrl
{
AVURLAsset *mainVideoUrlAsset =[AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
// AVPlayerItem* mainVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:mainVideoUrlAsset];
AVAssetTrack* mainVideoTrack =[[mainVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize mainVideoSize = [mainVideoTrack naturalSize];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:mainVideoUrl options:nil];
if(mainVideoUrl!=nil)
{
if([[audioAsset tracksWithMediaType:AVMediaTypeAudio] count])
{
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration )
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
}
}
AVMutableCompositionTrack *mainVideoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[mainVideoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration) ofTrack:mainVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *mainVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainVideoCompositionTrack];
// Second track
AVURLAsset *blendVideoUrlAsset =[AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
// AVPlayerItem* blendVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:blendVideoUrlAsset];
AVAssetTrack* blendVideoTrack =[[blendVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize blendVideoSize = [blendVideoTrack naturalSize];
AVMutableCompositionTrack *blendVideoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime oldTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
// CMTime timeNew = CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration)/2, blendVideoUrlAsset.duration.timescale);
[blendVideoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, oldTime) ofTrack:blendVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *blendVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendVideoCompositionTrack];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration);
CGAffineTransform Scale = CGAffineTransformMakeScale(1.0f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(0,0);
[mainVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
[blendVideoLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];
// [blendVideoLayerInstruction setOpacity:0.0 atTime:timeNew];
CGFloat cropOffX = 1.0;
CGFloat cropOffY = 1.0;
cropOffY = mainVideoSize.height/blendVideoSize.height;
if(blendVideoSize.width>mainVideoSize.width)
{
cropOffX = mainVideoSize.width/blendVideoSize.width;
}
Scale = CGAffineTransformMakeScale(cropOffX,cropOffY);
Move = CGAffineTransformMakeTranslation(0.1, 0.1);
[blendVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:blendVideoLayerInstruction,mainVideoLayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = mainVideoSize;
NSString *fullName = [NSString stringWithFormat:@"video%d.mov", arc4random() % 1000];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:fullName];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
CMTime start;
CMTime duration;
NSLog(#"Main Video dura %f blend dura - %f, ",CMTimeGetSeconds(mainVideoUrlAsset.duration),CMTimeGetSeconds(blendVideoUrlAsset.duration));
if(CMTimeGetSeconds(blendVideoUrlAsset.duration)>CMTimeGetSeconds(mainVideoUrlAsset.duration))
{
start = CMTimeMakeWithSeconds(0.0, blendVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
}
// A plain else avoids leaving start/duration uninitialized when the two durations are equal.
else
{
start = CMTimeMakeWithSeconds(0.0, mainVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), mainVideoUrlAsset.duration.timescale);
}
CMTimeRange range = CMTimeRangeMake(start, duration);
exporter.timeRange = range;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
__weak typeof(self) weakSelf = self;
[weakSelf createMBCircularProgress:exporter];
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf exportDidFinish:exporter];
});
}];
}
This code runs on iOS 9, and on iOS 10 on the iPhone 6s, 6, 5, etc., but it does not run on the iPhone 7 simulator.
The solution is to use the latest Xcode 8.1 beta.
It's a bug, and it's fixed in the Xcode 8.1 beta.
Xcode 8.1 beta [AVAssetExportSession allExportPresets] iPhone 7 Simulator now returns
AVAssetExportPreset1920x1080,
AVAssetExportPresetLowQuality,
AVAssetExportPresetAppleM4A,
AVAssetExportPreset640x480,
AVAssetExportPreset3840x2160,
AVAssetExportPresetHighestQuality,
AVAssetExportPreset1280x720,
AVAssetExportPresetMediumQuality,
AVAssetExportPreset960x540
Xcode 8.0 [AVAssetExportSession allExportPresets] iPhone 7 Simulator returns an empty array
AVAssetExportSession can be nil, so check for nil before working with it.
https://developer.apple.com/library/content/qa/qa1730/_index.html
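A minimal defensive check along those lines (a sketch; the variable names are placeholders):
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
if (exporter == nil) {
// On a simulator/OS combination that lacks the requested preset, the session comes back nil.
NSLog(@"Could not create export session; available presets: %@", [AVAssetExportSession allExportPresets]);
return;
}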
I'm new to video programming. I'm trying to practice by merging two video files into one, but I'm having trouble.
The merge I mean is as follows:
I have a first video like this.
The second video is also like this.
I want them to merge like this.
I didn't want to use two video players, because I want to send the merged video file to someone. I searched all day to solve this, but I couldn't find how.
I wrote code referencing this link, but it shows the first video only, not the merged result.
My code:
NSURL *firstURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video1" ofType:@"mp4"]];
AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:firstURL options:nil];
NSURL *secondURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video2" ofType:@"mp4"]];
AVURLAsset *secondAsset = [[AVURLAsset alloc] initWithURL:secondURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
[secondTrack setPreferredTransform:CGAffineTransformMakeScale(0.25f,0.25f)];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
NSLog(@"%@", outputFilePath);
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = @"com.apple.quicktime-movie";
assetExport.outputURL = outputFileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"AVAssetExportSessionStatusCompleted");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"AVAssetExportSessionStatusWaiting");
break;
default:
break;
}
}
];
What am I missing? I don't know how to approach this problem.
I'd appreciate any ideas.
Thanks.
Edit:
I wrote new code referencing a link matt posted (thanks, matt), but when I tried to export it, only the first video was exported, not the two together. :(
My new code:
NSURL *originalVideoURL1 = [[NSBundle mainBundle] URLForResource:@"video1" withExtension:@"mov"];
NSURL *originalVideoURL2 = [[NSBundle mainBundle] URLForResource:@"video2" withExtension:@"mov"];
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:originalVideoURL1 options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:originalVideoURL2 options:nil];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; //[AVMutableComposition composition];
NSError *error = nil;
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
NSLog(@"firstTrack error: %@", error.localizedDescription);
}
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
NSLog(@"secondTrack error: %@", error.localizedDescription);
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.7, 0.7);
CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
[firstLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2, 1.5);
CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
[secondLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[firstLayerInstruction, secondLayerInstruction];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(640, 480);
AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = mainCompositionInst;
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
AVPlayerLayer *playerLayer =[AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];
[player seekToTime:kCMTimeZero];
[player play]; // playback works fine
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempS2 = [documentsDirectory stringByAppendingPathComponent:@"FinalVideo.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:tempS2])
{
[[NSFileManager defaultManager] removeItemAtPath:tempS2 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: tempS2];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
NSLog(#"%#", [exportSession supportedFileTypes]);
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status == AVAssetExportSessionStatusFailed) {
NSLog(@"failed");
}
else {
NSLog(@"AudioLocation : %@", tempS2);
}
}];
How can I export my mixComposition and layerInstruction together?
Please give me a few more ideas.
Thanks.
With reference to the code in your second edit, just as you've told the AVPlayerItem about your AVMutableVideoComposition, you also need to tell the AVAssetExportSession:
exportSession.videoComposition = mainCompositionInst;
// exportAsynchronouslyWithCompletionHandler etc
N.B. make sure you choose the longer of the two track durations when setting your instruction duration:
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
AVPlayer doesn't mind if you get this wrong, but AVAssetExportSession does and will return an AVErrorInvalidVideoComposition (-11841) error.
N.B. 2 Your AVPlayer isn't actually going out of scope, but it makes me nervous when I look at it. I'd assign it to a property if I were you.
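For example, a sketch of holding the player in a property (the property name is an arbitrary choice):
@property (nonatomic, strong) AVPlayer *player; // keep a strong reference for the lifetime of playback
// ...
self.player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
[self.player play];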
I'm working on an app that needs to concatenate a group of videos recorded with the camera. Ultimately I'll have an array of URLs to work with, but I can't figure out how to get two movie assets to concatenate properly. Here's some standalone code:
- (void)buildComposition {
NSString *path1 = [[NSBundle mainBundle] pathForResource:@"IMG_1049" ofType:@"MOV"];
NSString *path2 = [[NSBundle mainBundle] pathForResource:@"IMG_1431" ofType:@"MOV"];
NSURL *url1 = [NSURL fileURLWithPath:path1];
NSURL *url2 = [NSURL fileURLWithPath:path2];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoCompositionInstruction *compositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
NSMutableArray *layerInstructions = [NSMutableArray array];
CGSize renderSize = CGSizeZero;
NSUInteger count = 0;
for (NSURL *url in @[url1, url2]) {
NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey: @(YES) };
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
CMTimeRange editRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(1.0, 600));
NSError *error = nil;
CMTime insertionTime = composition.duration;
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *videoTrack = videoTracks.firstObject;
AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompositionTrack insertTimeRange:editRange ofTrack:videoTrack atTime:insertionTime error:&error];
if (count == 0) {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.6, 0.6);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
else {
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.9, 0.9);
[layerInstruction setTransform:CGAffineTransformConcat(videoTrack.preferredTransform, scale) atTime:kCMTimeZero];
[layerInstructions addObject:layerInstruction];
}
// set the render size to cover each track's transformed natural size
CGRect transformed = CGRectApplyAffineTransform((CGRect){CGPointZero, videoTrack.naturalSize}, videoTrack.preferredTransform);
renderSize = CGSizeMake(MAX(renderSize.width, transformed.size.width), MAX(renderSize.height, transformed.size.height));
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *audioTrack = audioTracks.firstObject;
AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompositionTrack insertTimeRange:editRange ofTrack:audioTrack atTime:insertionTime error:&error];
++count;
}
// set the composition instructions
compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);
compositionInstruction.layerInstructions = layerInstructions;
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:composition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = @[compositionInstruction];
videoComposition.renderSize = renderSize;
// export the composition
NSTimeInterval time = [NSDate timeIntervalSinceReferenceDate];
NSString *filename = [[NSString stringWithFormat:@"video-export-%f", time] stringByAppendingPathExtension:@"mov"];
NSString *pathTo = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@", filename]];
NSURL *fileUrl = [NSURL fileURLWithPath:pathTo];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
assetExport.videoComposition = videoComposition;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.outputURL = fileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"\n\nFailed: %@\n\n", assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"\n\nCancelled: %@\n\n", assetExport.error);
break;
default:
NSLog(@"\n\nExported: %@\n\n", fileUrl);
break;
}
}];
}
What I expect to happen is that the first video plays for 1 second at 60% scale, and then the second video plays for 1 second at 90% scale.
What actually happens is that the first video plays at both 60% and 90% at the start of the video. After 1 second, the video goes black, but the audio plays correctly.
Any ideas? Thanks!
Figured it out, for anyone who is curious: in my layer instructions, I was mistakenly building them from the AVURLAsset's videoTrack, not the AVMutableComposition's compositionTrack!
This line:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
Should be:
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
I'm trying to combine two audio files and one video file into a single .mov file. I'm doing it with the following code:
-(void)combineData{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVAsset *audioAsset = [AVAsset assetWithURL:_songURL];
AVAsset *audioAsset2 = [AVAsset assetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"speechRecord" stringByAppendingPathExtension:@"caf"]]]];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]] options:nil];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
/*CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(90));
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);*/
[layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction * mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[mainInstruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]];
mixComposition.naturalSize = videoTrack.naturalSize;
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderScale = 1.0;
mainCompositionInst.renderSize = videoTrack.naturalSize;
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"mergeVideo.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSLog(@"Removing old mergeVideo");
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
My first task is to deal with the 90° rotation of my new video. When I include mainCompositionInst in my code, the new video has all the sounds, but it has a black screen and is still not rotated. If I don't use any instructions, it works fine. Maybe I have mistakes in my code, maybe not. What would you advise?
I would try applying the rotation on top of the preferredTransform because it's possible that the preferredTransform translates your video so that it displays correctly on the screen.
I would try the following:
CGAffineTransform transform = videoTrack.preferredTransform;
transform = CGAffineTransformRotate(transform, DEGREES_TO_RADIANS(90));
//transform = CGAffineTransformTranslate(transform, 320, 0);
[layerInstruction setTransform:transform atTime:kCMTimeZero];
Proper working sample can be found here:
https://github.com/robovm/apple-ios-samples/blob/master/AVSimpleEditoriOS/AVSimpleEditor/AVSERotateCommand.m#L98
The thing is that you need to translate the composition to compensate for the movement caused by the rotation (the rotation would otherwise move it out of frame), as the comments in the Apple sample source code state.
The correct translation to apply will be:
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
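Putting the two together for the code above, a sketch following that sample (degreesToRadians is assumed to be the usual degrees-to-radians helper macro; note the render size must also swap width and height for a 90° rotation):
// Translate first so the rotated frame stays on screen, then rotate.
CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoTrack.naturalSize.height, 0.0);
CGAffineTransform t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
// Width and height trade places after a 90-degree rotation.
mainCompositionInst.renderSize = CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width);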
Here's my end goal: I'd like to use AVVideoCompositionCoreAnimationTool to create a video from Core Animation. I will not be using an existing AVAsset in this composition.
My question is, how can I use AVMutableComposition to make a video with a static solid color for a given amount of time? After I figure that out, I can add the animation.
Here's my code:
- (void)exportVideo {
AVMutableComposition *mixComposition = [AVMutableComposition composition];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(10, 600));
[mixComposition insertEmptyTimeRange:timeRange];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertEmptyTimeRange:timeRange];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = timeRange;
mainInstruction.backgroundColor = [UIColor blueColor].CGColor;
AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
mainComposition.renderSize = CGSizeMake(500, 500);
mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainComposition.frameDuration = CMTimeMake(1, 30);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainComposition;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
I know you have to add at least one track to the mixComposition, so I've added a video track and inserted an empty time range, but when I call exportAsynchronouslyWithCompletionHandler, the handler is never called. I can add a dispatch_after for any amount of time after I call export and observe that the exporter has a status of AVAssetExportSessionStatusExporting.
What am I doing wrong?
After playing around with this a few months ago I found that the only reliable way to get it to work is to use a short, blank video in your AVMutableCompositionTrack, then overlay it with the desired layers.
I uploaded a project to GitHub about a month ago as a result of a bug in the simulator. You can download the blank video here, if you'd like.
-(void)exportVideo
{
CGSize renderingSize = CGSizeMake(640, 360); // The desired size of your video
float displayDuration = 2.0f; //The duration of the desired video, in seconds
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSError *error;
[[NSFileManager defaultManager] removeItemAtURL:url error:&error];
mutableComposition = [AVMutableComposition composition];
mutableCompositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = renderingSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
videoLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
[parentLayer addSublayer:videoLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
NSString *path = [[NSBundle mainBundle] pathForResource:@"blank_1080p" ofType:@"mp4"];
NSURL *trackUrl = [NSURL fileURLWithPath:path];
AVAsset *asset = [AVAsset assetWithURL:trackUrl];
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[mutableCompositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,CMTimeMakeWithSeconds(displayDuration, 600)) ofTrack:track atTime:kCMTimeZero error:nil];
CALayer *imageLayer = [CALayer layer];
imageLayer.bounds = parentLayer.frame;
imageLayer.anchorPoint = CGPointMake(0.5, 0.5);
imageLayer.position = CGPointMake(CGRectGetMidX(imageLayer.bounds), CGRectGetMidY(imageLayer.bounds));
imageLayer.backgroundColor = [UIColor blueColor].CGColor;
imageLayer.contentsGravity = kCAGravityResizeAspectFill;
[parentLayer addSublayer:imageLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
videoComposition.instructions = @[instruction];
exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.videoComposition = videoComposition;
exporter.outputFileType= AVFileTypeMPEG4;
exporter.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(displayDuration, 600));
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
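exportDidFinish: is referenced throughout these snippets but never shown; a minimal sketch of what such a method might look like (the body is an assumption, not code from the original posts):
- (void)exportDidFinish:(AVAssetExportSession *)session
{
switch (session.status) {
case AVAssetExportSessionStatusCompleted:
NSLog(@"Export finished: %@", session.outputURL);
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", session.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export cancelled");
break;
default:
break;
}
}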