I'm trying to combine two audio files and one video file into a single .mov file. I do it with the following code:
-(void)combineData{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVAsset *audioAsset = [AVAsset assetWithURL:_songURL];
AVAsset *audioAsset2 = [AVAsset assetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"speechRecord" stringByAppendingPathExtension:@"caf"]]]];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]] options:nil];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
/*CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(DEGREES_TO_RADIANS(90));
CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);*/
[layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction * mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[mainInstruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]];
mixComposition.naturalSize = videoTrack.naturalSize;
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.renderScale = 1.0;
mainCompositionInst.renderSize = videoTrack.naturalSize;
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"mergeVideo.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs]) {
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
NSLog(#"Removing old mergeVideo");
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
My first task is to deal with the 90° rotation of my new video. When I include mainCompositionInst in my code, the new video has all the sounds, but it has a black screen and is still not rotated. If I don't use any instructions, it works fine. Maybe I have mistakes in my code, or maybe not. What would you advise?
I would try applying the rotation on top of the preferredTransform because it's possible that the preferredTransform translates your video so that it displays correctly on the screen.
I would try the following:
CGAffineTransform transform = videoTrack.preferredTransform;
transform = CGAffineTransformRotate(transform, DEGREES_TO_RADIANS(90));
//transform = CGAffineTransformTranslate(transform, 320, 0);
[layerInstruction setTransform:transform atTime:kCMTimeZero];
Proper working sample can be found here:
https://github.com/robovm/apple-ios-samples/blob/master/AVSimpleEditoriOS/AVSimpleEditor/AVSERotateCommand.m#L98
The thing is that you need to translate the composition to compensate for the movement caused by the rotation (since the rotation would otherwise move it out of frame), as stated in the comments in the Apple sample source code.
The correct translation to apply is:
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
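Putting it together, here is a minimal sketch of the fix applied to the question's code (assuming DEGREES_TO_RADIANS is the usual degrees-to-radians macro, and reusing the question's videoAsset, layerInstruction, and mainCompositionInst variables):
#define DEGREES_TO_RADIANS(x) ((x) * M_PI / 180.0)

// Shift right by the source height first so the rotated frame lands inside
// the render area, then rotate 90 degrees around the new origin.
AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
CGAffineTransform t2 = CGAffineTransformRotate(t1, DEGREES_TO_RADIANS(90.0));
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
// The render size must be swapped as well: a landscape track becomes portrait output.
mainCompositionInst.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height, assetVideoTrack.naturalSize.width);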
Related
When I blend two videos with AVAssetExportSession on iOS 9, it works perfectly, but when I blend them with AVAssetExportSession on iOS 10 it does not work. Please help me if anyone knows the reason. Thank you.
Actually, the code works on the iPhone 6s and earlier, but not on the iPhone 7.
For example:
-(void) blendVideoOverVideo:(NSURL*)mainVideoUrl andBlendVideoUrl:(NSURL*)liveEffectUrl
{
AVURLAsset *mainVideoUrlAsset =[AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
// AVPlayerItem* mainVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:mainVideoUrlAsset];
AVAssetTrack* mainVideoTrack =[[mainVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize mainVideoSize = [mainVideoTrack naturalSize];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:mainVideoUrl options:nil];
if(mainVideoUrl!=nil)
{
if([[audioAsset tracksWithMediaType:AVMediaTypeAudio] count])
{
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration )
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:nil];
}
}
AVMutableCompositionTrack *mainVideoConpositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[mainVideoConpositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration) ofTrack:mainVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *mainVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainVideoConpositionTrack];
//SEcond Track
AVURLAsset *blendVideoUrlAsset =[AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
// AVPlayerItem* blendVideoPlayerItem =[[AVPlayerItem alloc]initWithAsset:blendVideoUrlAsset];
AVAssetTrack* blendVideoTrack =[[blendVideoUrlAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
CGSize blendVideoSize = [blendVideoTrack naturalSize];
AVMutableCompositionTrack *blendVideoConpositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime oldTime=CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
// CMTime timeNew=CMTimeMakeWithSeconds(CMTimeGetSeconds(blendVideoUrlAsset.duration)/2, blendVideoUrlAsset.duration.timescale);
[blendVideoConpositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, oldTime) ofTrack:blendVideoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *blendVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendVideoConpositionTrack];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainVideoUrlAsset.duration);
CGAffineTransform Scale = CGAffineTransformMakeScale(1.0f,1.0f);
CGAffineTransform Move = CGAffineTransformMakeTranslation(0,0);
[mainVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
[blendVideoLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];
// [blendVideoLayerInstruction setOpacity:0.0 atTime:timeNew];
CGFloat cropOffX = 1.0;
// Scale the blend video down to the main video's dimensions.
CGFloat cropOffY = mainVideoSize.height/blendVideoSize.height;
if(blendVideoSize.width>mainVideoSize.width)
{
cropOffX = mainVideoSize.width/blendVideoSize.width;
}
Scale = CGAffineTransformMakeScale(cropOffX,cropOffY);
Move = CGAffineTransformMakeTranslation(0.1, 0.1);
[blendVideoLayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:blendVideoLayerInstruction,mainVideoLayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = mainVideoSize;
NSString *fullName = [NSString stringWithFormat:@"video%d.mov",arc4random() % 1000];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:fullName];
if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
{
[[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
}
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
CMTime start;
CMTime duration;
NSLog(#"Main Video dura %f blend dura - %f, ",CMTimeGetSeconds(mainVideoUrlAsset.duration),CMTimeGetSeconds(blendVideoUrlAsset.duration));
if(CMTimeGetSeconds(blendVideoUrlAsset.duration)>CMTimeGetSeconds(mainVideoUrlAsset.duration))
{
start = CMTimeMakeWithSeconds(0.0, blendVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), blendVideoUrlAsset.duration.timescale);
}
else // also covers equal durations, so start/duration are always initialized
{
start = CMTimeMakeWithSeconds(0.0, mainVideoUrlAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(CMTimeGetSeconds(mainVideoUrlAsset.duration), mainVideoUrlAsset.duration.timescale);
}
CMTimeRange range = CMTimeRangeMake(start, duration);
exporter.timeRange = range;
[exporter setVideoComposition:MainCompositionInst];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
__weak typeof(self) weakSelf = self;
[weakSelf createMBCircularProgress:exporter];
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf exportDidFinish:exporter];
});
}];
}
This code runs on iOS 9, and even on iOS 10 on the iPhone 6s, 6, 5, etc., but it will not run on the iPhone 7 simulator.
The solution is to use the latest Xcode 8.1 beta.
It's a bug.
It's fixed in Xcode 8.1 beta.
In the Xcode 8.1 beta, [AVAssetExportSession allExportPresets] on the iPhone 7 simulator now returns:
AVAssetExportPreset1920x1080,
AVAssetExportPresetLowQuality,
AVAssetExportPresetAppleM4A,
AVAssetExportPreset640x480,
AVAssetExportPreset3840x2160,
AVAssetExportPresetHighestQuality,
AVAssetExportPreset1280x720,
AVAssetExportPresetMediumQuality,
AVAssetExportPreset960x540
In Xcode 8.0, [AVAssetExportSession allExportPresets] on the iPhone 7 simulator returns an empty array.
AVAssetExportSession can also be nil, so check for nil before working with it:
https://developer.apple.com/library/content/qa/qa1730/_index.html
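A minimal defensive sketch along those lines (the compatibility check and nil check are the documented approach; the variable names match the code above):
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
if (![compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
    NSLog(@"Preset not compatible with this asset on this device/simulator");
    return;
}
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
if (!exporter) {
    NSLog(@"Could not create AVAssetExportSession");
    return;
}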
I'm new to video programming. I'm trying to practice, but I'm having trouble merging two video files into one.
The merge I mean is as follows:
I have a first video like this, and a second video like this, and I want them to merge like this (the original question illustrated each case with an image).
I didn't want to use two video players, because I want to send the merged video file to someone. I searched all day to solve this, but I couldn't find out how.
I wrote code referencing this link, but it shows the first video only, not the merged result.
My Code:
NSURL *firstURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video1" ofType:@"mp4"]];
AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:firstURL options:nil];
NSURL *secondURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video2" ofType:@"mp4"]];
AVURLAsset *secondAsset = [[AVURLAsset alloc] initWithURL:secondURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
[secondTrack setPreferredTransform:CGAffineTransformMakeScale(0.25f,0.25f)];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
NSLog(@"%@", outputFilePath);
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = @"com.apple.quicktime-movie";
assetExport.outputURL = outputFileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler: ^(void ) {
switch (assetExport.status) {
case AVAssetExportSessionStatusFailed:
NSLog(#"AVAssetExportSessionStatusFailed");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(#"AVAssetExportSessionStatusCompleted");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"AVAssetExportSessionStatusWaiting");
break;
default:
break;
}
}
];
What am I missing? I don't know how to approach this problem.
Appreciate any ideas.
Thanks.
Edit:
I made new code referencing a link matt wrote (thanks, matt), but when I tried to export it, only the first video was exported, not both together. :(
My new code is:
NSURL *originalVideoURL1 = [[NSBundle mainBundle] URLForResource:@"video1" withExtension:@"mov"];
NSURL *originalVideoURL2 = [[NSBundle mainBundle] URLForResource:@"video2" withExtension:@"mov"];
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:originalVideoURL1 options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:originalVideoURL2 options:nil];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; //[AVMutableComposition composition];
NSError *error = nil;
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if(error) {
NSLog(#"firstTrack error!!!. %#", error.localizedDescription);
}
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if(error) {
NSLog(#"secondTrack error!!!. %#", error.localizedDescription);
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.7, 0.7);
CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
[firstLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2, 1.5);
CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
[secondLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[firstLayerInstruction, secondLayerInstruction];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(640, 480);
AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = mainCompositionInst;
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
AVPlayerLayer *playerLayer =[AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];
[player seekToTime:kCMTimeZero];
[player play]; // play is Good!!
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempS2 = [documentsDirectory stringByAppendingPathComponent:@"FinalVideo.mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:tempS2])
{
[[NSFileManager defaultManager] removeItemAtPath:tempS2 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: tempS2];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
NSLog(#"%#", [exportSession supportedFileTypes]);
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status==AVAssetExportSessionStatusFailed) {
NSLog(#"failed");
}
else {
NSLog(#"AudioLocation : %#",tempS2);
}
}];
How can I export my mixComposition and layerInstruction together?
Please give me a few more ideas.
Thanks.
With reference to the code in your second edit: just as you've told the AVPlayerItem about your AVMutableVideoComposition, you also need to tell the AVAssetExportSession:
exportSession.videoComposition = mainCompositionInst;
// exportAsynchronouslyWithCompletionHandler etc
N.B. make sure you choose the longer of the two track durations when setting your instruction duration:
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
AVPlayer doesn't mind if you get this wrong, but AVAssetExportSession does and will return an AVErrorInvalidVideoComposition (-11841) error.
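A quick way to catch that during development is to log the error in the completion handler (a sketch using the exportSession from your code):
if (exportSession.status == AVAssetExportSessionStatusFailed) {
    // AVErrorInvalidVideoComposition (-11841) surfaces here
    NSLog(@"Export failed: %@", exportSession.error);
}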
N.B. 2 Your AVPlayer isn't actually going out of scope, but it makes me nervous when I look at it. I'd assign it to a property if I were you.
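For example (a sketch; the property name is illustrative):
@property (nonatomic, strong) AVPlayer *player; // in the class extension
// ... then, in place of the local variable:
self.player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];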
I have an NSArray containing a list of video NSURL's and I want to merge them together to make one long compilation. The problem is, when I use the code below the videos merge but there is no audio.
- (IBAction)buildVideo {
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
int i = 0;
for (id object in movieArray) {
AVAsset *asset = [AVAsset assetWithURL:object];
if(i == 0){
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}else {
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:[mixComposition duration] error:nil];
}
i = i + 1;
}
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
The reason you're not getting audio is that you're not adding the audio track. You need to create an additional AVMutableCompositionTrack with a type of AVMediaTypeAudio:
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
And insert the time range for the source audio and video tracks into both composition tracks:
CMTime insertTime = kCMTimeZero;
for (id object in movieArray) {
AVAsset *asset = [AVAsset assetWithURL:object];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
[videoTrack insertTimeRange:timeRange
ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:insertTime
error:nil];
[audioTrack insertTimeRange:timeRange
ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:insertTime
error:nil];
insertTime = CMTimeAdd(insertTime,asset.duration);
}
I'm trying to create a video by combining two videos.
It worked properly until I tried to add a transition between the videos.
Now it gives me an unplayable video file.
The code:
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString* a_inputFileName = @"v1.mp4";
NSString* a_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,a_inputFileName];
NSURL * a_inputFileUrl = [NSURL fileURLWithPath:a_inputFilePath];
NSString* b_inputFileName = @"v2.mp4";
NSString* b_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,b_inputFileName];
NSURL* b_inputFileUrl = [NSURL fileURLWithPath:b_inputFilePath];
NSString* outputFileName = @"outputFile.mp4";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* a_videoAsset = [[AVURLAsset alloc]initWithURL:a_inputFileUrl options:nil];
CMTimeRange a_timeRange = CMTimeRangeMake(kCMTimeZero,a_videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:a_timeRange ofTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
nextClipStartTime = CMTimeSubtract(nextClipStartTime, CMTimeMake(1, 1));
AVURLAsset* b_videoAsset = [[AVURLAsset alloc]initWithURL:b_inputFileUrl options:nil];
CMTimeRange b_timeRange = CMTimeRangeMake(kCMTimeZero, b_videoAsset.duration);
[a_compositionVideoTrack insertTimeRange:b_timeRange ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVMutableVideoCompositionInstruction * instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
AVMutableVideoCompositionLayerInstruction * firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
[firstlayerInstruction setOpacityRampFromStartOpacity:1.0f toEndOpacity:0.0f timeRange:CMTimeRangeMake(CMTimeMake(3,1), CMTimeMake(1, 1))];
AVMutableVideoCompositionLayerInstruction * secondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]];
[secondlayerInstruction setOpacity:1.0f atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction,secondlayerInstruction, nil];
AVMutableVideoComposition * videoComp = [AVMutableVideoComposition videoComposition];
videoComp.instructions = [NSArray arrayWithObject:instruction];
[videoComp setRenderSize:CGSizeMake(320, 480)];
[videoComp setFrameDuration:CMTimeMake(1, 30)];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.videoComposition = videoComp;
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
[self saveVideoToAlbum:outputFilePath];
}
];
If I remove this line: _assetExport.videoComposition = videoComp;
it works properly, but there is no transition between the videos.
Edit:
I found out that the export session's status is stuck on "exporting"; it never finishes, and I don't know why.
Thanks in advance.
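A diagnostic sketch that may help narrow this down: check the session's status and error before saving, instead of saving unconditionally (one common cause of stuck or invalid exports is building layer instructions from the source asset's tracks instead of from the composition's tracks):
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
        [self saveVideoToAlbum:outputFilePath];
    } else {
        // error is populated on failure; AVErrorInvalidVideoComposition (-11841)
        // here usually points at the video composition's instructions
        NSLog(@"Export status %ld, error: %@", (long)_assetExport.status, _assetExport.error);
    }
}];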
I need to rotate a video because the iPhone back camera records as if it were always in landscape left.
I need to physically rotate the video because setting the orientation metadata doesn't work in all browsers (e.g., Chrome).
So I set up some code to read the recorded video and save it after rotation.
I used AVAssetExportSession, which seems to work quite well apart from two problems:
- the exported video has some black frames at the beginning
- some videos won't be exported
Can anyone offer some insight into the problem?
Thanks!
Code:
AVAsset* asset = [AVURLAsset URLAssetWithURL: videoUrl options:nil];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSError *error = nil;
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:&error];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioTrack atTime:kCMTimeZero error:&error];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake( kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
CGAffineTransform transformToApply = ///code to setup transformation
[layerInstruction setTransform:transformToApply atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0 atTime:asset.duration];
instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
videoComposition.frameDuration = CMTimeMake( 1, 600);
videoComposition.renderScale = 1.0;
videoComposition.renderSize = videoOutputSize;
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
exportSession.outputURL = self.outputUrl;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = videoComposition;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
__block BOOL finishedExporting = NO;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    finishedExporting = (exportSession.status == AVAssetExportSessionStatusCompleted);
    if (!finishedExporting) {
        NSLog(@"Export failed with status %ld, error: %@", (long)exportSession.status, exportSession.error);
    }
}];
}];