float sliderValue = 0.5;
NSURL *audio_url = [[NSBundle mainBundle] URLForResource:@"video_fileName" withExtension:@"mp4"];
AVURLAsset* audio_Asset = [[AVURLAsset alloc]initWithURL:audio_url options:nil];
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
NSString* formattedNumber = [NSString stringWithFormat:@"%.01f", sliderValue];
NSLog(@"formattedNumber %@",formattedNumber);
NSLog(@"formattedNumber %.01f",[formattedNumber floatValue]);
[audioInputParams setVolume:[formattedNumber floatValue] atTime:kCMTimeZero];
[audioInputParams setTrackID:[[[audio_Asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] trackID]];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:audioInputParams];
AVAssetExportSession *exportSession=[AVAssetExportSession exportSessionWithAsset:audio_Asset presetName:AVAssetExportPresetAppleM4A];
exportSession.audioMix = audioMix;
exportSession.outputURL=[NSURL fileURLWithPath:audioPath];
exportSession.outputFileType=AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status==AVAssetExportSessionStatusFailed) {
NSLog(@"failed");
}
else {
NSLog(@"AudioLocation : %@",audioPath);
}
}];
Issue: the asset comes back without any audio tracks, so the app crashes here:
[[audio_Asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
[__NSArrayM objectAtIndex:]: index 0 beyond bounds for empty array
The crash message describes the issue: you are trying to access an array beyond its bounds. Change:
[audioInputParams setTrackID:[[[audio_Asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] trackID]];
to:
NSArray * tracks = [audio_Asset tracksWithMediaType:AVMediaTypeAudio];
if([tracks count]) {
[audioInputParams setTrackID:[[tracks firstObject] trackID]];
}
Your main issue may be that creating an AVURLAsset does not immediately load its tracks. You can wrap the rest of your code (after creating audio_Asset) in the asynchronous load method to get a stronger guarantee that the tracks are available:
NSString *tracksKey = @"tracks";
[audio_Asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler: ^{
// rest of the code here
}];
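A fuller sketch of that pattern, assuming the same audio_Asset and audioInputParams as above; the status check and the empty-array check are the important parts:
NSString *tracksKey = @"tracks";
[audio_Asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler:^{
    NSError *loadError = nil;
    AVKeyValueStatus status = [audio_Asset statusOfValueForKey:tracksKey error:&loadError];
    if (status != AVKeyValueStatusLoaded) {
        NSLog(@"tracks did not load: %@", loadError);
        return;
    }
    NSArray *tracks = [audio_Asset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) {
        NSLog(@"asset has no audio track");
        return;
    }
    [audioInputParams setTrackID:[[tracks firstObject] trackID]];
    // ...build the AVMutableAudioMix and run the export session here, as in the question...
}];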
I have the following code to fix the transform of a video:
- (AVVideoComposition *)squareVideoCompositionFor:(AVAsset *)asset {
AVAssetTrack *track = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
CGFloat length = MAX(track.naturalSize.width, track.naturalSize.height);
CGSize size = track.naturalSize;
CGFloat scale = 0;
CGAffineTransform transform = track.preferredTransform;
if (transform.a == 0 && transform.b == 1 && transform.c == -1 && transform.d == 0) {
scale = -1;
}
else if (transform.a == 0 && transform.b == -1 && transform.c == 1 && transform.d == 0) {
scale = -1;
}
else if (transform.a == 1 && transform.b == 0 && transform.c == 0 && transform.d == 1) {
scale = 1;
}
else if (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) {
scale = -1;
}
transform = CGAffineTransformTranslate(transform, scale * -(size.width - length) / 2, scale * -(size.height - length) / 2);
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
[transformer setTransform:transform atTime:kCMTimeZero];
// CGAffineTransform finalTransform = t2;
// [transformer setTransform:finalTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
instruction.layerInstructions = @[transformer];
AVMutableVideoComposition *composition = [AVMutableVideoComposition videoComposition];
composition.frameDuration = CMTimeMake(1, 30);
composition.renderSize = CGSizeMake(length, length);
composition.instructions = @[instruction];
composition.renderScale = 1.0;
return composition;
}
And the following code to mute the audio:
- (AVMutableComposition *) removeAudioFromVideoFileFor:(AVAsset *)asset {
AVMutableComposition *composition_Mix = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition_Mix addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
BOOL ok = NO;
AVAssetTrack * sourceVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CMTimeRange x = CMTimeRangeMake(kCMTimeZero, [asset duration]);
NSError *error;
ok = [compositionVideoTrack insertTimeRange:x ofTrack:sourceVideoTrack atTime:kCMTimeZero error:&error];
return composition_Mix;
}
Here is how I call the functions:
AVAsset *asset = [AVAsset assetWithURL:inputURL];
AVMutableComposition *composition = [self removeAudioFromVideoFileFor:asset];
AVAssetExportSession *session = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
session.videoComposition = [self squareVideoCompositionFor:asset];
session.outputURL = outputURL;
session.outputFileType = AVFileTypeMPEG4;
session.shouldOptimizeForNetworkUse = true;
session.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
But it shows an error if I use both the composition and [self squareVideoCompositionFor:asset]:
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}
If I omit one of them it works fine, meaning one AVAssetExportSession can either mute the audio or make the video square, but not both.
Is there a way I can achieve both in a single export pass of AVAssetExportSession?
Your code looks good, but I have made some changes to get it working.
The inputURL and outputURL should be prefixed with either file:// or https:// (since they are URLs; in your case they should start with file://).
If your URL is not valid, you will not get the desired output.
//FOR OUTPUT URL
NSString *path = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
path = [path stringByAddingPercentEncodingWithAllowedCharacters:[NSCharacterSet URLQueryAllowedCharacterSet]];
//the output video will be written to file final.mp4
NSURL *outputURL = [NSURL fileURLWithPath:path];
outputURL = [outputURL URLByAppendingPathComponent:@"final.mp4"];
NSLog(@"outputURL = %@", outputURL);
//FOR INPUT URL
//This is the path of the bundle resource that is going to be used
NSURL *inputURL = [[NSBundle mainBundle] URLForResource:@"video" withExtension:@"mp4"];
NSLog(@"inputURL = %@", inputURL);
Export the composition
//this will export the composition with the specified configuration
[session exportAsynchronouslyWithCompletionHandler:^{
NSLog(@"Success");
}];
When you see the "Success" log in the console, check the documents directory of your application. The video will be written at outputURL.
NOTE: Use CMD + SHIFT + G in Finder and paste the outputURL path. You will be taken to the document folder of your app (simulator only). For a device, you need to download the app container and view the package contents.
Complete CODE
The removeAudioFromVideoFileFor: and squareVideoCompositionFor: methods look good; you just need to change the following.
Here "video" is the name of the resource file in the app bundle.
- (void)viewDidLoad {
[super viewDidLoad];
NSString *path = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
path = [path stringByAddingPercentEncodingWithAllowedCharacters:[NSCharacterSet URLQueryAllowedCharacterSet]];
NSURL *outputURL = [NSURL fileURLWithPath:path];
outputURL = [outputURL URLByAppendingPathComponent:@"final.mp4"];
NSLog(@"outputURL = %@", outputURL);
NSURL *inputURL = [[NSBundle mainBundle] URLForResource:@"video" withExtension:@"mp4"];
NSLog(@"inputURL = %@", inputURL);
AVAsset *asset = [AVAsset assetWithURL:inputURL];
AVMutableComposition *composition = [self removeAudioFromVideoFileFor: asset];
AVAssetExportSession *session = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
session.videoComposition = [self squareVideoCompositionFor:asset];
session.outputURL = outputURL;
session.outputFileType = AVFileTypeMPEG4;
session.shouldOptimizeForNetworkUse = true;
session.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
[session exportAsynchronouslyWithCompletionHandler:^{
NSLog(@"Success:");
}];
}
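One small addition worth making (not part of the code above): check session.status in the completion handler so a failed export is not logged as success. A minimal sketch using the same session and outputURL:
[session exportAsynchronouslyWithCompletionHandler:^{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Success: %@", outputURL);
    } else {
        NSLog(@"Export failed: %@", session.error);
    }
}];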
Hope it will help
I am trying to split a video into 4-second chunks with AVAssetExportSession. The initial split works and returns an 8 MB / 4-second chunk. But the second returns 12 MB, which seems incorrect when the original video is only 18 MB.
- (void) splitVideo{
AVURLAsset *vidAsset = [AVURLAsset URLAssetWithURL:output options:nil];
CMTime duration = vidAsset.duration;
NSLog(@"File size is : %.2f MB And Duration: %f",(float)[NSData dataWithContentsOfURL:output].length/1024.0f/1024.0f, CMTimeGetSeconds(duration));
splitArray = [[NSMutableArray alloc]init];
CMTime end = CMTimeMake(4, 1);
CMTimeRange range = CMTimeRangeMake(kCMTimeZero, end);
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output0.mp4"];
totalSeconds = 4.0f;
[self cutVideo:output withRange:range withOutput:outputPath];
}
- (void) cutVideo:(NSURL *)url withRange:(CMTimeRange)range withOutput:(NSString*)path{
AVAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
if ([compatiblePresets containsObject:AVAssetExportPresetHighestQuality]) {
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
NSURL *finalUrl = [NSURL fileURLWithPath:path];
[[NSFileManager defaultManager] removeItemAtURL:finalUrl error:NULL];
exportSession.outputURL = finalUrl;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.timeRange = range;
NSLog(@"start: %f end: %f", CMTimeGetSeconds(range.start), CMTimeGetSeconds(range.duration));
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
});
if ([exportSession status] == AVAssetExportSessionStatusCompleted){
NSData *videoData = [[NSData alloc]initWithContentsOfURL:exportSession.outputURL];
NSLog(@"DL: %f", (float)videoData.length/1024.0f/1024.0f);
[self makeFile:finalUrl];
AVURLAsset *fullVid = [AVURLAsset URLAssetWithURL:output options:nil];
CMTime start = CMTimeMake(totalSeconds, 1);
totalSeconds = totalSeconds + 4.0f;
CMTime end;
if ((CMTimeGetSeconds(start) + 4) > CMTimeGetSeconds(fullVid.duration)) {
end = fullVid.duration;
}else{
end = CMTimeMake(CMTimeGetSeconds(start) + 4, 1);
}
CMTimeRange range2 = CMTimeRangeMake(start, end);
NSLog(@"%f < %f\n\n", CMTimeGetSeconds(start), CMTimeGetSeconds(fullVid.duration));
if (CMTimeGetSeconds(start) < CMTimeGetSeconds(fullVid.duration)) {
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"output%lu.mp4", splitArray.count]];
[self cutVideo:output withRange:range2 withOutput:outputPath];
}else{
[self saveVideo:true];
}
}else if ([exportSession status] == AVAssetExportSessionStatusFailed){
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
}else if ([exportSession status] == AVAssetExportSessionStatusCancelled){
NSLog(@"Export canceled");
}
}];
}
}
File size is : 18.86 MB And Duration: 9.171667
first
start: 0.000000 end: 4.000000
DL: 8.194733
4.000000 < 9.171667
second
start: 4.000000 end: 8.000000
DL: 12.784523
It's not incorrect: video encoders store changes from the previous frame, not just a set of independent "images". I guess your video has more changes in the second chunk, which is why it takes more space.
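If you want a rough sanity check on the numbers, AVAssetTrack exposes an estimatedDataRate (bits per second). A sketch inside the question's cutVideo: method, using its asset and range variables; the actual chunk size will still vary with content complexity:
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
float bitsPerSecond = videoTrack.estimatedDataRate;
Float64 seconds = CMTimeGetSeconds(range.duration);
double roughMB = (bitsPerSecond / 8.0) * seconds / (1024.0 * 1024.0);
NSLog(@"rough expected size for this range: %.2f MB", roughMB);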
I want to export and compress a video from the iPod library, but exportAsynchronouslyWithCompletionHandler: works correctly for some videos and not for others; when it fails it does nothing and throws no exception.
Strangely, if I comment out the call to setVideoComposition:, exportAsynchronouslyWithCompletionHandler: works normally.
Here is my code:
AVAsset *_videoAsset = [AVAsset assetWithURL:[NSURL URLWithString:filmElementModel.alassetUrl]];
CMTime assetTime = [_videoAsset duration];
AVAssetTrack *avAssetTrack = [[_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
Float64 duration = CMTimeGetSeconds(assetTime);
AVMutableComposition *avMutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *avMutableCompositionTrack = [avMutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error = nil;
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.0f, 30), CMTimeMakeWithSeconds(duration>8.0f?8.0f:duration, 30))
ofTrack:avAssetTrack
atTime:kCMTimeZero
error:&error];
AVMutableVideoComposition *avMutableVideoComposition = [AVMutableVideoComposition videoComposition];
avMutableVideoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionLayerInstruction *layerInstruciton = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:avMutableComposition.tracks[0]];
[layerInstruciton setTransform:[[[_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform] atTime:kCMTimeZero];
[layerInstruciton setOpacity:0.0f atTime:[_videoAsset duration]];
AVMutableVideoCompositionInstruction *avMutableVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
[avMutableVideoCompositionInstruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [avMutableComposition duration])];
avMutableVideoCompositionInstruction.layerInstructions = [NSArray arrayWithObject:layerInstruciton];
if (avAssetTrack.preferredTransform.a) {
NSLog(@"landscape"); // original log text: "横向" (landscape)
avMutableVideoComposition.renderSize = CGSizeMake(avAssetTrack.naturalSize.width, avAssetTrack.naturalSize.height);
}else
{
avMutableVideoComposition.renderSize = CGSizeMake(avAssetTrack.naturalSize.height, avAssetTrack.naturalSize.width);
}
avMutableVideoComposition.instructions = [NSArray arrayWithObject:avMutableVideoCompositionInstruction];
// the url for save video
NSString *outUrlString = ITTPathForBabyShotResource([NSString stringWithFormat:@"%@/%@.mp4",DATA_ENV.userModel.userId,filmElementModel.filmElementId]);
NSFileManager *fm = [[NSFileManager alloc] init];
if ([fm fileExistsAtPath:outUrlString]) {
NSLog(@"video already exists, deleting it");
if ([fm removeItemAtPath:outUrlString error:&error]) {
NSLog(@"delete succeeded");
}else {
NSLog(@"delete failed, error = %@",error.description);
}
}
CGSize renderSize = CGSizeMake(1280, 720);
if (MIN(avAssetTrack.naturalSize.width, avAssetTrack.naturalSize.height)<720) {
renderSize =avAssetTrack.naturalSize;
}
long long fileLimite =renderSize.width*renderSize.height*(duration>8.0f?8.0f:duration)/2;
_avAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:avMutableComposition presetName:AVAssetExportPreset1280x720];
[_avAssetExportSession setVideoComposition:avMutableVideoComposition];
[_avAssetExportSession setOutputURL:[NSURL fileURLWithPath:outUrlString]];
[_avAssetExportSession setOutputFileType:AVFileTypeQuickTimeMovie];
[_avAssetExportSession setFileLengthLimit: fileLimite];
[_avAssetExportSession setShouldOptimizeForNetworkUse:YES];
[_avAssetExportSession exportAsynchronouslyWithCompletionHandler:^(void){
switch (_avAssetExportSession.status) {
case AVAssetExportSessionStatusFailed:
{
}
break;
case AVAssetExportSessionStatusCompleted:
{
}
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"export cancelled");
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"AVAssetExportSessionStatusExporting");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"AVAssetExportSessionStatusWaiting");
break;
}
}];
if (_avAssetExportSession.status != AVAssetExportSessionStatusCompleted){
NSLog(@"Retry export");
}
I solved this problem!
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.0f, 30), CMTimeMakeWithSeconds(duration>8.0f?8.0f:duration, 30))
ofTrack:avAssetTrack
atTime:kCMTimeZero
error:&error];
I replaced CMTimeMakeWithSeconds(0.0f, 30) with CMTimeMakeWithSeconds(0.1f, 30) in the time range.
But I don't know why this makes it work properly.
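For reference, a sketch of the changed call with the new start time (same variables as above):
[avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0.1f, 30), CMTimeMakeWithSeconds(duration>8.0f?8.0f:duration, 30))
ofTrack:avAssetTrack
atTime:kCMTimeZero
error:&error];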
I'm trying to find a way to fade in a locally stored audio track without an audio glitch. I'm using AVPlayer and referencing MP3s from the iPod library via AVURLAsset. The following method works most of the time, but not always, so I'm thinking I need to scan through the audio data for the nearest volume zero-crossing and start my fade from there. Any pointers would be much appreciated.
float duration = 0.5;
AVAsset *asset = [self.av_Player.currentItem asset];
NSArray *keys = [NSArray arrayWithObject:@"tracks"];
[asset loadValuesAsynchronouslyForKeys:keys completionHandler:^(void) {
NSError *error = nil;
NSTimeInterval now = [self currentPlaybackTime];
CMTime mainFadeIn = CMTimeMakeWithSeconds(now, 6000);
CMTime mainFadeDuration = CMTimeMakeWithSeconds(duration, 6000);
CMTimeRange timerange = CMTimeRangeMake(mainFadeIn, mainFadeDuration);
AVKeyValueStatus trackStatus = [asset statusOfValueForKey:@"tracks" error:&error];
switch (trackStatus) {
case AVKeyValueStatusLoaded:
if( self.av_Player ) {
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
AVMutableAudioMixInputParameters *volumeMixInput = [AVMutableAudioMixInputParameters audioMixInputParameters];
[volumeMixInput setVolumeRampFromStartVolume:0.0 toEndVolume:tovolume timeRange:timerange];
[volumeMixInput setTrackID:[[tracks objectAtIndex:0] trackID]];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
[audioMix setInputParameters:[NSArray arrayWithObject:volumeMixInput]];
[self.av_Player.currentItem setAudioMix:audioMix];
}
break;
default:
break;
}
}];
No idea why I was getting glitches with the above method, but I assume it might have something to do with using blocks. Anyway, I used the code below and it now works smoothly for fade-ins and fade-outs. Hope this helps others.
NSTimeInterval now = [self currentPlaybackTime];
AVPlayerItem *playerItem = self.av_Player.currentItem;
AVAsset *asset = playerItem.asset;
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *allAudioParams = [NSMutableArray array];
for (AVAssetTrack *track in audioTracks) {
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
[audioInputParams setVolume:fromvolume atTime:CMTimeMakeWithSeconds(now-0.1, 6000)];
[audioInputParams setVolume:fromvolume atTime:CMTimeMakeWithSeconds(now, 6000)];
[audioInputParams setVolume:tovolume atTime:CMTimeMakeWithSeconds(now+duration, 6000)];
[audioInputParams setTrackID:[track trackID]];
[allAudioParams addObject:audioInputParams];
}
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
[audioMix setInputParameters:allAudioParams];
[playerItem setAudioMix:audioMix];
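For context, the fromvolume, tovolume and duration variables used above are not defined in this snippet; for a half-second fade-in from silence at the current playback position they might be set like this (illustrative values, not from the original post):
NSTimeInterval duration = 0.5;   // length of the fade in seconds
float fromvolume = 0.0f;         // starting volume (silent, for a fade-in)
float tovolume = 1.0f;           // target volume (full volume)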
I am currently working on an iOS app which merges a desired number of videos. Once the user taps the button to merge the videos, the videos are joined and then played using AVPlayer as follows:
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
CMTime transitionDuration = CMTimeMake(1, 1); // Default transition duration is one second.
// Add two video tracks and two audio tracks.
AVMutableCompositionTrack *compositionVideoTracks[2];
AVMutableCompositionTrack *compositionAudioTracks[2];
compositionVideoTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
for (i = 0; i < [self.selectedAssets count]; i++ )
{
NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
AVURLAsset *asset = [self.selectedAssets objectAtIndex:i];
NSLog(@"number of tracks %lu",(unsigned long)asset.tracks.count);
CMTimeRange assetTimeRange;
assetTimeRange.start = kCMTimeZero;
assetTimeRange.duration = asset.duration;
NSValue *clipTimeRange = [NSValue valueWithCMTimeRange:assetTimeRange];
CMTimeRange timeRangeInAsset;
if (clipTimeRange)
timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
else
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
// Remember the time range in which this clip should pass through.
// Every clip after the first begins with a transition.
// Every clip before the last ends with a transition.
// Exclude those transitions from the pass through time ranges.
passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
if (i > 0) {
passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
}
if (i+1 < [self.selectedAssets count]) {
passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
}
// The end of this clip will overlap the start of the next by transitionDuration.
// (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);
// Remember the time range for the transition to the next item.
transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
}
// Set up the video composition if we are to perform crossfade or push transitions between clips.
NSMutableArray *instructions = [NSMutableArray array];
// Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
for (i = 0; i < [self.selectedAssets count]; i++ )
{
NSInteger alternatingIndex = i % 2; // alternating targets
// Pass through clip i.
AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
passThroughInstruction.timeRange = passThroughTimeRanges[i];
AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
[instructions addObject:passThroughInstruction];
AVMutableVideoCompositionLayerInstruction *fromLayer;
AVMutableVideoCompositionLayerInstruction *toLayer;
if (i+1 < [self.selectedAssets count])
{
// Add transition from clip i to clip i+1.
AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
transitionInstruction.timeRange = transitionTimeRanges[i];
fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];
// Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
[fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
[instructions addObject:transitionInstruction];
}
AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[self.selectedItemsURL objectAtIndex:i] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
CGAffineTransform transform = sourceVideoTrack.preferredTransform;
self.videoComposition.renderSize = sourceVideoTrack.naturalSize;
if (size.width > size.height) {
[fromLayer setTransform:transform atTime:sourceAsset.duration];
} else {
float s = size.width/size.height;
CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
float x = (size.height - size.width*s)/2;
CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(x, 0));
[fromLayer setTransform:newer atTime:sourceAsset.duration];
}
}
self.videoComposition.instructions = instructions;
self.videoComposition.frameDuration = CMTimeMake(1, 30);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
self.exporter = [[AVAssetExportSession alloc] initWithAsset:self.mixComposition presetName:AVAssetExportPresetMediumQuality];
self.exporter.outputURL=url;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.videoComposition = self.videoComposition;
self.exporter.shouldOptimizeForNetworkUse = YES;
self.playerItem = [AVPlayerItem playerItemWithAsset:self.mixComposition];
self.playerItem.videoComposition = self.videoComposition;
AVPlayer *player = [AVPlayer playerWithPlayerItem:self.playerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:CGRectMake(0, 0, self.imageView.frame.size.width, self.imageView.frame.size.height)];
[[[self imageView] layer] addSublayer:playerLayer];
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[player play];
[[NSNotificationCenter defaultCenter]
addObserver:self selector:@selector(checkPlayEnded) name:AVPlayerItemDidPlayToEndTimeNotification object:self.playerItem];
I am currently facing the following issues:
If one video is in portrait and the other is in landscape, how can I rotate the portrait video to landscape? My view is in landscape orientation, but the portrait video retains its original orientation.
(I am loading videos stored in the camera roll, not recording them inside my app.)
Setting the above issue aside: if I merge any number of videos, they work fine. But once I save that new video to my library, then load it into my app again and try to join it with some other new video, the resolution gets disturbed, even though both videos play fine separately in the app. How can I solve that?
(I have tried to follow the WWDC 2010 video editing tutorial, so this code is extracted from there.)
You can check the orientation of the video at run time in the above code, at the point where you create the AVMutableVideoCompositionInstruction object.
The code to append in order to fix the issue is:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);
AVAssetTrack *videoTrack = [[mutableComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction * layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
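// assetVideoTrack below is assumed to be the source asset's video track (it is not defined in this snippet),
// e.g. [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]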
CGAffineTransform videoTransform = assetVideoTrack.preferredTransform;
if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0)
{
videoAssetOrientation_= UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)
{
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
CGFloat FirstAssetScaleToFitRatio = 320.0 / assetVideoTrack.naturalSize.width;
if(isVideoAssetPortrait_)
{
videoSize=CGSizeMake(350,400);
FirstAssetScaleToFitRatio = 320.0/assetVideoTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[layerInstruction setTransform:CGAffineTransformConcat(assetVideoTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
videoSize=CGSizeMake(assetVideoTrack.naturalSize.width,assetVideoTrack.naturalSize.height);
}
The above code will keep a landscape video in landscape and prevent the video from being converted from portrait to landscape.
I hope this will help. Instead of first converting to the proper orientation and then applying the edits, appending this code removes one step and does both things (i.e. editing and orientation) in a single, quicker pass.
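A sketch of how this instruction might be wired into a video composition, assuming the videoSize and layerInstruction computed above (the names follow the snippet; adapt them to your own composition and export session):
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = videoSize;
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.instructions = [NSArray arrayWithObject:instruction];
// then hand it to the export session (or player item), e.g.:
// exporter.videoComposition = videoComposition;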