I'm looking for a way to make an audio file (WAV, AIFF, etc.) from a video.
Simply put, I want to get only the audio from the video.
How can I do that?
I tried AVAssetExportSession with only the audio and no video,
but it does not work.
Please help.
Here's my code:
- (void)exportAudioComposition
{
    self.filteredSoundURL = [self saveAudioFileRoot];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.assetComposition presetName:AVAssetExportPresetPassthrough];
    exportSession.outputURL = self.filteredSoundURL;
    exportSession.outputFileType = AVFileTypeWAVE;
    exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, self.assetComposition.duration);
    [exportSession exportAsynchronouslyWithCompletionHandler:^
    {
        dispatch_async(dispatch_get_main_queue(), ^{
        });
    }];
    [self doAudioProcessWithFilter:SOUN_TURTLE andAssetURL:self.filteredSoundURL];
}
- (void)setupCompositionWithAudio:(NSURL *)audioURL andVideo:(NSURL *)videoURL
{
    if (self.assetComposition != nil)
    {
        self.assetComposition = nil;
    }
    self.assetComposition = [AVMutableComposition composition];
    if (videoURL != nil)
    {
        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
        AVMutableCompositionTrack *compVideoTrack = [self.assetComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
        [compVideoTrack insertTimeRange:videoTimeRange ofTrack:videoTrack atTime:kCMTimeZero error:nil];
    }
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioURL options:nil];
    AVMutableCompositionTrack *compAudioTrack = [self.assetComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    [compAudioTrack insertTimeRange:audioTimeRange ofTrack:audioTrack atTime:kCMTimeZero error:nil];
}
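For comparison, here is a minimal sketch of an audio-only export that sticks to formats AVAssetExportSession supports: it doesn't appear to support writing WAV, so this assumes an M4A container via AVAssetExportPresetAppleM4A, and it only touches the output file inside the completion handler:
- (void)exportAudioOnlyFromAsset:(AVAsset *)asset toURL:(NSURL *)outputURL
{
    // Build a composition containing just the source audio track.
    AVMutableComposition *audioComposition = [AVMutableComposition composition];
    AVAssetTrack *sourceAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (sourceAudioTrack == nil) {
        NSLog(@"Asset has no audio track");
        return;
    }
    AVMutableCompositionTrack *audioTrack = [audioComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:sourceAudioTrack atTime:kCMTimeZero error:nil];

    // M4A is a supported audio-only output; WAV is not.
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:audioComposition presetName:AVAssetExportPresetAppleM4A];
    session.outputURL = outputURL;
    session.outputFileType = AVFileTypeAppleM4A;
    [session exportAsynchronouslyWithCompletionHandler:^{
        // Only use the file once the export has actually completed.
        if (session.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Exported audio to %@", outputURL);
        } else {
            NSLog(@"Export failed: %@", session.error);
        }
    }];
}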
Related
I'm new to video programming. I'm trying to practice it, but I'm having trouble merging two video files into one.
The merge I mean is as follows:
I have a first video and a second video, and I want them merged into a single file (the original post illustrated this with screenshots).
I didn't want to use two video players, because I want to send the merged video file to someone. I searched all day to solve this, but I couldn't find how.
I wrote code referencing this link, but it shows the first video only, not the merged result.
My code:
NSURL *firstURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video1" ofType:@"mp4"]];
AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:firstURL options:nil];
NSURL *secondURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"video2" ofType:@"mp4"]];
AVURLAsset *secondAsset = [[AVURLAsset alloc] initWithURL:secondURL options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                    ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
                     ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                      atTime:kCMTimeZero error:nil];
[secondTrack setPreferredTransform:CGAffineTransformMakeScale(0.25f, 0.25f)];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"FinalVideo.mov"];
NSLog(@"%@", outputFilePath);
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.outputFileType = @"com.apple.quicktime-movie";
assetExport.outputURL = outputFileUrl;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
    switch (assetExport.status) {
        case AVAssetExportSessionStatusFailed:
            NSLog(@"AVAssetExportSessionStatusFailed");
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"AVAssetExportSessionStatusCompleted");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"AVAssetExportSessionStatusWaiting");
            break;
        default:
            break;
    }
}];
What am I missing? I don't know how to approach this problem.
I'd appreciate any ideas.
Thanks.
Edit:
I wrote new code referencing a link matt posted (thanks, matt), but when I tried to export it, only the first video was exported, not both. :(
My new code:
NSURL *originalVideoURL1 = [[NSBundle mainBundle] URLForResource:@"video1" withExtension:@"mov"];
NSURL *originalVideoURL2 = [[NSBundle mainBundle] URLForResource:@"video2" withExtension:@"mov"];
AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:originalVideoURL1 options:nil];
AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:originalVideoURL2 options:nil];
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; //[AVMutableComposition composition];
NSError *error = nil;
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
    NSLog(@"firstTrack error!!!. %@", error.localizedDescription);
}
AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&error];
if (error) {
    NSLog(@"secondTrack error!!!. %@", error.localizedDescription);
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(0.7, 0.7);
CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
[firstLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2, 1.5);
CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
[secondLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[firstLayerInstruction, secondLayerInstruction];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = CGSizeMake(640, 480);
AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
newPlayerItem.videoComposition = mainCompositionInst;
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:newPlayerItem];
AVPlayerLayer *playerLayer =[AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:self.view.bounds];
[self.view.layer addSublayer:playerLayer];
[player seekToTime:kCMTimeZero];
[player play]; // play is Good!!
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *tempS2 = [documentsDirectory stringByAppendingPathComponent:@"FinalVideo.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:tempS2])
{
    [[NSFileManager defaultManager] removeItemAtPath:tempS2 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: tempS2];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
NSLog(@"%@", [exportSession supportedFileTypes]);
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"failed");
    }
    else {
        NSLog(@"AudioLocation : %@", tempS2);
    }
}];
How can I export with both my mixComposition and the layer instructions?
Please give me a few more ideas.
Thanks.
With reference to the code in your second edit, just as you've told the AVPlayerItem about your AVMutableVideoComposition, you need to tell the AVAssetExportSession too:
exportSession.videoComposition = mainCompositionInst;
// exportAsynchronouslyWithCompletionHandler etc
N.B. make sure you choose the longer of the two track durations when setting your instruction duration:
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(firstAsset.duration, secondAsset.duration));
AVPlayer doesn't mind if you get this wrong, but AVAssetExportSession does and will return an AVErrorInvalidVideoComposition (-11841) error.
N.B. 2 Your AVPlayer isn't actually going out of scope, but it makes me nervous when I look at it. I'd assign it to a property if I were you.
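Putting the fix together with the export code from the question (same variable names as above), the tail end of that snippet would look something like this:
exportSession.outputURL = url;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = mainCompositionInst; // the missing line
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"failed: %@", exportSession.error);
    }
}];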
I have been using this code, which worked perfectly fine on versions before iOS 9, but on iOS 9 it always goes to AVAssetExportSessionStatusFailed:
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/firstPart.caf", docsDir]] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/New-Recording.caf", docsDir]] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
[composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:nil];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
I searched all over the internet, but I didn't find any solution. Any help will be appreciated.
Your code should work, but without seeing your input files it's hard to tell. However it can still be improved.
You've got an unused mutable track for some reason. You can delete it:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// This track is unused. Delete it!
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
When the export fails (AVAssetExportSessionStatusFailed == exportSession.status) check exportSession.error for more information. AVFoundation error messages often leave much to be desired, but you could be lucky.
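For example, the failure branch could log it:
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
    NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
}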
You're exporting an m4a file, but the file suffix is .caf. Change it to .m4a (AVAssetExportSession doesn't appear to support exporting to AVFileTypeCoreAudioFormat).
Make sure you always remove the output file before exporting; you've forgotten to do this:
NSURL *outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.m4a", documentsDirectory]];
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeAppleM4A;
insertTimeRange:ofTrack:atTime:error: returns a success flag and an error. Consult them! They might point to a problem in your code:
NSError *error;
if (![track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:&error]) {
    NSLog(@"ERROR 1: %@", error);
}
This condition is strange. You check the existence of audio tracks in one asset, then use the other as well:
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
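A safer check would consult both assets before indexing into their track arrays, e.g.:
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0 &&
    [avAsset2 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}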
The issue was here:
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
This line was adding an extra track. Now the code works perfectly on iOS 9 as well:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path1 = [NSString stringWithFormat:@"%@/firstPart.caf", documentsDirectory];
NSString *path2 = [NSString stringWithFormat:@"%@/New-Recording.caf", documentsDirectory];
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path1] options:nil];
AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path2] options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
// [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *assetTrack1;
AVAssetTrack *assetTrack2;
if ([avAsset1 tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    assetTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    assetTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
NSError *error;
CMTime insertionPoint = kCMTimeZero;
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:assetTrack1 atTime:insertionPoint error:nil];
insertionPoint = CMTimeAdd(insertionPoint, avAsset1.duration);
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:assetTrack2 atTime:insertionPoint error:nil];
NSURL *outPutUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/mergedFilePart1.caf", documentsDirectory]];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = outPutUrl;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
        [self mergeAudioPart2];
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        NSLog(@"%ld", (long)exportSession.status);
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
I have an NSArray containing a list of video NSURLs, and I want to merge them together to make one long compilation. The problem is that when I use the code below, the videos merge but there is no audio.
- (IBAction)buildVideo {
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
int i = 0;
for (id object in movieArray) {
    AVAsset *asset = [AVAsset assetWithURL:object];
    if (i == 0) {
        [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                       ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    } else {
        [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                       ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:[mixComposition duration] error:nil];
    }
    i = i + 1;
}
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                        [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        [self exportDidFinish:exporter];
    });
}];
}
The reason you're not getting audio is that you're not adding the audio track. You need to create an additional AVMutableCompositionTrack with a type of AVMediaTypeAudio:
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
And insert the time range for the source audio and video tracks into both composition tracks:
CMTime insertTime = kCMTimeZero;
for (id object in movieArray) {
AVAsset *asset = [AVAsset assetWithURL:object];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
[videoTrack insertTimeRange:timeRange
ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:insertTime
error:nil];
[audioTrack insertTimeRange:timeRange
ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:insertTime
error:nil];
insertTime = CMTimeAdd(insertTime,asset.duration);
}
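One caveat: if a clip in movieArray has no audio track, objectAtIndex:0 will throw an exception, so a defensive version of the loop body can guard the lookup:
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0) {
    // Only insert audio when the clip actually has an audio track.
    [audioTrack insertTimeRange:timeRange
                        ofTrack:[audioTracks objectAtIndex:0]
                         atTime:insertTime
                          error:nil];
}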
Please see the code below. I'm trying to add a 2nd track so that I have a smaller video overlaid on top of a background video. However I can only get the background video to display in the final exported file. What am I doing wrong? And how do I control the order of tracks, i.e. the layering of the composition?
AVMutableComposition* composition = [[AVMutableComposition alloc] init];
NSURL* backgroundURL = [NSURL fileURLWithPath:backgroundOutputPath];
AVURLAsset* url0 = [AVURLAsset URLAssetWithURL:backgroundURL options:nil];
AVMutableCompositionTrack *backgroundVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* bgAssetTrack = [[url0 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[backgroundVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url0 duration]) ofTrack:bgAssetTrack atTime:kCMTimeZero error:&error];
NSURL* videoURL = [[NSBundle mainBundle] URLForResource:@"star" withExtension:@"mp4"];
AVURLAsset* url = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[url tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:@"test_AV.mp4"];
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Exporting. status is %ld", (long)exportSession.status);
    switch (exportSession.status) {
        case AVAssetExportSessionStatusFailed:
        case AVAssetExportSessionStatusCompleted: {
            NSLog(@"export done");
            break;
        }
    };
}];
Check these tutorials; they may be helpful to you:
http://abdulazeem.wordpress.com/2012/04/02/video-manipulation-in-ios-resizingmerging-and-overlapping-videos-in-ios/
http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios
https://github.com/androidios/AVEditDemo
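To address the layering question directly: adding a second video track isn't enough; the exporter also needs an AVMutableVideoComposition, and the order of its layer instructions controls the stacking (the first instruction in the array is the front-most layer). A minimal sketch using the track names from the question, with example transform and render values:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [url0 duration]);

// Layer instructions are composited in array order: first is topmost.
AVMutableVideoCompositionLayerInstruction *overlayLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
[overlayLayer setTransform:CGAffineTransformMakeScale(0.5, 0.5) atTime:kCMTimeZero]; // shrink the overlay (example values)
AVMutableVideoCompositionLayerInstruction *backgroundLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:backgroundVideoTrack];
instruction.layerInstructions = @[overlayLayer, backgroundLayer];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = @[instruction];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = CGSizeMake(640, 480); // example render size
exportSession.videoComposition = videoComposition;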
In my application I need to merge audio and video and then play the resulting file in the media player. How can I merge audio and video in iOS? Is there any source code for this? Please suggest some ideas.
Thanks in advance.
Use this:
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
NSString *videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // your completion code here
}];
You can merge audio and video by creating an AVMutableComposition:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVURLAsset *video1 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path1] options:nil];
NSArray *pathComponents = [NSArray arrayWithObjects:
                           [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject], @"MyAudio.m4a", nil];
NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
AVAsset *audioAsset = [AVAsset assetWithURL:outputFileURL];
// Create a mutable composition track of audio type
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[composedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                       ofTrack:[[video1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                        atTime:kCMTimeZero error:nil];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
// An output URL and file type are required, or the export will fail.
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"mergedVideo.mov"]; // example output location
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
exporter.outputURL = [NSURL fileURLWithPath:exportPath];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status) {
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed to export video: %@", exporter.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"export cancelled");
            break;
        default:
            break;
    }
}];
For video merging, see this tutorial: http://iosbucket.blogspot.in/2015/04/mp4-conversion-and-video-merging-in-ios.html
You can also find a sample project for merging videos there.
Visit http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios for merging audio and video files.
It's a bit late to answer, but this may help someone in the future. It repeats the audio if the video duration is longer than the audio's.
+ (void)mergeVideoWithAudio:(NSURL *)videoUrl audioUrl:(NSURL *)audioUrl success:(void (^)(NSURL *url))success failure:(void (^)(NSError *error))failure {
    AVMutableComposition *mixComposition = [AVMutableComposition new];
    NSMutableArray<AVMutableCompositionTrack *> *mutableCompositionVideoTrack = [NSMutableArray new];
    NSMutableArray<AVMutableCompositionTrack *> *mutableCompositionAudioTrack = [NSMutableArray new];
    AVMutableVideoCompositionInstruction *totalVideoCompositionInstruction = [AVMutableVideoCompositionInstruction new];
    AVAsset *aVideoAsset = [AVAsset assetWithURL:videoUrl];
    AVAsset *aAudioAsset = [AVAsset assetWithURL:audioUrl];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    if (videoTrack && audioTrack) {
        [mutableCompositionVideoTrack addObject:videoTrack];
        [mutableCompositionAudioTrack addObject:audioTrack];
        AVAssetTrack *aVideoAssetTrack = [aVideoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *aAudioAssetTrack = [aAudioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (aVideoAssetTrack && aAudioAssetTrack) {
            [mutableCompositionVideoTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];
            CMTime videoDuration = aVideoAsset.duration;
            if (CMTimeCompare(videoDuration, aAudioAsset.duration) == -1) {
                [mutableCompositionAudioTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
            } else if (CMTimeCompare(videoDuration, aAudioAsset.duration) == 1) {
                CMTime currentDuration = kCMTimeZero;
                while (CMTimeCompare(currentDuration, videoDuration) == -1) {
                    // repeats audio
                    CMTime restTime = CMTimeSubtract(videoDuration, currentDuration);
                    CMTime maxTime = CMTimeMinimum(aAudioAsset.duration, restTime);
                    [mutableCompositionAudioTrack.firstObject insertTimeRange:CMTimeRangeMake(kCMTimeZero, maxTime) ofTrack:aAudioAssetTrack atTime:currentDuration error:nil];
                    currentDuration = CMTimeAdd(currentDuration, aAudioAsset.duration);
                }
            }
            videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform;
            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration);
        }
    }
    NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    }
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    // try to export the file and handle the status cases
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed:
                failure(exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                failure(exportSession.error);
                break;
            default:
                success(outputURL);
                break;
        }
    }];
}
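A usage sketch (assuming the method lives on a class called, say, MediaMerger — a hypothetical name — and that videoURL and audioURL point at your own files):
[MediaMerger mergeVideoWithAudio:videoURL
                        audioUrl:audioURL
                         success:^(NSURL *url) {
                             NSLog(@"Merged file at %@", url);
                         }
                         failure:^(NSError *error) {
                             NSLog(@"Merge failed: %@", error);
                         }];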