I have an app which combines video files together to make a long video. There could be a delay between videos (e.g. V1 starts at t=0s and runs for 5 seconds, V2 starts at t=10s). In this case, I want the video to freeze on the last frame of V1 until V2 starts.
I'm using the code below, but between videos, the whole video goes white.
Any ideas how I can get the effect I'm looking for?
Thanks!
@interface VideoJoins : NSObject
-(instancetype)initWithURL:(NSURL*)url
andDelay:(NSTimeInterval)delay;
@property (nonatomic, strong) NSURL* url;
@property (nonatomic) NSTimeInterval delay;
@end
and
+(void)joinVideosSequentially:(NSArray*)videoJoins
withFileType:(NSString*)fileType
toOutput:(NSURL*)outputVideoURL
onCompletion:(dispatch_block_t) onCompletion
onError:(ErrorBlock) onError
onCancel:(dispatch_block_t) onCancel
{
//From original question on http://stackoverflow.com/questions/6575128/how-to-combine-video-clips-with-different-orientation-using-avfoundation
// Didn't add support for portrait+landscape.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime startTime = kCMTimeZero;
/*videoClipPaths is a array of paths of the video clips recorded*/
//for loop to combine clips into a single video
for (NSInteger i=0; i < [videoJoins count]; i++)
{
VideoJoins* vj = videoJoins[i];
NSURL *url = vj.url;
NSTimeInterval nextDelayTI = 0;
if(i+1 < [videoJoins count])
{
VideoJoins* vjNext = videoJoins[i+1];
nextDelayTI = vjNext.delay;
}
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
CMTime assetDuration = [asset duration];
CMTime assetDurationWithNextDelay = assetDuration;
if(nextDelayTI != 0)
{
CMTime nextDelay = CMTimeMakeWithSeconds(nextDelayTI, 1000000);
assetDurationWithNextDelay = CMTimeAdd(assetDuration, nextDelay);
}
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//set the orientation
if(i == 0)
{
[compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
}
BOOL ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetDurationWithNextDelay) ofTrack:videoTrack atTime:startTime error:nil];
ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetDuration) ofTrack:audioTrack atTime:startTime error:nil];
startTime = CMTimeAdd(startTime, assetDurationWithNextDelay);
}
//Delete output video if it exists
NSString* outputVideoPath = [outputVideoURL path]; //NSFileManager expects a filesystem path, not the URL string
if ([[NSFileManager defaultManager] fileExistsAtPath:outputVideoPath])
{
[[NSFileManager defaultManager] removeItemAtPath:outputVideoPath error:nil];
}
//export the combined video
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = outputVideoURL;
exporter.outputFileType = fileType;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^(void)
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted: {
onCompletion();
break;
}
case AVAssetExportSessionStatusFailed:
{
NSLog(#"Export Failed");
NSError* err = exporter.error;
NSLog(#"ExportSessionError: %#", [err localizedDescription]);
onError(err);
break;
}
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
NSLog(#"ExportSessionError: %#", [exporter.error localizedDescription]);
onCancel();
break;
}
}];
}
EDIT: Got it working. Here is how I extract the images and generate the videos from those images:
+ (void)writeImageAsMovie:(UIImage*)image
toPath:(NSURL*)url
fileType:(NSString*)fileType
duration:(NSTimeInterval)duration
completion:(VoidBlock)completion
{
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:url
fileType:fileType
error:&error];
NSParameterAssert(videoWriter);
CGSize size = image.size;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
CMTime halfTime = CMTimeMakeWithSeconds(duration/2, 100000);
CMTime endTime = CMTimeMakeWithSeconds(duration, 100000);
CVPixelBufferRef buffer = [VideoCreator pixelBufferFromCGImage:image.CGImage];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:halfTime];
[adaptor appendPixelBuffer:buffer withPresentationTime:endTime];
CVPixelBufferRelease(buffer); //release the pixel buffer once it has been appended
//Finish the session:
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:endTime];
[videoWriter finishWritingWithCompletionHandler:^{
if(videoWriter.error)
{
NSLog(#"Error:%#", [error localizedDescription]);
}
if(completion)
{
completion();
}
}];
}
+(void)generateVideoImageFromURL:(NSURL*)url
atTime:(CMTime)thumbTime
withMaxSize:(CGSize)maxSize
completion:(ImageBlock)handler
{
AVURLAsset *asset=[[AVURLAsset alloc] initWithURL:url options:nil];
if(!asset)
{
if(handler)
{
handler(nil);
}
return; //bail out whether or not a handler was supplied
}
if(CMTIME_IS_POSITIVE_INFINITY(thumbTime))
{
thumbTime = asset.duration;
}
else if(CMTIME_IS_NEGATIVE_INFINITY(thumbTime) || CMTIME_IS_INVALID(thumbTime) || CMTIME_IS_INDEFINITE(thumbTime))
{
thumbTime = CMTimeMake(0, 30);
}
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform=TRUE;
generator.maximumSize = maxSize;
CMTime actualTime;
NSError* error;
CGImageRef image = [generator copyCGImageAtTime:thumbTime actualTime:&actualTime error:&error];
UIImage *thumb = [[UIImage alloc] initWithCGImage:image];
CGImageRelease(image);
if(handler)
{
handler(thumb);
}
}
AVMutableComposition can only stitch videos together. I did it by doing two things:
Extracting the last frame of the first video as an image.
Making a video from this image (the duration depends on your requirement).
Then you can compose these three videos (V1, V2 and your single-image video). Both tasks are very easy to do.
For extracting the image out of the video, look at this link. If you don't want to use MPMoviePlayerController, which is used by the accepted answer, then look at the other answer by Steve.
For making a video from the image, check out this link. That question is about an audio issue, but I don't think you need audio, so just look at the method mentioned in the question itself.
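As a rough illustration, here is how the helpers from the EDIT above could be chained together. This assumes they all live on the same VideoCreator class shown earlier and that ErrorBlock takes an NSError; v1URL, v2URL and outputURL are placeholder locations:
NSURL *gapFillerURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"gap-filler.mov"]];
//1. Grab the last frame of V1 (passing positive infinity makes the helper use asset.duration).
[VideoCreator generateVideoImageFromURL:v1URL atTime:kCMTimePositiveInfinity withMaxSize:CGSizeZero completion:^(UIImage *lastFrame) {
    //2. Turn that frame into a short still video covering the gap (5 seconds in this example).
    [VideoCreator writeImageAsMovie:lastFrame toPath:gapFillerURL fileType:AVFileTypeQuickTimeMovie duration:5.0 completion:^{
        //3. Stitch V1 + still video + V2 back to back; no delays needed since the gap is now a real clip.
        NSArray *joins = @[[[VideoJoins alloc] initWithURL:v1URL andDelay:0],
                           [[VideoJoins alloc] initWithURL:gapFillerURL andDelay:0],
                           [[VideoJoins alloc] initWithURL:v2URL andDelay:0]];
        [VideoCreator joinVideosSequentially:joins withFileType:AVFileTypeQuickTimeMovie toOutput:outputURL
                                onCompletion:^{ NSLog(@"done"); }
                                     onError:^(NSError *error) { NSLog(@"%@", error); }
                                    onCancel:^{ NSLog(@"cancelled"); }];
    }];
}];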
UPDATE:
There is an easier way, but it comes with a disadvantage. You can have two AVPlayers. The first one plays your video, which has white frames in between. The other one sits behind it, paused at the last frame of video 1. So when the middle part comes, you will see the second AVPlayer showing the last frame, and as a whole it will look like video 1 is paused. Trust me, the naked eye can't tell when the player got switched. The obvious disadvantage is that your exported video will still contain the blank frames, so if you are only going to play it back inside your app, you can go with this approach.
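A hedged sketch of that idea, inside a view controller; v1URL and mergedURL are placeholders (the merged file is the one that currently shows white between clips):
AVPlayer *backPlayer = [AVPlayer playerWithURL:v1URL];      //stays paused on V1's last frame
AVPlayer *frontPlayer = [AVPlayer playerWithURL:mergedURL]; //plays the full exported video
AVPlayerLayer *backLayer = [AVPlayerLayer playerLayerWithPlayer:backPlayer];
AVPlayerLayer *frontLayer = [AVPlayerLayer playerLayerWithPlayer:frontPlayer];
backLayer.frame = self.view.bounds;
frontLayer.frame = self.view.bounds;
[self.view.layer addSublayer:backLayer];  //behind
[self.view.layer addSublayer:frontLayer]; //in front
//Park the background player roughly one frame before the end of V1.
AVAsset *v1Asset = [AVAsset assetWithURL:v1URL];
CMTime lastFrame = CMTimeSubtract(v1Asset.duration, CMTimeMake(1, 30));
[backPlayer seekToTime:lastFrame toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];
[frontPlayer play];
//During the gap, hide frontLayer (e.g. from a periodic time observer) so the paused
//background player shows through, then show it again when V2 starts.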
The first frame of a video asset is always black or white, so skip it when inserting:
CMTime delta = CMTimeMake(1, 25); //1 frame (if fps = 25)
CMTimeRange timeRangeInVideoAsset = CMTimeRangeMake(delta,clipVideoTrack.timeRange.duration);
nextVideoClipStartTime = CMTimeAdd(nextVideoClipStartTime, timeRangeInVideoAsset.duration);
Merged more than 400 short videos into one.
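For context, here is a hedged sketch of where that snippet sits inside a merge loop; clipVideoTrack, compositionVideoTrack and nextVideoClipStartTime are assumed to come from your own merge code:
CMTime delta = CMTimeMake(1, 25); //skip the first (black/white) frame, assuming 25 fps
CMTimeRange timeRangeInVideoAsset = CMTimeRangeMake(delta, CMTimeSubtract(clipVideoTrack.timeRange.duration, delta)); //stay inside the track
[compositionVideoTrack insertTimeRange:timeRangeInVideoAsset
                               ofTrack:clipVideoTrack
                                atTime:nextVideoClipStartTime
                                 error:nil];
nextVideoClipStartTime = CMTimeAdd(nextVideoClipStartTime, timeRangeInVideoAsset.duration);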
Related
My app captures a 3-second video clip, and programmatically I want to create a 15-second clip from the recorded 3-second clip by looping it 5 times. Finally, I have to save the 15-second clip to the Camera Roll.
I got my 3-second video clip via AVCaptureMovieFileOutput, and I have its NSURL from the delegate; the file currently lives in NSTemporaryDirectory().
I am using AVAssetWriterInput for looping it. But it asks for a CMSampleBufferRef, like:
[writerInput appendSampleBuffer:sampleBuffer];
How will I get this CMSampleBufferRef from the video in NSTemporaryDirectory()?
I have seen code for converting a UIImage to a CMSampleBufferRef, but I can't find any for a video file.
Any suggestion will be helpful. :)
Finally, I fixed my problem using AVMutableComposition. Here is my code:
AVMutableComposition *mixComposition = [AVMutableComposition new];
AVMutableCompositionTrack *mutableCompVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:threeSecFileURL options:nil]; //threeSecFileURL is the NSURL of the recorded 3-second clip (identifiers cannot start with a digit)
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
[mutableCompVideoTrack setPreferredTransform:rotationTransform];
CMTime currentCMTime = kCMTimeZero;
for (NSInteger count = 0 ; count < 5 ; count++)
{
[mutableCompVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:currentCMTime error:nil];
currentCMTime = CMTimeAdd(currentCMTime, [videoAsset duration]);
}
NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"moviefull" stringByAppendingPathExtension:@"mov"]];
NSURL *finalVideoFileURL = [NSURL fileURLWithPath:fullMoviePath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
[exportSession setOutputFileType:AVFileTypeQuickTimeMovie];
[exportSession setOutputURL:finalVideoFileURL];
CMTimeRange range = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); //use the composition's duration directly so the timescale stays consistent
[exportSession setTimeRange:range];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusFailed:
{
NSLog(@"Export failed: %@ %@", [[exportSession error] localizedDescription], [[exportSession error] debugDescription]);
break;
}
case AVAssetExportSessionStatusCancelled:
{
NSLog(@"Export canceled");
break;
}
case AVAssetExportSessionStatusCompleted:
{
NSLog(@"Export complete!");
break;
}
default:
NSLog(@"default");
break;
}
}];
Take a look at AVAssetReader; it can return a CMSampleBufferRef. Keep in mind, you'll need to manipulate the timestamps for your approach to work.
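A minimal sketch of that approach, assuming clipURL points at the 3-second clip in NSTemporaryDirectory():
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:clipURL options:nil];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
NSError *error = nil;
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
AVAssetReaderTrackOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil]; //nil passes the samples through undecoded
[reader addOutput:output];
[reader startReading];
CMSampleBufferRef sampleBuffer = NULL;
while ((sampleBuffer = [output copyNextSampleBuffer]) != NULL)
{
    //Shift the timing (e.g. with CMSampleBufferCreateCopyWithNewTiming) before
    //appending the buffer again for the 2nd..5th repetition.
    //[writerInput appendSampleBuffer:sampleBuffer];
    CFRelease(sampleBuffer);
}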
I have a set of video clips that I would like to merge together and then put a watermark on it.
I am able to do both functions individually; however, problems arise when performing them together.
All clips that will be merged are either 1920x1080 or 960x540.
For some reason, AVAssetExportSession does not render them together correctly.
Here are the 2 bugs based on 3 different scenarios:
This image is a result of:
Merging Clips together
As you can see, there is nothing wrong here, the output video produces the desired effect.
However, when I then try to add a watermark, it creates the following issue:
This image is a result of:
Merging Clips together
Putting a watermark on it
BUG 1: Some clips in the video get resized for whatever reason while other clips do not.
This image is a result of:
Merging Clips together
Resizing clips that are 960x540 to 1920x1080
Putting a watermark on it
BUG 2: Now the clips that need to be resized get resized; however, the old unresized clip is still there.
Merging/Resizing Code:
-(void) mergeClips{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *mutableVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *mutableAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// loop through the list of videos and add them to the track
CMTime currentTime = kCMTimeZero;
NSMutableArray* instructionArray = [[NSMutableArray alloc] init];
if (_clipsArray){
for (int i = 0; i < (int)[_clipsArray count]; i++){
NSURL* url = [_clipsArray objectAtIndex:i];
AVAsset *asset = [AVAsset assetWithURL:url];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CGSize size = videoTrack.naturalSize;
CGFloat widthScale = 1920.0f/size.width;
CGFloat heightScale = 1080.0f/size.height;
// lines that performs resizing
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mutableVideoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(widthScale,heightScale);
CGAffineTransform move = CGAffineTransformMakeTranslation(0,0);
[layerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:currentTime];
[instructionArray addObject:layerInstruction];
[mutableVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:videoTrack
atTime:currentTime error:nil];
[mutableAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:audioTrack
atTime:currentTime error:nil];
currentTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(asset.duration) + CMTimeGetSeconds(currentTime), asset.duration.timescale);
}
}
AVMutableVideoCompositionInstruction * mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, currentTime);
mainInstruction.layerInstructions = instructionArray;
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *lastPostedDayPath = [documentsDirectory stringByAppendingPathComponent:@"lastPostedDay"];
//Check if folder exists, if not create folder
if (![[NSFileManager defaultManager] fileExistsAtPath:lastPostedDayPath]){
[[NSFileManager defaultManager] createDirectoryAtPath:lastPostedDayPath withIntermediateDirectories:NO attributes:nil error:nil];
}
NSString *fileName = [NSString stringWithFormat:@"%li_%li_%li.mov", (long)_month, (long)_day, (long)_year];
NSString *finalDayPath = [lastPostedDayPath stringByAppendingPathComponent:fileName];
NSURL *url = [NSURL fileURLWithPath:finalDayPath];
BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:finalDayPath];
if (fileExists){
NSLog(#"file exists");
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
}
AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainComposition.frameDuration = CMTimeMake(1, 30);
mainComposition.renderSize = CGSizeMake(1920.0f, 1080.0f);
// 5 - Create exporter
_exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
_exportSession.outputURL=url;
_exportSession.outputFileType = AVFileTypeQuickTimeMovie;
_exportSession.shouldOptimizeForNetworkUse = YES;
_exportSession.videoComposition = mainComposition;
[_exportSession exportAsynchronouslyWithCompletionHandler:^{
[merge_timer invalidate];
merge_timer = nil;
switch (_exportSession.status) {
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed -> Reason: %#, User Info: %#",
_exportSession.error.localizedDescription,
_exportSession.error.userInfo.description);
[self showSavingFailedDialog];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export cancelled");
[self showSavingFailedDialog];
break;
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export finished");
[self addWatermarkToExportSession:_exportSession];
break;
default:
break;
}
}];
});
}
Once it finishes, I run it through a different export session that simply adds a watermark.
Is there something I am doing wrong in my code or process?
Is there an easier way for achieving this?
Thank you for your time!
I was able to solve my issue.
For some reason, AVAssetExportSession will not actually create a 'flat' video file of the merged clips, so it still recognized the lower-resolution clips and their locations when adding the watermark, which caused them to resize.
What I did to solve this was first use AVAssetWriter to merge my clips and create one 'flat' file. I could then add the watermark without any resizing issue.
Hope this helps anyone who may come across this problem in the future!
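Roughly, the flattening pass looks like this (a sketch, not my exact code; it handles video only, mergedURL and flatURL are placeholder locations, and you may still need explicit scaling if the writer does not resize the mixed-resolution frames the way you expect):
AVURLAsset *merged = [AVURLAsset URLAssetWithURL:mergedURL options:nil];
AVAssetTrack *videoTrack = [[merged tracksWithMediaType:AVMediaTypeVideo] firstObject];
NSError *error = nil;
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:merged error:&error];
AVAssetReaderTrackOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
    outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)}];
[reader addOutput:readerOutput];
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:flatURL fileType:AVFileTypeQuickTimeMovie error:&error];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
    outputSettings:@{AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : @1920, AVVideoHeightKey : @1080}];
[writer addInput:writerInput];
[reader startReading];
[writer startWriting];
[writer startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t queue = dispatch_queue_create("flatten", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    while (writerInput.readyForMoreMediaData)
    {
        CMSampleBufferRef sample = [readerOutput copyNextSampleBuffer];
        if (sample == NULL)
        {
            [writerInput markAsFinished];
            [writer finishWritingWithCompletionHandler:^{ /* now run the watermark export on flatURL */ }];
            break;
        }
        [writerInput appendSampleBuffer:sample];
        CFRelease(sample);
    }
}];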
I also encountered the same problem. You can set the opacity after one video ends, like this:
[layerInstruction setOpacity:0.0 atTime:duration];
At the moment I am debugging some code. What it does is read a movie file into a frames array, apply some transformations to the frames, and compile everything back into a video file. I have fixed all the memory leaks that were caused by my own code; the remaining one is pretty serious. It leaves almost 400 MB of memory after the process. Here is the screenshot of the leak.
As you can see, the highest-level call is only in the VideoToolbox library. However, I do not even include this library in my project. I don't believe that this leak comes from Apple's library and that there is nothing I can do about it.
Here is the only code that uses anything related to H264 and decoding, which were mentioned in the call tree.
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size
{
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}
NSError *error = nil;
// FIRST, start up an AVAssetWriter instance to write your video
// Give it a destination path (for us: tmp/temp.mov)
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a SESSION of writing.
// After you start a session, you will keep adding image frames
// until you are complete - then you will tell it you are done.
[videoWriter startWriting];
// This starts your video at time = 0
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//int frameDuration = (1/fps)*600.0;
//NSLog(#"FRAME DURATION: %d",frameDuration);
int i = 0;
while (1)
{
// Check if the writer is ready for more data, if not, just wait
if(writerInput.readyForMoreMediaData){
CMTime frameTime = CMTimeMake(1, fps);
// CMTime = Value and Timescale.
// Timescale = the number of tics per second you want
// Value is the number of tics
// For us - each frame we add will be 1/4th of a second
// Apple recommend 600 tics per second for video because it is a
// multiple of the standard video rates 24, 30, 60 fps etc.
CMTime lastTime=CMTimeMake(i, fps);
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
if (i == 0) {presentTime = CMTimeMake(0, 600);}
// This ensures the first frame starts at 0.
if (i >= [array count])
{
buffer = NULL;
}
else
{
// This command grabs the next UIImage and converts it to a CGImage
CVPixelBufferRef tempBuffer = buffer;
CVPixelBufferRelease(tempBuffer);
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage] andSize:size];
}
if (buffer)
{
// Give the CGImage to the AVAssetWriter to add to your video
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
i++;
}
else
{
//Finish the session:
// This is important to be done exactly in this order
[writerInput markAsFinished];
// WARNING: finishWriting in the solution above is deprecated.
// You now need to give a completion handler.
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Finished writing...checking completion status...");
if (videoWriter.status != AVAssetWriterStatusFailed && videoWriter.status == AVAssetWriterStatusCompleted)
{
NSLog(#"Video writing succeeded.");
// Move video to camera roll
// NOTE: You cannot write directly to the camera roll.
// You must first write to an iOS director then move it!
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0]; // Get documents folder
NSString *audioTemp = [documentsDirectory stringByAppendingPathComponent:@"/temp-audio.m4a"];
NSString *finalTemp = [documentsDirectory stringByAppendingPathComponent:@"/temp.mov"];
[self mergeVideoFromPath:[NSURL fileURLWithPath:path] withAudioFromPath:[NSURL fileURLWithPath:audioTemp] atPath:[NSURL fileURLWithPath:finalTemp]];
} else
{
NSLog(#"Video writing failed: %#", videoWriter.error);
}
}]; // end videoWriter finishWriting Block
CVPixelBufferRelease(buffer);
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
NSLog (#"Done");
break;
}
}
}
}
There's a strange behaviour I've found when trying to merge videos with AVFoundation. I'm pretty sure that I've made a mistake somewhere, but I'm too blind to see it. My goal is just to merge 4 videos (later there will be a crossfade transition between them).
Every time I try to export the video, I get this error:
Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" UserInfo=0x7fd94073cc30 {NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}
The funniest thing is that if I don't provide AVAssetExportSession with an AVMutableVideoComposition, then everything works fine! I can't understand what I'm doing wrong. The source videos were downloaded from YouTube and have the .mp4 extension. I can play them with MPMoviePlayerController. While checking the source code, please look carefully at the AVMutableVideoComposition.
I was testing this code in Xcode 6.0.1 on iOS simulator.
#import "VideoStitcher.h"
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
@implementation VideoStitcher
{
VideoStitcherCompletionBlock _completionBlock;
AVMutableComposition *_composition;
AVMutableVideoComposition *_videoComposition;
}
- (instancetype)init
{
self = [super init];
if (self)
{
_composition = [AVMutableComposition composition];
_videoComposition = [AVMutableVideoComposition videoComposition];
}
return self;
}
- (void)compileVideoWithAssets:(NSArray *)assets completion:(VideoStitcherCompletionBlock)completion
{
_completionBlock = [completion copy];
if (assets == nil || assets.count < 2)
{
// We need at least two video to make a stitch, right?
NSAssert(NO, #"VideoStitcher: assets parameter is nil or has not enough items in it");
}
else
{
[self composeAssets:assets];
if (_composition != nil) // if stitching went good and no errors were found
[self exportComposition];
}
}
- (void)composeAssets:(NSArray *)assets
{
AVMutableCompositionTrack *compositionVideoTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *compositionError = nil;
CMTime currentTime = kCMTimeZero;
AVAsset *asset = nil;
for (int i = (int)assets.count - 1; i >= 0; i--) //For some reason videos are compiled in reverse order. Find the bug later. 06.10.14
{
asset = assets[i];
AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
BOOL success = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetVideoTrack.timeRange.duration)
ofTrack:assetVideoTrack
atTime:currentTime
error:&compositionError];
if (success)
{
CMTimeAdd(currentTime, asset.duration);
}
else
{
NSLog(#"VideoStitcher: something went wrong during inserting time range in composition");
if (compositionError != nil)
{
NSLog(#"%#", compositionError);
_completionBlock(nil, compositionError);
_composition = nil;
return;
}
}
}
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration);
videoCompositionInstruction.backgroundColor = [[UIColor redColor] CGColor];
_videoComposition.instructions = @[videoCompositionInstruction];
_videoComposition.renderSize = [self calculateOptimalRenderSizeFromAssets:assets];
_videoComposition.frameDuration = CMTimeMake(1, 600);
}
- (void)exportComposition
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"testVideo.mov"];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSString *filePath = [url path];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError *error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"removeItemAtPath %# error:%#", filePath, error);
}
}
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:_composition
presetName:AVAssetExportPreset1280x720];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = _videoComposition;
[exporter exportAsynchronouslyWithCompletionHandler:^{
[self exportDidFinish:exporter];
}];
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
NSLog(#"%li", session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
// time to call delegate methods, but for testing purposes we save the video in 'photos' app
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
if (error == nil)
{
NSLog(#"successfully saved video");
}
else
{
NSLog(#"saving video failed.\n%#", error);
}
}];
}
}
else if (session.status == AVAssetExportSessionStatusFailed)
{
NSLog(#"VideoStitcher: exporting failed.\n%#", session.error);
}
}
- (CGSize)calculateOptimalRenderSizeFromAssets:(NSArray *)assets
{
AVAsset *firstAsset = assets[0];
AVAssetTrack *firstAssetVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGFloat maxWidth = firstAssetVideoTrack.naturalSize.height;
CGFloat maxHeight = firstAssetVideoTrack.naturalSize.width;
for (AVAsset *asset in assets)
{
AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (assetVideoTrack.naturalSize.width > maxWidth)
maxWidth = assetVideoTrack.naturalSize.width;
if (assetVideoTrack.naturalSize.height > maxHeight)
maxHeight = assetVideoTrack.naturalSize.height;
}
return CGSizeMake(maxWidth, maxHeight);
}
@end
Thank you for your attention. I am really tired; I've been trying to find the bug for four hours straight. I'll go to sleep now.
I've finally found the solution. The description of the error led me in the wrong direction: "Cannot Decode. The media data could not be decoded. It may be damaged." From this description you might think that there is something wrong with your video files. I spent 5 hours experimenting with formats, debugging, etc.
Well, THE ANSWER IS COMPLETELY DIFFERENT!
My mistake was that I forgot that CMTimeAdd() returns a value. I thought that it changed the value of its first argument, and in the code you can see this:
CMTime currentTime = kCMTimeZero;
for (int i = (int)assets.count - 1; i >= 0; i--)
{
CMTimeAdd(currentTime, asset.duration); //HERE!! I don't actually increment the value! currentTime is always kCMTimeZero
}
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration); // And that's where everything breaks!
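The fix is simply to assign the result back:
currentTime = CMTimeAdd(currentTime, asset.duration); //CMTimeAdd() returns the sum; it does not modify currentTime in place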
The lesson that I've learned: When working with AVFoundation always check your time values! It's very important, otherwise you'll get a lot of bugs.
Error:
domain: "AVFoundationErrorDomain" - code: 18446744073709539816
Solution (Swift 5.5):
Stop running multiple AVPlayers on a background thread.
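In Objective-C terms the same fix is just making sure the player is driven from the main queue (player here is a placeholder for whichever AVPlayer you are using):
dispatch_async(dispatch_get_main_queue(), ^{
    [player play]; //AVPlayer (and its layer) should be driven from the main thread
});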
I am attempting to rotate video prior to upload on my iOS device because other platforms (such as Android) do not properly interpret the rotation information in iOS-recorded videos and, as a result, play them improperly rotated.
I have looked at the following Stack Overflow posts but have not had success applying any of them to my case:
iOS rotate every frame of video
Rotating Video w/ AVMutableVideoCompositionLayerInstruction
AVMutableVideoComposition rotated video captured in portrait mode
iOS AVFoundation: Setting Orientation of Video
I copied the Apple AVSimpleEditor project sample, but unfortunately all that ever happens is that, upon creating an AVAssetExportSession and calling exportAsynchronouslyWithCompletionHandler, no rotation is performed, and, what's worse, the rotation metadata is stripped out of the resulting file.
Here is the code that runs the export:
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:[_mutableComposition copy] presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileType3GPP;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.videoComposition = _mutableVideoComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
NSLog(#"Status is %d %#", exportSession.status, exportSession.error);
handler(exportSession);
[exportSession release];
}];
The values _mutableComposition and _mutableVideoComposition are initialized by this method here:
- (void) getVideoComposition:(AVAsset*)asset
{
AVMutableComposition *mutableComposition = nil;
AVMutableVideoComposition *mutableVideoComposition = nil;
AVMutableVideoCompositionInstruction *instruction = nil;
AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
CGAffineTransform t1;
CGAffineTransform t2;
AVAssetTrack *assetVideoTrack = nil;
AVAssetTrack *assetAudioTrack = nil;
// Check if the asset contains video and audio tracks
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
}
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
}
CMTime insertionPoint = kCMTimeZero;
NSError *error = nil;
// Step 1
// Create a composition with the given asset and insert audio and video tracks into it from the asset
// Check whether a composition has already been created, i.e, some other tool has already been applied
// Create a new composition
mutableComposition = [AVMutableComposition composition];
// Insert the video and audio tracks from AVAsset
if (assetVideoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
}
if (assetAudioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
}
// Step 2
// Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
// Step 3
// Set the appropriate render sizes and rotational transforms
// Create a new video composition
mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
// The rotate transform is set on a layer instruction
instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);
layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(mutableComposition.tracks)[0]];
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
// Step 4
// Add the transform instructions to the video composition
instruction.layerInstructions = @[layerInstruction];
mutableVideoComposition.instructions = @[instruction];
TT_RELEASE_SAFELY(_mutableComposition);
_mutableComposition = [mutableComposition retain];
TT_RELEASE_SAFELY(_mutableVideoComposition);
_mutableVideoComposition = [mutableVideoComposition retain];
}
I pulled this method from AVSERotateCommand from here. Can anyone suggest why this method would not successfully rotate my video by the necessary 90 degrees?
Because you are using AVAssetExportPresetPassthrough, the AVAssetExportSession will ignore the videoComposition. Use any other preset.
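For example, reusing the variables from the question:
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:[_mutableComposition copy]
                                                                       presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition = _mutableVideoComposition; //honored now that the preset re-encodes instead of passing through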