There's a strange behaviour I've found when trying to merge videos with AVFoundation. I'm pretty sure that I've made a mistake somewhere, but I'm too blind to see it. My goal is just to merge 4 videos (later there will be a crossfade transition between them).
Every time I try to export the video I get this error:
Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" UserInfo=0x7fd94073cc30 {NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}
The funniest thing is that if I don't provide AVAssetExportSession with an AVMutableVideoComposition, then everything works fine! I can't understand what I'm doing wrong. The source videos are downloaded from YouTube and have the .mp4 extension. I can play them with MPMoviePlayerController. While checking the source code, please look carefully at the AVMutableVideoComposition.
I was testing this code in Xcode 6.0.1 on the iOS simulator.
#import "VideoStitcher.h"
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
@implementation VideoStitcher
{
VideoStitcherCompletionBlock _completionBlock;
AVMutableComposition *_composition;
AVMutableVideoComposition *_videoComposition;
}
- (instancetype)init
{
self = [super init];
if (self)
{
_composition = [AVMutableComposition composition];
_videoComposition = [AVMutableVideoComposition videoComposition];
}
return self;
}
- (void)compileVideoWithAssets:(NSArray *)assets completion:(VideoStitcherCompletionBlock)completion
{
_completionBlock = [completion copy];
if (assets == nil || assets.count < 2)
{
// We need at least two videos to make a stitch, right?
NSAssert(NO, @"VideoStitcher: assets parameter is nil or has not enough items in it");
}
else
{
[self composeAssets:assets];
if (_composition != nil) // if stitching went good and no errors were found
[self exportComposition];
}
}
- (void)composeAssets:(NSArray *)assets
{
AVMutableCompositionTrack *compositionVideoTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *compositionError = nil;
CMTime currentTime = kCMTimeZero;
AVAsset *asset = nil;
for (int i = (int)assets.count - 1; i >= 0; i--) //For some reason videos are compiled in reverse order. Find the bug later. 06.10.14
{
asset = assets[i];
AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
BOOL success = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetVideoTrack.timeRange.duration)
ofTrack:assetVideoTrack
atTime:currentTime
error:&compositionError];
if (success)
{
CMTimeAdd(currentTime, asset.duration);
}
else
{
NSLog(#"VideoStitcher: something went wrong during inserting time range in composition");
if (compositionError != nil)
{
NSLog(#"%#", compositionError);
_completionBlock(nil, compositionError);
_composition = nil;
return;
}
}
}
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration);
videoCompositionInstruction.backgroundColor = [[UIColor redColor] CGColor];
_videoComposition.instructions = @[videoCompositionInstruction];
_videoComposition.renderSize = [self calculateOptimalRenderSizeFromAssets:assets];
_videoComposition.frameDuration = CMTimeMake(1, 600);
}
- (void)exportComposition
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"testVideo.mov"];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
NSString *filePath = [url path];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError *error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"removeItemAtPath %# error:%#", filePath, error);
}
}
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:_composition
presetName:AVAssetExportPreset1280x720];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = _videoComposition;
[exporter exportAsynchronouslyWithCompletionHandler:^{
[self exportDidFinish:exporter];
}];
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
NSLog(#"%li", session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
// time to call delegate methods, but for testing purposes we save the video in 'photos' app
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
if (error == nil)
{
NSLog(#"successfully saved video");
}
else
{
NSLog(#"saving video failed.\n%#", error);
}
}];
}
}
else if (session.status == AVAssetExportSessionStatusFailed)
{
NSLog(#"VideoStitcher: exporting failed.\n%#", session.error);
}
}
- (CGSize)calculateOptimalRenderSizeFromAssets:(NSArray *)assets
{
AVAsset *firstAsset = assets[0];
AVAssetTrack *firstAssetVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGFloat maxWidth = firstAssetVideoTrack.naturalSize.height;
CGFloat maxHeight = firstAssetVideoTrack.naturalSize.width;
for (AVAsset *asset in assets)
{
AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (assetVideoTrack.naturalSize.width > maxWidth)
maxWidth = assetVideoTrack.naturalSize.width;
if (assetVideoTrack.naturalSize.height > maxHeight)
maxHeight = assetVideoTrack.naturalSize.height;
}
return CGSizeMake(maxWidth, maxHeight);
}
@end
Thank you for your attention. I am really tired, I've been trying to find the bug for four hours straight. I'll go to sleep now.
I've finally found the solution. The error description led me in the wrong direction: "Cannot Decode. The media data could not be decoded. It may be damaged." From this description you may think that there is something wrong with your video files. I've spent 5 hours experimenting with formats, debugging, etc.
Well, THE ANSWER IS COMPLETELY DIFFERENT!
My mistake was that I forgot that CMTimeAdd() returns a value. I thought that it changed the value of its first argument, and in the code you can see this:
CMTime currentTime = kCMTimeZero;
for (int i = (int)assets.count - 1; i >= 0; i--)
{
CMTimeAdd(currentTime, asset.duration); //HERE!! I don't actually increment the value! currentTime is always kCMTimeZero
}
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration); // And that's where everything breaks!
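The fix, as explained above, is simply to assign the returned value back. A minimal sketch of the corrected loop, using the same variables as above:
CMTime currentTime = kCMTimeZero;
for (int i = (int)assets.count - 1; i >= 0; i--)
{
    // ... insertTimeRange:ofTrack:atTime:error: as before ...
    currentTime = CMTimeAdd(currentTime, asset.duration); // assign the result back
}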
The lesson I've learned: when working with AVFoundation, always check your time values! It's very important; otherwise you'll get a lot of bugs.
Error:
domain: "AVFoundationErrorDomain" - code: 18446744073709539816
Solution: [Swift 5.5]
Stop running multiple AVPlayer instances on a background thread.
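A minimal sketch of the idea (videoURL is a placeholder; the point is just to keep AVPlayer creation and playback calls on the main queue):
dispatch_async(dispatch_get_main_queue(), ^{
    AVPlayer *player = [AVPlayer playerWithURL:videoURL]; // create and drive the player on the main queue
    [player play];
});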
It's a well-documented issue on SO where AVAssets get rotated after writing them to file, either using AVAssetWriter or AVComposition. And there are solutions, such as looking at the video track transform and seeing how the asset is rotated so that it can be rotated to the desired orientation for your particular use case.
What I want to know, however, is why this happens and whether it's possible to prevent it. I run into issues not only when writing custom video files but also when transforming videos into GIFs using CGImageDestination, where the output GIF looks great except that it's rotated.
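For reference, a common version of that transform check, sketched in Objective-C (asset is a placeholder; this is the general pattern, not the asker's code):
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGAffineTransform t = track.preferredTransform;
if (t.a == 0 && t.b == 1 && t.c == -1 && t.d == 0) {
    // captured in portrait; frames are stored rotated 90 degrees
} else if (t.a == 0 && t.b == -1 && t.c == 1 && t.d == 0) {
    // portrait upside-down
} else if (t.a == -1 && t.b == 0 && t.c == 0 && t.d == -1) {
    // landscape left
} else {
    // landscape right / no rotation
}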
To give a quick reference point for my code that writes an asset to file:
let destinationURL = url ?? NSURL(fileURLWithPath: "\(NSTemporaryDirectory())\(String.random()).mp4")
if let writer = try? AVAssetWriter(URL: destinationURL, fileType: AVFileTypeMPEG4),
videoTrack = self.asset.tracksWithMediaType(AVMediaTypeVideo).last,
firstBuffer = buffers.first {
let videoCompressionProps = [AVVideoAverageBitRateKey: videoTrack.estimatedDataRate]
let outputSettings: [String: AnyObject] = [
AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: width,
AVVideoHeightKey: height,
AVVideoCompressionPropertiesKey: videoCompressionProps
]
let writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings, sourceFormatHint: (videoTrack.formatDescriptions.last as! CMFormatDescription))
writerInput.expectsMediaDataInRealTime = false
let rotateTransform = CGAffineTransformMakeRotation(Utils.degreesToRadians(-90))
writerInput.transform = CGAffineTransformScale(rotateTransform, -1, 1)
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: nil)
writer.addInput(writerInput)
writer.startWriting()
writer.startSessionAtSourceTime(CMSampleBufferGetPresentationTimeStamp(firstBuffer))
for (sample, newTimestamp) in Array(Zip2Sequence(buffers, timestamps)) {
if let imageBuffer = CMSampleBufferGetImageBuffer(sample) {
while !writerInput.readyForMoreMediaData {
NSThread.sleepForTimeInterval(0.1)
}
pixelBufferAdaptor.appendPixelBuffer(imageBuffer, withPresentationTime: newTimestamp)
}
}
writer.finishWritingWithCompletionHandler {
// completion code
}
As you can see above, a simple transform rotates the output video back to portrait. However, if I have a landscape video, that transform no longer works. And as I mentioned before, transforming the video to a GIF performs the exact same 90-degree rotation on my asset.
My feelings can be summed up in these two gifs:
http://giphy.com/gifs/jon-stewart-why-lYKvaJ8EQTzCU
http://giphy.com/gifs/the-office-no-steve-carell-12XMGIWtrHBl5e
I also ran into the same problem. I then rotated my video by 90° and it works fine.
Here is the solution:
//in videoorientation.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface videoorientationViewController : UIViewController
@property AVMutableComposition *mutableComposition;
@property AVMutableVideoComposition *mutableVideoComposition;
@property AVMutableAudioMix *mutableAudioMix;
@property AVAssetExportSession *exportSession;
- (void)performWithAsset : (NSURL *)moviename;
@end
//in viewcontroller.m
- (void)performWithAsset : (NSURL *)moviename
{
self.mutableComposition=nil;
self.mutableVideoComposition=nil;
self.mutableAudioMix=nil;
// NSString* filename = [NSString stringWithFormat:@"temp1.mov"];
//
// NSLog(@"file name== %@",filename);
//
// [[NSUserDefaults standardUserDefaults]setObject:filename forKey:@"currentName"];
// NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
// NSLog(@"file number %i",_currentFile);
// NSURL* url = [NSURL fileURLWithPath:path];
// NSString *videoURL = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"m4v"];
AVAsset *asset = [[AVURLAsset alloc] initWithURL:moviename options:nil];
AVMutableVideoCompositionInstruction *instruction = nil;
AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
CGAffineTransform t1;
CGAffineTransform t2;
AVAssetTrack *assetVideoTrack = nil;
AVAssetTrack *assetAudioTrack = nil;
// Check if the asset contains video and audio tracks
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
}
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
}
CMTime insertionPoint = kCMTimeZero;
NSError *error = nil;
// Step 1
// Create a composition with the given asset and insert audio and video tracks into it from the asset
if (!self.mutableComposition) {
// Check whether a composition has already been created, i.e, some other tool has already been applied
// Create a new composition
self.mutableComposition = [AVMutableComposition composition];
// Insert the video and audio tracks from AVAsset
if (assetVideoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
}
if (assetAudioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
}
}
// Step 2
// Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
float width=assetVideoTrack.naturalSize.width;
float height=assetVideoTrack.naturalSize.height;
float toDiagonal=sqrt(width*width+height*height);
float toDiagonalAngle = radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngle2=90-radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngleComple;
float toDiagonalAngleComple2;
float finalHeight = 0.0;
float finalWidth = 0.0;
float degrees=90;
if(degrees>=0&&degrees<=90){
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(height*sinf(degreesToRadians(degrees)), 0.0);
}
else if(degrees>90&&degrees<=180){
float degrees2 = degrees-90;
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degrees2))+height*cosf(degreesToRadians(degrees2)), height*sinf(degreesToRadians(degrees2)));
}
else if(degrees>=-90&&degrees<0){
float degrees2 = degrees-90;
float degreesabs = ABS(degrees);
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(0, width*sinf(degreesToRadians(degreesabs)));
}
else if(degrees>=-180&&degrees<-90){
float degreesabs = ABS(degrees);
float degreesplus = degreesabs-90;
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degreesplus)), height*sinf(degreesToRadians(degreesplus))+width*cosf(degreesToRadians(degreesplus)));
}
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(degrees));
//t2 = CGAffineTransformRotate(t1, -90);
// Step 3
// Set the appropriate render sizes and rotational transforms
if (!self.mutableVideoComposition) {
// Create a new video composition
self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
// self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
self.mutableVideoComposition.renderSize = CGSizeMake(finalWidth,finalHeight);
self.mutableVideoComposition.frameDuration = CMTimeMake(1,30);
// The rotate transform is set on a layer instruction
instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);
// Extract the existing layer instruction on the mutableVideoComposition
instruction = (self.mutableVideoComposition.instructions)[0];
layerInstruction = (instruction.layerInstructions)[0];
// Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
CGAffineTransform existingTransform;
if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
// Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
[layerInstruction setTransform:newTransform atTime:kCMTimeZero];
}
}
// Step 4
// Add the transform instructions to the video composition
instruction.layerInstructions = @[layerInstruction];
self.mutableVideoComposition.instructions = @[instruction];
// Step 5
// Notify AVSEViewController about rotation operation completion
// [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];
[self performWithAssetExport];
}
- (void)performWithAssetExport
{
// Step 1
// Create an outputURL to which the exported movie will be saved
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mov"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
// Step 2
// Create an export session with the composition and write the exported movie to the photo library
self.exportSession = [[AVAssetExportSession alloc] initWithAsset:[self.mutableComposition copy] presetName:AVAssetExportPreset1280x720];
self.exportSession.videoComposition = self.mutableVideoComposition;
self.exportSession.audioMix = self.mutableAudioMix;
self.exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
self.exportSession.outputFileType=AVFileTypeQuickTimeMovie;
[self.exportSession exportAsynchronouslyWithCompletionHandler:^(void){
switch (self.exportSession.status) {
case AVAssetExportSessionStatusCompleted:
//[self playfunction];
[[NSNotificationCenter defaultCenter] postNotificationName:@"Backhome" object:nil];
// Step 3
// Notify AVSEViewController about export completion
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#",self.exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#",self.exportSession.error);
break;
default:
break;
}
}];
}
I have a set of video clips that I would like to merge together and then put a watermark on it.
I am able to do both functions individually; however, problems arise when performing them together.
All clips that will be merged are either 1920x1080 or 960x540.
For some reason, AVAssetExportSession does not display them well together.
Here are the 2 bugs based on 3 different scenarios:
This image is a result of:
Merging Clips together
As you can see, there is nothing wrong here, the output video produces the desired effect.
However, when I then try to add a watermark, it creates the following issue:
This image is a result of:
Merging Clips together
Putting a watermark on it
BUG 1: Some clips in the video get resized for whatever reason while other clips do not.
This image is a result of:
Merging Clips together
Resizing clips that are 960x540 to 1920x1080
Putting a watermark on it
BUG 2: Now the clips that need to be resized get resized; however, the old unresized clip is still there.
Merging/Resizing Code:
-(void) mergeClips{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *mutableVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *mutableAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// loop through the list of videos and add them to the track
CMTime currentTime = kCMTimeZero;
NSMutableArray* instructionArray = [[NSMutableArray alloc] init];
if (_clipsArray){
for (int i = 0; i < (int)[_clipsArray count]; i++){
NSURL* url = [_clipsArray objectAtIndex:i];
AVAsset *asset = [AVAsset assetWithURL:url];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CGSize size = videoTrack.naturalSize;
CGFloat widthScale = 1920.0f/size.width;
CGFloat heightScale = 1080.0f/size.height;
// lines that performs resizing
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mutableVideoTrack];
CGAffineTransform scale = CGAffineTransformMakeScale(widthScale,heightScale);
CGAffineTransform move = CGAffineTransformMakeTranslation(0,0);
[layerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:currentTime];
[instructionArray addObject:layerInstruction];
[mutableVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:videoTrack
atTime:currentTime error:nil];
[mutableAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:audioTrack
atTime:currentTime error:nil];
currentTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(asset.duration) + CMTimeGetSeconds(currentTime), asset.duration.timescale);
}
}
AVMutableVideoCompositionInstruction * mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, currentTime);
mainInstruction.layerInstructions = instructionArray;
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *lastPostedDayPath = [documentsDirectory stringByAppendingPathComponent:@"lastPostedDay"];
//Check if folder exists, if not create folder
if (![[NSFileManager defaultManager] fileExistsAtPath:lastPostedDayPath]){
[[NSFileManager defaultManager] createDirectoryAtPath:lastPostedDayPath withIntermediateDirectories:NO attributes:nil error:nil];
}
NSString *fileName = [NSString stringWithFormat:@"%li_%li_%li.mov", (long)_month, (long)_day, (long)_year];
NSString *finalDayPath = [lastPostedDayPath stringByAppendingPathComponent:fileName];
NSURL *url = [NSURL fileURLWithPath:finalDayPath];
BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:finalDayPath];
if (fileExists){
NSLog(#"file exists");
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
}
AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainComposition.frameDuration = CMTimeMake(1, 30);
mainComposition.renderSize = CGSizeMake(1920.0f, 1080.0f);
// 5 - Create exporter
_exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
_exportSession.outputURL=url;
_exportSession.outputFileType = AVFileTypeQuickTimeMovie;
_exportSession.shouldOptimizeForNetworkUse = YES;
_exportSession.videoComposition = mainComposition;
[_exportSession exportAsynchronouslyWithCompletionHandler:^{
[merge_timer invalidate];
merge_timer = nil;
switch (_exportSession.status) {
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed -> Reason: %#, User Info: %#",
_exportSession.error.localizedDescription,
_exportSession.error.userInfo.description);
[self showSavingFailedDialog];
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export cancelled");
[self showSavingFailedDialog];
break;
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export finished");
[self addWatermarkToExportSession:_exportSession];
break;
default:
break;
}
}];
});
}
Once it finishes this, I run it through a different export session that simply adds a watermark.
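For reference, the watermark pass can be done with the standard Core Animation overlay technique; a generic sketch of that approach (not the poster's actual code; watermarkImage and videoSize are placeholders, and mainComposition is the AVMutableVideoComposition from the merge code below):
CALayer *overlayLayer = [CALayer layer];
overlayLayer.contents = (__bridge id)watermarkImage.CGImage;
overlayLayer.frame = CGRectMake(20, 20, 200, 80); // arbitrary placement of the watermark

CALayer *videoLayer = [CALayer layer];
CALayer *parentLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = parentLayer.frame;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];

// Attach the layers to the video composition used by the export session
mainComposition.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                             inLayer:parentLayer];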
Is there something I am doing wrong in my code or process?
Is there an easier way for achieving this?
Thank you for your time!
I was able to solve my issue.
For some reason, AVAssetExportSession does not actually create a 'flat' video file of the merged clips, so when the watermark was added it still saw the lower-resolution clips and their positions, which caused them to resize.
What I did to solve this was to first use AVAssetWriter to merge my clips and create one 'flat' file. I could then add a watermark without the resizing issue.
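A rough, untested sketch of such a flattening pass with AVAssetReader/AVAssetWriter (video track only; mixComposition and mainComposition are the objects from the merge code above, flatURL and the output size are placeholders, and audio and error handling are omitted):
NSError *error = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:mixComposition error:&error];
// A video-composition output bakes the per-clip resize instructions into the decoded frames.
AVAssetReaderVideoCompositionOutput *readerOutput =
    [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:
        [mixComposition tracksWithMediaType:AVMediaTypeVideo] videoSettings:nil];
readerOutput.videoComposition = mainComposition;
[reader addOutput:readerOutput];

AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:flatURL
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
AVAssetWriterInput *writerInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
        outputSettings:@{AVVideoCodecKey  : AVVideoCodecH264,
                         AVVideoWidthKey  : @1920,
                         AVVideoHeightKey : @1080}];
[writer addInput:writerInput];

[writer startWriting];
[reader startReading];
[writer startSessionAtSourceTime:kCMTimeZero];

dispatch_queue_t queue = dispatch_queue_create("flatten.video", DISPATCH_QUEUE_SERIAL);
[writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    while (writerInput.isReadyForMoreMediaData) {
        CMSampleBufferRef sample = [readerOutput copyNextSampleBuffer];
        if (sample) {
            [writerInput appendSampleBuffer:sample];
            CFRelease(sample);
        } else {
            [writerInput markAsFinished];
            [writer finishWritingWithCompletionHandler:^{
                // The flat file at flatURL is ready; run the watermark export on it.
            }];
            break;
        }
    }
}];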
Hope this helps anyone who may come across this problem in the future!
I also encountered the same problem.
You can set the opacity to 0 after one video ends, like this:
[layerInstruction setOpacity:0.0 atTime:duration];
I have an app which combines video files together to make a long video. There could be a delay between videos (e.g. V1 starts at t=0s and runs for 5 seconds, V2 starts at t=10s). In this case, I want the video to freeze the last frame of V1 until V2 starts.
I'm using the code below, but between videos, the whole video goes white.
Any ideas how I can get the effect I'm looking for?
Thanks!
@interface VideoJoins : NSObject
-(instancetype)initWithURL:(NSURL*)url
andDelay:(NSTimeInterval)delay;
@property (nonatomic, strong) NSURL* url;
@property (nonatomic) NSTimeInterval delay;
@end
and
+(void)joinVideosSequentially:(NSArray*)videoJoins
withFileType:(NSString*)fileType
toOutput:(NSURL*)outputVideoURL
onCompletion:(dispatch_block_t) onCompletion
onError:(ErrorBlock) onError
onCancel:(dispatch_block_t) onCancel
{
//From original question on http://stackoverflow.com/questions/6575128/how-to-combine-video-clips-with-different-orientation-using-avfoundation
// Didn't add support for portrait+landscape.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime startTime = kCMTimeZero;
/*videoClipPaths is a array of paths of the video clips recorded*/
//for loop to combine clips into a single video
for (NSInteger i=0; i < [videoJoins count]; i++)
{
VideoJoins* vj = videoJoins[i];
NSURL *url = vj.url;
NSTimeInterval nextDelayTI = 0;
if(i+1 < [videoJoins count])
{
VideoJoins* vjNext = videoJoins[i+1];
nextDelayTI = vjNext.delay;
}
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
CMTime assetDuration = [asset duration];
CMTime assetDurationWithNextDelay = assetDuration;
if(nextDelayTI != 0)
{
CMTime nextDelay = CMTimeMakeWithSeconds(nextDelayTI, 1000000);
assetDurationWithNextDelay = CMTimeAdd(assetDuration, nextDelay);
}
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//set the orientation
if(i == 0)
{
[compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
}
BOOL ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetDurationWithNextDelay) ofTrack:videoTrack atTime:startTime error:nil];
ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetDuration) ofTrack:audioTrack atTime:startTime error:nil];
startTime = CMTimeAdd(startTime, assetDurationWithNextDelay);
}
//Delete output video if it exists
NSString* outputVideoPath = [outputVideoURL path]; // use the file system path, not the URL string
if ([[NSFileManager defaultManager] fileExistsAtPath:outputVideoPath])
{
[[NSFileManager defaultManager] removeItemAtPath:outputVideoPath error:nil];
}
//export the combined video
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = outputVideoURL;
exporter.outputFileType = fileType;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^(void)
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted: {
onCompletion();
break;
}
case AVAssetExportSessionStatusFailed:
{
NSLog(#"Export Failed");
NSError* err = exporter.error;
NSLog(#"ExportSessionError: %#", [err localizedDescription]);
onError(err);
break;
}
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
NSLog(#"ExportSessionError: %#", [exporter.error localizedDescription]);
onCancel();
break;
}
}];
}
EDIT: Got it working. Here is how I extract the images and generate the videos from those images:
+ (void)writeImageAsMovie:(UIImage*)image
toPath:(NSURL*)url
fileType:(NSString*)fileType
duration:(NSTimeInterval)duration
completion:(VoidBlock)completion
{
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:url
fileType:fileType
error:&error];
NSParameterAssert(videoWriter);
CGSize size = image.size;
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
CMTime halfTime = CMTimeMakeWithSeconds(duration/2, 100000);
CMTime endTime = CMTimeMakeWithSeconds(duration, 100000);
CVPixelBufferRef buffer = [VideoCreator pixelBufferFromCGImage:image.CGImage];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:halfTime];
[adaptor appendPixelBuffer:buffer withPresentationTime:endTime];
//Finish the session:
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:endTime];
[videoWriter finishWritingWithCompletionHandler:^{
if(videoWriter.error)
{
NSLog(#"Error:%#", [error localizedDescription]);
}
if(completion)
{
completion();
}
}];
}
+(void)generateVideoImageFromURL:(NSURL*)url
atTime:(CMTime)thumbTime
withMaxSize:(CGSize)maxSize
completion:(ImageBlock)handler
{
AVURLAsset *asset=[[AVURLAsset alloc] initWithURL:url options:nil];
if(!asset)
{
if(handler)
{
handler(nil);
}
return;
}
if(CMTIME_IS_POSITIVE_INFINITY(thumbTime))
{
thumbTime = asset.duration;
}
else if(CMTIME_IS_NEGATIVE_INFINITY(thumbTime) || CMTIME_IS_INVALID(thumbTime) || CMTIME_IS_INDEFINITE(thumbTime))
{
thumbTime = CMTimeMake(0, 30);
}
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform=TRUE;
generator.maximumSize = maxSize;
CMTime actualTime;
NSError* error;
CGImageRef image = [generator copyCGImageAtTime:thumbTime actualTime:&actualTime error:&error];
UIImage *thumb = [[UIImage alloc] initWithCGImage:image];
CGImageRelease(image);
if(handler)
{
handler(thumb);
}
}
AVMutableComposition can only stitch videos together. I did it by doing two things:
Extracting last frame of the first video as image.
Making a video using this image(duration depends on your requirement).
Then you can compose these three videos (V1,V2 and your single image video). Both tasks are very easy to do.
For extracting the image out of the video, look at this link. If you don't want to use MPMoviePlayerController, which is used by the accepted answer, then look at the other answer by Steve.
For making a video from the image, check out this link; see the sketch below for how the pieces fit together. That question is about an audio issue, but I don't think you need audio, so just look at the method mentioned in the question itself.
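Tying the pieces together, a rough sketch using the helpers shown in the question's EDIT (assuming they live on the VideoCreator class referenced there and that the completion blocks take a UIImage and no arguments respectively; v1URL, the gap file name, and the 5-second duration are placeholders):
// 1. Grab the last frame of V1 (positive infinity is clamped to the asset's end in the helper).
[VideoCreator generateVideoImageFromURL:v1URL
                                 atTime:kCMTimePositiveInfinity
                            withMaxSize:CGSizeZero
                             completion:^(UIImage *lastFrame) {
    // 2. Turn that frame into a short "freeze" clip covering the gap.
    NSURL *gapURL = [NSURL fileURLWithPath:
        [NSTemporaryDirectory() stringByAppendingPathComponent:@"gap.mov"]];
    [VideoCreator writeImageAsMovie:lastFrame
                             toPath:gapURL
                           fileType:AVFileTypeQuickTimeMovie
                           duration:5.0
                         completion:^{
        // 3. Stitch V1 + gap clip + V2, e.g. with the joinVideosSequentially: method above.
    }];
}];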
UPDATE:
There is an easier way, but it comes with a disadvantage. You can have two AVPlayers. The first one plays your video, which has the white frames in between. The other one sits behind it, paused at the last frame of video 1. So when the middle part comes, you will see the second AVPlayer showing that last frame, and as a whole it will look like video 1 is paused. Trust me, the naked eye can't tell when the player was switched. But the obvious disadvantage is that your exported video will still contain the blank frames. So if you are only going to play it back inside your app, you can go with this approach.
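One way to realize that swap (my own assumption on the mechanics: hide the front player's layer during the gap using a boundary time observer; stitchedURL, clip1URL, clip1Duration, gapStart, gapEnd and mainPlayerLayer are placeholders):
AVPlayer *mainPlayer = [AVPlayer playerWithURL:stitchedURL]; // exported video with the blank gap
AVPlayer *holdPlayer = [AVPlayer playerWithURL:clip1URL];    // parked on V1's last frame
[holdPlayer seekToTime:clip1Duration
       toleranceBefore:kCMTimeZero
        toleranceAfter:kCMTimeZero];
// holdPlayer's AVPlayerLayer sits behind mainPlayer's layer in the view hierarchy.
__block BOOL inGap = NO;
[mainPlayer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:gapStart],
                                              [NSValue valueWithCMTime:gapEnd]]
                                      queue:dispatch_get_main_queue()
                                 usingBlock:^{
    inGap = !inGap;
    mainPlayerLayer.hidden = inGap; // reveal the frozen frame behind while the gap plays
}];
[mainPlayer play];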
The first frame of a video asset is always black or white, so skip the first frame when inserting each clip:
CMTime delta = CMTimeMake(1, 25); //1 frame (if fps = 25)
CMTimeRange timeRangeInVideoAsset = CMTimeRangeMake(delta,clipVideoTrack.timeRange.duration);
nextVideoClipStartTime = CMTimeAdd(nextVideoClipStartTime, timeRangeInVideoAsset.duration);
I merged more than 400 short videos into one this way.
I'm trying to use AVMutableComposition to play a sequence of sound files at precise times.
When the view loads, I create the composition with the intent of playing 4 sounds evenly spaced over 1 second. It shouldn't matter how long or short the sounds are, I just want to fire them at exactly 0, 0.25, 0.5 and 0.75 seconds:
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
for (NSInteger i = 0; i < 4; i++)
{
AVMutableCompositionTrack* track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSURL *url = [[NSBundle mainBundle] URLForResource:[NSString stringWithFormat:@"sound_file_%i", i] withExtension:@"caf"];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
CMTimeRange timeRange = [assetTrack timeRange];
Float64 t = i * 0.25;
NSError *error;
BOOL success = [track insertTimeRange:timeRange ofTrack:assetTrack atTime:CMTimeMakeWithSeconds(t, 1) error:&error];
if (!success)
{
NSLog(#"unsuccesful creation of composition");
}
if (error)
{
NSLog(#"composition creation error: %#", error);
}
}
AVPlayerItem* playerItem = [AVPlayerItem playerItemWithAsset:composition];
self.avPlayer = [[AVPlayer alloc] initWithPlayerItem:playerItem];
The composition is created successfully with no errors. Later, when I want to play the sequence I do this:
[self.avPlayer seekToTime:CMTimeMakeWithSeconds(0, 1)];
[self.avPlayer play];
For some reason, the sounds are not evenly spaced at all - but play almost all at once. I tried the same thing spaced over 4 seconds, replacing the time calculation like this:
Float64 t = i * 1.0;
And this plays perfectly. Any time interval under 1 second seems to generate unexpected results. What am I missing? Are AVCompositions not supposed to be used for time intervals under 1 second? Or perhaps I'm misunderstanding the time intervals?
Your CMTimeMakeWithSeconds(t, 1) is in whole second 'slices' because your timescale is set to 1. No matter what fraction t is, the atTime: will always end up as 0. This is why it works when you increase it to 1 second (t=i*1).
You need to set the timescale to 4 to get your desired 0.25 second slices. Since the CMTime is now in 0.25 second slices, you won't need the i * 0.25 calculation. Just use i directly: atTime:CMTimeMake(i, 4)
If you might need to get more precise in the future, you should account for it now so you won't have to adjust your code later. Apple recommends using a timescale of 600 as it is a multiple of the common video framerates (24, 25, and 30 FPS), but it works fine for audio-only too. So for your situation, you would use 150 slices to get your 0.25 second value: Float64 t = i * 150; atTime:CMTimeMake(t, 600)
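For illustration, a minimal sketch of the insertion call with a timescale of 4 (same variable names as in the question):
// i = 0, 1, 2, 3 maps to 0.0 s, 0.25 s, 0.5 s, 0.75 s exactly
BOOL success = [track insertTimeRange:timeRange
                              ofTrack:assetTrack
                               atTime:CMTimeMake(i, 4)
                                error:&error];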
As for your issue of all 4 sounds playing almost all at once, be aware of this unanswered SO question where it only happens on the first play. Even with the changes above, you might still run into this issue.
Unless each track is exactly 0.25 seconds long, this is your problem:
Float64 t = i * 0.25;
NSError *error;
BOOL success = [track insertTimeRange:timeRange ofTrack:assetTrack atTime:CMTimeMakeWithSeconds(t, 1) error:&error];
You need to keep track of the cumulative duration added so far and insert the next track at that time:
CMTime currentTime = kCMTimeZero;
for (NSInteger i = 0; i < 4; i++) {
/* Code to create track for insertion */
CMTimeRange trackTimeRange = [assetTrack timeRange];
BOOL success = [track insertTimeRange:trackTimeRange
ofTrack:assetTrack
atTime:currentTime
error:&error];
/* Error checking code */
//Update time range for insertion
currentTime = CMTimeAdd(currentTime,trackTimeRange.duration);
}
I changed your code a bit; sorry, I had no time to test it.
AVMutableComposition *composition = [AVMutableComposition composition];
NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
CMTime totalDuration = kCMTimeZero;
for (NSInteger i = 0; i < 4; i++)
{
AVMutableCompositionTrack* track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"Record_%i", i] ofType:@"caf"]];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTimeRange timeRange = [assetTrack timeRange];
NSError *error;
BOOL success = [track insertTimeRange:timeRange ofTrack:assetTrack atTime:CMTIME_COMPARE_INLINE(totalDuration, >, kCMTimeZero)? CMTimeAdd(totalDuration, CMTimeMake(1, 4)): totalDuration error:&error];
if (!success)
{
NSLog(#"unsuccesful creation of composition");
}
if (error)
{
NSLog(#"composition creation error: %#", error);
}
totalDuration = CMTimeAdd(CMTimeAdd(totalDuration,CMTimeMake(1, 4)), asset.duration);
}
AVPlayerItem* playerItem = [AVPlayerItem playerItemWithAsset:composition];
self.avPlayer = [[AVPlayer alloc] initWithPlayerItem:playerItem];
P.S. use kCMTimeZero instead of CMTimeMakeWithSeconds(0, 1).
I have a requirement where the user is allowed to trim an audio file before submitting it to the server. The trimming function works fine in iOS 6 but not in iOS 7.
This happens in iOS 7 when the user chooses a song from the iTunes library and starts trimming. The file appears to be trimmed, but the new file created after trimming plays only up to the trim point and the rest is blank. Also, the duration still shows the original song duration. This doesn't happen for all files, only for some. I also checked exportable and hasProtectedContent; both have the expected values (exportable - YES, hasProtectedContent - NO). What could be the issue in iOS 7?
I am pasting the audio file trimming code for reference:
- (void)trimAndExportAudio:(AVAsset *)avAsset withDuration:(NSInteger)durationInSeconds withStartTime:(NSInteger)startTime endTime:(NSInteger)endTime toFileName:(NSString *)filename withTrimCompleteBlock:(TrimCompleteBlock)trimCompleteBlock
{
if (startTime < 0 || startTime > durationInSeconds || startTime >= endTime)
{
CGLog(#"start time = %d endTime %d durationInSeconds %d", startTime, endTime, durationInSeconds);
trimCompleteBlock(NO, #"Invalid Start Time");
return;
}
if (endTime > durationInSeconds)
{
CGLog(#"start time = %d endTime %d durationInSeconds %d", startTime, endTime, durationInSeconds);
trimCompleteBlock(NO, #"Invalid End Time");
return;
}
// create the export session
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:avAsset presetName:AVAssetExportPresetAppleM4A];
if (exportSession == nil)
{
trimCompleteBlock(NO, #"Could not create an Export Session.");
return;
}
//export file path
NSError *removeError = nil;
NSString *filePath = [[CGUtilities applicationLibraryMyRecordingsDirectory] stringByAppendingPathComponent:filename];
if ([[NSFileManager defaultManager] fileExistsAtPath:filePath])
{
[[NSFileManager defaultManager] removeItemAtPath:filePath error:&removeError];
}
if (removeError)
{
CGLog(#"Error removing existing file = %#", removeError);
}
// create trim time range
CMTime exportStartTime = CMTimeMake(startTime, 1);
CMTime exportStopTime = CMTimeMake(endTime, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(exportStartTime, exportStopTime);
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
//perform the export
__weak AVAssetExportSession *weakExportSession = exportSession;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
if (![filename isEqualToString:kLibraryTempFileName])
{
//created a new recording
}
trimCompleteBlock(YES, nil);
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
// a failure may happen because of an event out of your control
// for example, an interruption like a phone call coming in
// make sure and handle this case appropriately
trimCompleteBlock(NO, weakExportSession.error.description);
}
else
{
trimCompleteBlock(NO, weakExportSession.error.description);
}
}];
}
Thanks
We can import AVFoundation/AVFoundation.h and use the method below:
-(BOOL)trimAudiofile{
float audioStartTime;//define start time of audio
float audioEndTime;//define end time of audio
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
NSString *libraryCachesDirectory = [paths objectAtIndex:0];
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/output_%@.mp4", [dateFormatter stringFromDate:[NSDate date]]];
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath];
NSURL *audioFileInput;//<Path of original audio file>
if (!audioFileInput || !audioFileOutput)
{
return NO;
}
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
AVAsset *asset = [AVAsset assetWithURL:audioFileInput];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
presetName:AVAssetExportPresetAppleM4A];
if (exportSession == nil)
{
return NO;
}
CMTime startTime = CMTimeMake((int)(floor(audioStartTime * 100)), 100);
CMTime stopTime = CMTimeMake((int)(ceil(audioEndTime * 100)), 100);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
exportSession.outputURL = audioFileOutput;
exportSession.timeRange = exportTimeRange;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
NSLog(#"Export OK");
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
}
}];
return YES;
}