I m using bellow method to add watermark image to video, but the problem is when I record the video and add watermark to it, video rotates by 90 degree.
+(void)createWatermarkForVideo:(NSURL*)videoURL watermark:(UIImage*)watermarkImage stickerContainerView:(UIView*)containerView completionAction:(VideoMergeCompletionBlock)completion{
AVURLAsset *audioAsset, *videoAsset;
//Create AVMutableComposition Object which will hold our multiple AVMutableCompositionTrack or we can say it will hold our video and audio files.
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//Now first load your audio file using AVURLAsset. Make sure you give the correct path of your videos.
audioAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
//Now we are creating the first AVMutableCompositionTrack containing our audio and add it to our AVMutableComposition object.
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//Now we will load video file.
videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
//Now we are creating the second AVMutableCompositionTrack containing our video and add it to our AVMutableComposition object.
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
AVAssetTrack *aAudioAssetTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration);
[a_compositionVideoTrack setPreferredTransform:aVideoAssetTrack.preferredTransform];
#try{
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:nil];
if(CMTimeGetSeconds(audioAsset.duration) == CMTimeGetSeconds(videoAsset.duration)){
#try{
[b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
}
#catch(NSError *error){
}
}
else{
#try{
[b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:nil];
}
#catch(NSError *error){
}
}
}
#catch(NSError *error){
}
// create the layer with the watermark image
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)watermarkImage.CGImage;
CGSize videoSize = [aVideoAssetTrack naturalSize];
CGFloat videoScale = videoSize.width/containerView.frame.size.width;
aLayer.frame = CGRectMake(0, 0, containerView.frame.size.width * videoScale, containerView.frame.size.height * videoScale);
aLayer.opacity = 0.9;
//sorts the layer in proper order
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
printf("Video Size %f %f",videoSize.width,videoSize.height);
//create the composition and add the instructions to insert the layer:
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
/// instruction
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && a_compositionVideoTrack) {
[a_compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
AVAssetExportSession *_assetExport;
// export video
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;
NSLog (#"created exporter. supportedFileTypes: %#", _assetExport.supportedFileTypes);
NSString* videoName = #"NewWatermarkedVideo.mov";
NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
dispatch_async(dispatch_get_main_queue(), ^{
completion(_assetExport);
});
}
];
}
Related
I am adding image watermark on video using the following code but the resulted video's frame was rotated by 180 degree and i tried every possible solution to stop it. i just want the same video with watermark as a source video. please suggest solution.
-(void)watermarkVideoAtURL:(NSURL *)url fb:(BOOL)fb withCompletionHandler:(void(^)(bool success, NSURL *assetsURL, NSError *error))completionHandler {
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:url options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject] preferredTransform]];
CGSize sizeOfVideo = [videoAsset naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
// Image of watermark
UIImage *myImage = [self imageByApplyingAlpha:watermarkOpacityFactor toImage:[UIImage imageNamed:#"iconbig"]];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake(10, sizeOfVideo.height - 50, 50, 50);
layerCa.opacity = 1.0;
CALayer *layerCa2 = [CALayer layer];
layerCa2.contents = (id)myImage.CGImage;
layerCa2.frame = CGRectMake(sizeOfVideo.width - 60, 10, 50, 50);
layerCa2.opacity = 1.0;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
[parentLayer addSublayer:layerCa2];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = sizeOfVideo;
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/utput_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted: {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:exportSession.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (!error) {
completionHandler(YES, assetURL, nil);
} else {
completionHandler(NO, nil, error);
}
}];
}
break;
case AVAssetExportSessionStatusFailed: {
completionHandler(NO, nil, exportSession.error);
}
break;
case AVAssetExportSessionStatusCancelled: {
completionHandler(NO, nil, exportSession.error);
}
break;
default:
break;
}
}];
}
Try to set AVAssetTrack's preferredTransform to layer instruction
setTransform:atTime:
Sets a fixed transform to apply from the specified time until the next time at which a transform is set.[...] Before the first specified time for which a transform is set, the affine transform is held constant at the value of
CGAffineTransformIdentity
; after the last time for which a transform is set, the affine transform is held constant at that last value.
Getting this error when exporting file
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x608000642310 {Error Domain=NSOSStatusErrorDomain Code=-12120 "(null)"}
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:url options:nil];
AVMutableComposition* miComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [miComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVMutableCompositionTrack *compositionAudioTrack = [miComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
//If you need audio as well add the Asset Track for audio here
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [videoAsset duration]) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [videoAsset duration]) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject preferredTransform]];
CGSize sizeOfVideo=[clipVideoTrack naturalSize];
//Image of watermark
UIImage *myImage=[UIImage imageNamed:#"_minutestory.png"];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake((sizeOfVideo.width - 100),20 , 100, 100);
layerCa.opacity = 0.6;
CALayer *optionalLayer=[CALayer layer];
optionalLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
[optionalLayer setMasksToBounds:YES];
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:optionalLayer];
[parentLayer addSublayer:layerCa];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize=sizeOfVideo;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [miComposition duration]);
AVAssetTrack *vidTrack = [[miComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:vidTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
[[NSFileManager defaultManager] removeItemAtPath:url error:NULL];
exporter2 =
[[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter2.videoComposition = videoComposition;
exporter2.outputURL=url;
exporter2.outputFileType = AVFileTypeMPEG4;
exporter2.shouldOptimizeForNetworkUse = YES;
exportProgressBarTimer = [NSTimer scheduledTimerWithTimeInterval:.1 target:self selector:#selector(updateExportDisplay) userInfo:nil repeats:YES];
[exporter2 exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportFinish:exporter2];
});
}];
First maybe you should used another url as outputURL for your exporter, the code above use AVURLAsset's source url as exporter's outputURL, and based on you do remove file at url before export(which means you delete source asset used to exported), there's no way you can achieve your goal.
By the way, if you want to remove file based on url. You should use
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
instead.
I want to add a watermark image on the bottom left corner of downloaded videos, and I find below code works when video's video track's preferredTransform property is CGAffineTransformIdentity(that means no real transformation), while for those videos have transformation, the code fails. How to fix it?
sample video (has transformation) url is here
BTW, the error code that AVFoundations reports is -11841
+(void) addWatermarkWithInputFile:(NSString *)inputFile outputFile:(NSString *)outputFile completion:(void (^)(BOOL))completion{
[[NSFileManager defaultManager] removeItemAtPath:outputFile error:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:inputFile] options:nil];
AVMutableComposition* composition = [AVMutableComposition composition];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVAssetTrack *track = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
{
AVMutableCompositionTrack * composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
composedTrack.preferredTransform = track.preferredTransform;
[composedTrack insertTimeRange:timeRange
ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject
atTime:kCMTimeZero
error:nil];
}
{
AVMutableCompositionTrack * composedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
composedTrack.preferredTransform = track.preferredTransform;
[composedTrack insertTimeRange:timeRange
ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject
atTime:kCMTimeZero
error:nil];
}
AVAssetTrack *clipVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
CGSize videoSize = clipVideoTrack.naturalSize;
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
{
UIImage *myImage = [UIImage imageNamed:#"video-watermark"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(10, 10, myImage.size.width * myImage.scale, myImage.size.height * myImage.scale);
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
/// instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = timeRange;
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
instruction.layerInstructions = #[layerInstruction];
videoComp.instructions = #[instruction];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:outputFile];
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = videoComp;
exporter.timeRange = timeRange;
[exporter exportAsynchronouslyWithCompletionHandler:^{
completion(exporter.status == AVAssetExportSessionStatusCompleted);
}];
}
I'm trying to merge an image overlay and a video into a new video file. This is the code I am using:
- (void)mergeVideoUrl:(NSURL *)videoURL andOverlayImage:(UIImage *)overlayImage
{
NSLog(#"%s", __PRETTY_FUNCTION__);
// Create an AVURLAsset
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero
error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
CGSize videoSize = [clipVideoTrack naturalSize];
// Get our image layer
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)overlayImage.CGImage;
aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
aLayer.opacity = 1.0;
// Sort the layer order
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// Create the composition and add the instructions to insert the layer
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
// Instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
// Export
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
assetExport.videoComposition = videoComp;
NSString* videoName = #"mynewwatermarkedvideo.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^(void)
{
//FINALIZATION CODE HERE
NSLog(#"DONE: %#", exportUrl);
}];
}
The problem is that it screws up the video. Any idea why?
I have a feeling the video frame size might have something to do with it or the aspect ratio as if I set a size of (2000,2000), it seems to work ok but with a lot of black edges.
EDIT-----
It seems that if I set CGSize videoSize to a ratio of 2/3 e.g.320x480 or 1280x1920 then it works, however the resulting video has a black bar down the right hand side. Why would [clipVideoTrack naturalSize] report a size of 1080x1920 but it screws up unless I use 1280x1920? I think it might be a transform/crop issue?
EDIT2-----
Changing the AVAssetExportSession presetName between AVAssetExportPresetLowQuality, AVAssetExportPresetMediumQuality & AVAssetExportPresetHighestQuality results in different sized videos. So I think that is overriding the output video frame size somehow.
CGSize videoSize = [clipVideoTrack naturalSize];
This gives you the dimensions of the recording, but you need to look at the track's prefferedTransform to find the orientation of the recording. You might well find that the video width is actually the naturalSize 'height' depending on how the device was held during recording.
I see you set it on the composition, but you need to check it in order to assign the correct frames to your CALayers
I tried asking this before, but got no responses. So I'll try again. I'm trying to lay images on top of a video (like a doodle, or a border). I call the "addAnimation" method three times, and it runs, but the exportAsynchronouslyWithCompletionHandler only gets called on the first try. Any ideas?
- (void)addAnimation:(UIImage *)background for:(NSString *)socialSite
{
// 1 - Load Asset
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:self.videoUrl options:nil];
// 2 - Create AVMutableComposition Object
AVMutableComposition* mixComposition = [AVMutableComposition composition];
// 2.1 - VideoTrack
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// 2.2 - AudioTrack
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//If either of them are nil, then alloc
if( (self.clipVideoTrack == nil) || (self.clipAudioTrack == nil)){
self.clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
self.clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:self.clipAudioTrack atTime:kCMTimeZero error:nil];
CGSize videoSize = [self.clipVideoTrack naturalSize];
// 3 - Background layer
UIImage *myImage = background;
CALayer *aLayer = [CALayer layer];
if(socialSite == nil) {
aLayer.contents = (id)myImage.CGImage;
if(__IPHONE_4_INCH)
aLayer.frame = CGRectMake(0, 0, 640, 640);
else
aLayer.frame = CGRectMake(0, 0, 320, 320);
}
else{
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
}
// 3.1 - Video layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, myImage.size.width, myImage.size.height);
if(socialSite == nil){
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
}
else{
videoLayer.frame = CGRectMake(0, 110, videoSize.width, videoSize.height);
}
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
// 3.2 - Add instructions
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];//AVAssetExportPresetPassthrough
assetExport.videoComposition = videoComp;
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
myPathDocs = [myPathDocs stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = videoComp;
[exporter exportAsynchronouslyWithCompletionHandler:^ {
NSData *video;
/*********************************** INSTAGRAM *********************************************/
if([socialSite isEqualToString:#"instagram"]){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo = video;
NSLog(#"IG SIZE (In CreateViewController): %lu",(unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.instagramVideo.length);
}
/*********************************** FB/TWITTER *********************************************/
else if ([socialSite isEqualToString:#"facebook"]){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo = video;
NSLog(#"FB SIZE (In CreateViewController): %lu",(unsigned long)[MPOCreateMPManager sharedManager].createdMarcoPolo.fBTwitterVideo.length);
}
/*********************************** DOODLE *********************************************/
else if (socialSite == nil){
video = [NSData dataWithContentsOfURL:exporter.outputURL];
[MPOCreateMPManager sharedManager].createdMarcoPolo.video = video;
[MPOCreateMPManager sharedManager].createdMarcoPolo.image = [self loadImage:exporter.outputURL];
NSLog(#"*******************************DOODLE DONE*************************************");
}
}];
}