I am trying to merge multiple videos using AVMutableComposition. The problem is that whenever I add an AVMutableVideoComposition to apply any instructions, playback in AVPlayer freezes at exactly 6 seconds.
Another interesting thing is that it plays fine in the iPad's Photos app after I export it with the same videoComposition. So why does it freeze in AVPlayer at 6 seconds?
Code:
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
for (AVURLAsset *asset in assets)
{
AVAssetTrack *assetTrack;
assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration )
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"asset url :: %#",assetTrack.asset);
NSLog(#"Error1 - %#", error.debugDescription);
}
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration)
ofTrack:audioAssetTrack
atTime:time
error:&error];
if (error) {
NSLog(#"Error2 - %#", error.debugDescription);
}
time = CMTimeAdd(time, assetTrack.timeRange.duration);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;
}
}
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *videoTrackLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, time);
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videoTrackLayerInstruction, nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
mainCompositionInst.renderSize = size;
pi = [AVPlayerItem playerItemWithAsset:mutableComposition];
pi.videoComposition = mainCompositionInst;
Also, I know the problem is almost certainly the videoComposition, because if I remove the videoComposition, it plays fine in AVPlayer.
UPDATE 1: I just found out that when it freezes after 6 seconds, if I drag the slider back or forward (i.e. use seekToTime), it starts playing normally again without any further freezes.
Also, the audio keeps playing fine even while the video is frozen.
UPDATE 2: If I export it using AVAssetExportSession with the same AVMutableComposition and load an asset from the exported video, it works fine. So the problem only arises when I play the AVMutableComposition directly.
Finally, I got the solution to fix this.
You should start playback only after the playerItem's status has changed to .ReadyToPlay.
Please see below.
func startVideoPlayer() {
let playerItem = AVPlayerItem(asset: self.composition!)
playerItem.videoComposition = self.videoComposition!
let player = AVPlayer(playerItem: playerItem)
player.actionAtItemEnd = .None
videoPlayerLayer = AVPlayerLayer(player: player)
videoPlayerLayer!.frame = self.bounds
/* add playerItem's observer */
player.addObserver(self, forKeyPath: "player.currentItem.status", options: .New, context: nil)
NSNotificationCenter.defaultCenter().addObserver(self, selector: "playerItemDidReachEnd:", name: AVPlayerItemDidPlayToEndTimeNotification, object: playerItem);
self.layer.addSublayer(videoPlayerLayer!)
}
override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?, change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
if keyPath != nil && keyPath! == "player.currentItem.status" {
if let newValue = change?[NSKeyValueChangeNewKey] {
if AVPlayerStatus(rawValue: newValue as! Int) == .ReadyToPlay {
playVideo() /* play after status is changed to .ReadyToPlay */
}
}
} else {
super.observeValueForKeyPath(keyPath, ofObject: object, change: change, context: context)
}
}
func playerItemDidReachEnd(notification: NSNotification) {
let playerItem = notification.object as! AVPlayerItem
playerItem.seekToTime(kCMTimeZero)
playVideo()
}
func playVideo() {
videoPlayerLayer?.player!.play()
}
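For reference, a minimal sketch of the same "wait for .ReadyToPlay" idea using block-based KVO in current Swift (the property and function names here are only placeholders):
import AVFoundation

var statusObservation: NSKeyValueObservation?

func startVideoPlayer(composition: AVComposition, videoComposition: AVVideoComposition) {
    let item = AVPlayerItem(asset: composition)
    item.videoComposition = videoComposition
    let player = AVPlayer(playerItem: item)

    // Only start playback once the item reports it is ready, as described above.
    statusObservation = item.observe(\.status, options: [.new]) { item, _ in
        if item.status == .readyToPlay {
            player.play()
        }
    }
    // Attach `player` to an AVPlayerLayer exactly as in the code above.
}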
Related
I have taken a PNG image and a video for watermarking; both are portrait, and I watermark the image onto the video.
After watermarking, the merged video comes out in landscape mode, rotated 90 degrees anticlockwise.
I am not able to find the exact reason why the video gets rotated from portrait to landscape, while the image shows up as a stretched portrait.
Please help. Thanks in advance.
Used below code:-
- (void)addWatermarkAtVideoFile:(NSURL *)videoURL image:(UIImage *)image withConvertedVideoUUID:(NSString *)convertedVideoUUID response:(void(^)(BOOL success, NSString *videoUUID, NSURL *videoPath))responseBlock {
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
if(clipVideoTrack) {
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
}
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
if(clipAudioTrack) {
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}
CGSize sizeOfVideo=[videoAsset naturalSize];
//Image of watermark
UIImage *myImage = image;
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)myImage.CGImage;
layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
layerCa.opacity = 1.0;
CALayer *parentLayer=[CALayer layer];
CALayer *videoLayer=[CALayer layer];
parentLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame=CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize=sizeOfVideo;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
//Creating temp path to save the converted video
NSString* myDocumentPath = [self getDocumentDirectoryPathWithFileName:convertedVideoUUID];
NSURL *outputFileURL = [[NSURL alloc] initFileURLWithPath:myDocumentPath];
//Check if the file already exists then remove the previous file
[self removeFileIfExistAtPAth:myDocumentPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition=videoComposition;
exportSession.outputURL = outputFileURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export OK");
[self saveInPhotoAlbum:myDocumentPath];
break;
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
break;
}
BOOL statusSuccess = [exportSession status] == AVAssetExportSessionStatusCompleted;
responseBlock(statusSuccess ? YES : NO, statusSuccess ? convertedVideoUUID : nil, statusSuccess ? outputFileURL : nil);
}];
}
I think it's the default behavior of AVFoundation; it's probably not linked to the watermark feature.
You could use a CGAffineTransform:
if(height > width) {
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
[layerInstruction setTransform:rotationTransform atTime:kCMTimeZero];
}
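For reference, a sketch of where height and width might come from and how to keep the rotated frames inside the render rectangle (videoAsset, layerInstruction, and videoComposition refer to the objects from the question's code):
let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
let natural = videoTrack.naturalSize
// A portrait recording usually has a landscape naturalSize plus a 90-degree
// preferredTransform, so detect portrait from the transform rather than the size.
let isPortrait = videoTrack.preferredTransform.b != 0
let width = isPortrait ? natural.height : natural.width
let height = isPortrait ? natural.width : natural.height
if height > width {
    // Rotate, then translate by the display width so the content does not end
    // up off-screen to the left of the canvas.
    let transform = CGAffineTransform(rotationAngle: .pi / 2)
        .concatenating(CGAffineTransform(translationX: width, y: 0))
    layerInstruction.setTransform(transform, at: kCMTimeZero)
    videoComposition.renderSize = CGSize(width: width, height: height)
}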
The naturalSize of an AVAsset track doesn't account for the rotation stored in its preferredTransform.
If you want to place your watermark correctly, consider using the renderSize of the AVMutableVideoComposition, or apply some transformations.
This snippet gives you the actual orientation for an asset:
func orientationForAsset(_ asset: AVAsset) -> (orientation: UIImageOrientation, isPortrait: Bool) {
let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first!
let transformMatrix = videoTrack.preferredTransform
return orientationFromTransform(transformMatrix)
}
func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .down
}
return (assetOrientation, isPortrait)
}
With this one you can get the actual size considering the rotation.
func resolutionSizeForAsset(_ asset: AVAsset) -> CGSize? {
guard let track = asset.tracks(withMediaType: AVMediaTypeVideo).first else { return nil }
let size = track.naturalSize.applying(track.preferredTransform)
return CGSize(width: fabs(size.width), height: fabs(size.height))
}
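A possible usage sketch (videoURL is an assumed input): feed the rotation-aware size into the video composition instead of the raw naturalSize:
let asset = AVAsset(url: videoURL)
if let renderSize = resolutionSizeForAsset(asset) {
    let videoComposition = AVMutableVideoComposition()
    // For a portrait clip this is 1080x1920 rather than the 1920x1080 naturalSize.
    videoComposition.renderSize = renderSize
    videoComposition.frameDuration = CMTimeMake(1, 30)
    // Watermark layer frames can be based on renderSize as well.
}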
I add a watermark to a video. I set the watermark layer's origin to (10, 10), but after export the watermark ends up in the lower-left corner. If I increase y, the watermark moves up, as if the y-axis increased from the bottom up.
Can anyone suggest something?
NSString *path = [[NSBundle mainBundle] pathForResource:@"video" ofType:@"mov"];
NSURL *file = [NSURL fileURLWithPath:path];
AVURLAsset *videoAsset = [[AVURLAsset alloc]initWithURL:file options:nil];
self.asset = videoAsset;
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (clipAudioTrack)
{
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipAudioTrack
atTime:kCMTimeZero
error:nil];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero
error:nil];
[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];
CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *aLayer = [CALayer layer];
CGRect frame = CGRectMake(10, 10, CGImageGetWidth(myImage.CGImage), CGImageGetHeight(myImage.CGImage));
aLayer.frame = frame;
aLayer.contents = (id)myImage.CGImage;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize = videoSize;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, nil, nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
break;
default:
break;
}
}];
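The layer tree rendered by AVVideoCompositionCoreAnimationTool uses a bottom-left origin (unlike UIKit), so y = 10 places the watermark 10 points above the bottom edge. Two possible fixes, sketched in Swift with the variable names from the code above:
// Fix 1: flip the parent layer so frames are interpreted with a top-left origin,
// as in UIKit (the answer below sets the same property).
parentLayer.isGeometryFlipped = true

// Fix 2: keep the bottom-left origin and measure y from the top edge instead.
let margin: CGFloat = 10
aLayer.frame = CGRect(x: margin,
                      y: videoSize.height - myImage.size.height - margin,
                      width: myImage.size.width,
                      height: myImage.size.height)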
In the past I had a lot of problems finding the correct anchor point for adding a watermark. I'll post my code; keep in mind that it generates a video with the watermark in the bottom-right corner (5-pixel margin).
Pay attention to:
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
Final code:
final class QUWatermarkManager {
static func watermark(video videoAsset:AVAsset, imageLayer : CALayer, saveToLibrary flag : Bool, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
var finalSize = CGSizeMake(imageLayer.frame.size.width, imageLayer.frame.size.width)
var computedVideoSize = finalSize.width
while (computedVideoSize%16>0) { // find the right resolution that can be divided by 16
computedVideoSize++;
}
finalSize = CGSizeMake(computedVideoSize,computedVideoSize)
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let videoSize = clipVideoTrack.naturalSize
let scale = finalSize.width / videoSize.width
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(finalSize.width,finalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t4 = CGAffineTransformScale(clipVideoTrack.preferredTransform, scale, scale)
layerInstruction.setTransform(t4, atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
// Parent Layer
let parentLayer = CALayer()
parentLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
parentLayer.geometryFlipped = true
//Video Layer
let videoLayer = CALayer()
videoLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
videoLayer.geometryFlipped = true
let logoImageView = UIImageView(image: UIImage(named: "logo_nav2"))
logoImageView.alpha = 0.5
logoImageView.contentMode = .ScaleAspectFit
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(imageLayer)
parentLayer.addSublayer(logoImageView.layer)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
// 4 - Get path
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .FullStyle
let date = dateFormatter.stringFromDate(NSDate())
let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
exporter.canPerformMultiplePassesOverSourceMediaData = true
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if exporter.status == AVAssetExportSessionStatus.Completed {
let outputURL = exporter.outputURL
if flag {
// Save to library
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
completion!(status: .Completed, session: exporter, outputURL: outputURL)
})
}
} else {
// Don't save to library
completion!(status: .Completed, session: exporter, outputURL: outputURL)
}
} else {
// Error
completion!(status: exporter.status, session: exporter, outputURL: exporter.outputURL)
}
})
}
}
}
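A possible call site for the class above (the asset and overlay layer names are assumed):
QUWatermarkManager.watermark(video: someAsset, imageLayer: overlayLayer, saveToLibrary: true) { status, session, outputURL in
    print("Export finished with status \(status), file at \(outputURL)")
}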
I have a requirement where I sometimes need to send a muted video to the end user, so I need to remove the audio track from the video.
The video is passed through a single processing step to mute it.
// Remove audio from video
- (void) RemoveAudioFromVideo:(NSString *)VideoLocalPath {
NSString * initPath1 = VideoLocalPath;
AVMutableComposition *composition = [AVMutableComposition composition];
NSString *inputVideoPath = initPath1;
AVURLAsset * sourceAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:inputVideoPath] options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
BOOL ok = NO;
AVAssetTrack * sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CMTimeRange x = CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]);
ok = [compositionVideoTrack insertTimeRange:x ofTrack:sourceVideoTrack atTime:kCMTimeZero error:nil];
if([[NSFileManager defaultManager] fileExistsAtPath:initPath1]) {
[[NSFileManager defaultManager] removeItemAtPath:initPath1 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: initPath1];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
NSLog(#";%#", [exporter supportedFileTypes]);
exporter.outputFileType = #"com.apple.quicktime-movie";
[exporter exportAsynchronouslyWithCompletionHandler:^{
[self savefinalVideoFileToDocuments:exporter.outputURL];
}];
}
// Write final Video
-(void)savefinalVideoFileToDocuments:(NSURL *)url {
NSString *storePath = [[self applicationCacheDirectory] stringByAppendingPathComponent:@"Videos"];
NSData * movieData = [NSData dataWithContentsOfURL:url];
[movieData writeToFile:storePath atomically:YES];
}
// Directory Path
- (NSString *)applicationCacheDirectory {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return documentsDirectory;
}
In Swift 3,
func removeAudioFromVideo(_ videoPath: String) {
let initPath1: String = videoPath
let composition = AVMutableComposition()
let inputVideoPath: String = initPath1
let sourceAsset = AVURLAsset(url: URL(fileURLWithPath: inputVideoPath), options: nil)
let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let x: CMTimeRange = CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)
_ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: kCMTimeZero)
if FileManager.default.fileExists(atPath: initPath1) {
try? FileManager.default.removeItem(atPath: initPath1)
}
let url = URL(fileURLWithPath: initPath1)
let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = url
exporter?.outputFileType = "com.apple.quicktime-movie"
exporter?.exportAsynchronously(completionHandler: {() -> Void in
self.saveFinalVideoFile(toDocuments: exporter!.outputURL!)
})
}
func saveFinalVideoFile(toDocuments url: URL) {
let fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false).appendingPathComponent("Videos")
let movieData = try? Data(contentsOf: url)
try? movieData?.write(to: fileURL, options: .atomic)
}
In my app, I'm recording small videos and adding them to an NSMutableArray as AVAssets so that I keep a record of what has been captured. When the user presses a button to merge them, the final result is only the first video taken (for example, if three short videos were taken, the final result after merging is only the first video; the others do not appear). My code for iterating over the NSMutableArray and stitching the videos together is here:
if (self.capturedVideos.count != 0) {
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
for (AVAsset *asset in self.capturedVideos) {
//check if the video is the first one captures so that it is placed at time 0.
if ([self.capturedVideos indexOfObject:asset] == 0) {
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
previousAsset = asset;
} else{
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:previousAsset.duration error:nil];
previousAsset = asset;
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
What comes after the for loop exports the video so it can be saved to the camera roll. So where is my mistake? The durations are right (so there is no overlapping). However, I'm unsure about one thing: there is an instance variable I added in braces after @implementation, previousAsset, which tracks the previously added asset so I know where to place the next one. It's of class AVAsset, so I didn't initialize it, because when I try to, it shows me an error:
previousAsset = [[AVAsset alloc] init];
Swift version
func merge(arrayVideos: [AVAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) -> Void {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
var insertTime = kCMTimeZero
for videoAsset in arrayVideos {
try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)
try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let fileManager = FileManager()
fileManager.removeItemIfExisted(outputFileURL)
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter!)
}
}
}
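Note that removeItemIfExisted(_:) above is not a standard FileManager method; a small extension along these lines (an assumption about what the author used) makes the snippet compile:
extension FileManager {
    /// Removes the file at `url` if it exists; does nothing otherwise.
    func removeItemIfExisted(_ url: URL) {
        guard fileExists(atPath: url.path) else { return }
        try? removeItem(at: url)
    }
}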
This will work fine
AVMutableComposition *mainComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionVideoTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *soundtrackTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime insertTime = kCMTimeZero;
for (AVAsset *videoAsset in assets) {
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:insertTime error:nil];
[soundtrackTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:insertTime error:nil];
// Updating the insertTime for the next insert
insertTime = CMTimeAdd(insertTime, videoAsset.duration);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
// Creating a full path and URL to the exported video
NSString *outputVideoPath = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"mergeVideo-%d.mov",arc4random() % 1000]];
// NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
current_name];
NSURL *outptVideoUrl = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mainComposition presetName:AVAssetExportPreset640x480];
// Setting attributes of the exporter
exporter.outputURL=outptVideoUrl;
exporter.outputFileType =AVFileTypeMPEG4; //AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//completion(exporter);
[self exportDidFinish:exporter];
// [self exportDidFinish:exporter:assets];
});
}];
Updated @brenoxp's answer for Swift 5.1:
func merge(arrayVideos: [AVAsset], completion: @escaping (URL?, Error?) -> ()) {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
var insertTime = CMTime.zero
for videoAsset in arrayVideos {
try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)
try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let fileManager = FileManager()
try? fileManager.removeItem(at: outputFileURL)
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
if let url = exporter?.outputURL{
completion(url, nil)
}
if let error = exporter?.error {
completion(nil, error)
}
}
}
There is a great example project on GitHub that is a really good starting point for doing this in a more reusable way within an app:
https://github.com/khoavd-dev/MergeVideos/blob/master/MergeVideos/VideoManager/KVVideoManager.swift
I have to do "slow motion" in a video file along with audio, in-between some frames and need to store the ramped video as a new video.
Ref: http://www.youtube.com/watch?v=BJ3_xMGzauk (watch from 0 to 10s)
From my analysis, I've found that AVFoundation framework can be helpful.
Ref:
http://developer.apple.com/library/ios/#documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html
Copy and pasted from the above link:
"
Editing
AV Foundation uses compositions to create new assets from existing pieces of media (typically, one or more video and audio tracks). You use a mutable composition to add and remove tracks, and adjust their temporal orderings. You can also set the relative volumes and ramping of audio tracks; and set the opacity, and opacity ramps, of video tracks. A composition is an assemblage of pieces of media held in memory. When you export a composition using an export session, it's collapsed to a file.
On iOS 4.1 and later, you can also create an asset from media such as sample buffers or still images using an asset writer.
"
Questions:
Can I do " slow motion " the video/audio file using the AVFoundation framework ? Or Is there any other package available? If i want to handle audio and video separately, please guide me how to do?
Update: code for the AV export session:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.inputAsset presetName:AVAssetExportPresetLowQuality];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL]; // output path;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
[self writeVideoToPhotoLibrary:[NSURL fileURLWithPath:outputURL]];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputURL] completionBlock:^(NSURL *assetURL, NSError *error){
if (error) {
NSLog(#"Video could not be saved");
}
}];
} else {
NSLog(#"error: %#", [exportSession error]);
}
}];
You could scale video using AVFoundation and CoreMedia frameworks.
Take a look at the AVMutableCompositionTrack method:
- (void)scaleTimeRange:(CMTimeRange)timeRange toDuration:(CMTime)duration;
Sample:
AVURLAsset* videoAsset = nil; //self.inputAsset;
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
(The audio track from videoAsset should probably also be added to mixComposition.)
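A minimal sketch of that idea in Swift, assuming the same videoAsset / mixComposition setup as above and a slow-down factor of 2.0:
let videoScaleFactor: Float64 = 2.0
let fullRange = CMTimeRange(start: .zero, duration: videoAsset.duration)
let scaledDuration = CMTimeMultiplyByFloat64(videoAsset.duration, multiplier: videoScaleFactor)
if let sourceAudioTrack = videoAsset.tracks(withMediaType: .audio).first,
   let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                              preferredTrackID: kCMPersistentTrackID_Invalid) {
    // Insert the whole audio track, then stretch it by the same factor so it
    // stays in sync with the slowed-down video track.
    try? compositionAudioTrack.insertTimeRange(fullRange, of: sourceAudioTrack, at: .zero)
    compositionAudioTrack.scaleTimeRange(fullRange, toDuration: scaledDuration)
}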
Slower + Faster with or without audio track
I have tried this and was able to slow down the asset.
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration) did the trick.
I made a class which will help you generate a slower video from an AVAsset.
A plus point is that you can also make it faster, and another plus point is that it handles the audio too.
Here is my custom class sample:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Scale range is between 1x, 2x, and 3x. Nothing happens if the scale is out of range. The exporter will be nil if the url is invalid or an asset instance cannot be created.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(asset.duration.value / scale, asset.duration.timescale) : CMTimeMake(asset.duration.value * scale, asset.duration.timescale)
let timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: kCMTimeZero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: kCMTimeZero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note: use a documents-directory path instead if you are testing on a device.
if FileManager.default.fileExists(atPath: outputFileURL.path) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
I took 1x, 2x, and 3x as valid scales. The class contains the proper validation and handling. Below is a sample of how to use this function.
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}
This line will handle the audio scaling
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
I have achieved adding slow motion to a video, including the audio, with the proper output orientation.
- (void)SlowMotion:(NSURL *)URl
{
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:URl options:nil]; //self.inputAsset;
AVAsset *currentAsset = [AVAsset assetWithURL:URl];
AVAssetTrack *vdoTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
NSError *audioInsertError =nil;
BOOL audioInsertResult =[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:&audioInsertError];
if (!audioInsertResult || nil != audioInsertError) {
//handle error
return;
}
CMTime duration =kCMTimeZero;
duration=CMTimeAdd(duration, currentAsset.duration);
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionVideoTrack setPreferredTransform:vdoTrack.preferredTransform];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"slowMotion.mov"]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
NSURL *_filePath = [NSURL fileURLWithPath:outputFilePath];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
assetExport.outputURL=_filePath;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^
{
switch ([assetExport status]) {
case AVAssetExportSessionStatusFailed:
{
NSLog(#"Export session faiied with error: %#", [assetExport error]);
dispatch_async(dispatch_get_main_queue(), ^{
// completion(nil);
});
}
break;
case AVAssetExportSessionStatusCompleted:
{
NSLog(#"Successful");
NSURL *outputURL = assetExport.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[self writeExportedVideoToAssetsLibrary:outputURL];
}
dispatch_async(dispatch_get_main_queue(), ^{
// completion(_filePath);
});
}
break;
default:
break;
}
}];
}
- (void)writeExportedVideoToAssetsLibrary :(NSURL *)url {
NSURL *exportURL = url;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
message:[error localizedRecoverySuggestion]
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil];
[alertView show];
}
if(!error)
{
// [activityView setHidden:YES];
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:#"Sucess"
message:#"video added to gallery successfully"
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil];
[alertView show];
}
#if !TARGET_IPHONE_SIMULATOR
[[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
#endif
});
}];
} else {
NSLog(#"Video could not be exported to assets library.");
}
}
I would extract all frames from the initial video using ffmpeg and then put them back together using AVAssetWriter, but with a lower frame rate. To get more fluid slow motion you may need to apply some blur effect, or even generate intermediate frames in between the existing ones, blended from two neighbouring frames.
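A rough Swift sketch of that reader/writer re-timing idea (video only, no ffmpeg; the function name and parameters are just an illustration). It copies each frame and stretches its presentation timestamp by the slow-motion factor:
import AVFoundation

func retime(asset: AVAsset, by factor: Float64, to outputURL: URL,
            completion: @escaping (Bool) -> Void) throws {
    let track = asset.tracks(withMediaType: .video)[0]

    let reader = try AVAssetReader(asset: asset)
    let readerOutput = AVAssetReaderTrackOutput(
        track: track,
        outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
    reader.add(readerOutput)

    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
    let writerInput = AVAssetWriterInput(
        mediaType: .video,
        outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264,
                         AVVideoWidthKey: Int(track.naturalSize.width),
                         AVVideoHeightKey: Int(track.naturalSize.height)])
    writerInput.transform = track.preferredTransform   // keep the original orientation
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput,
                                                       sourcePixelBufferAttributes: nil)
    writer.add(writerInput)

    reader.startReading()
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)

    let queue = DispatchQueue(label: "retime.video")
    writerInput.requestMediaDataWhenReady(on: queue) {
        while writerInput.isReadyForMoreMediaData {
            guard let sample = readerOutput.copyNextSampleBuffer(),
                  let pixelBuffer = CMSampleBufferGetImageBuffer(sample) else {
                // No more frames: finish the file and report the result.
                writerInput.markAsFinished()
                writer.finishWriting { completion(writer.status == .completed) }
                return
            }
            // Stretch each presentation timestamp so the same frames play out
            // over `factor` times the original duration.
            let pts = CMSampleBufferGetPresentationTimeStamp(sample)
            let scaled = CMTimeMultiplyByFloat64(pts, multiplier: factor)
            adaptor.append(pixelBuffer, withPresentationTime: scaled)
        }
    }
}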
An example in Swift:
I
var asset: AVAsset?
func configureAssets(){
let videoAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4v")!)
let audioAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4a")!)
// let audioAsset2 = AVURLAsset(url: Bundle.main.url(forResource: "audio2", withExtension: "m4a")!)
let comp = AVMutableComposition()
let videoAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first! as AVAssetTrack
let audioAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
// let audioAssetSourceTrack2 = audioAsset2.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
let videoCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try videoCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9 , 600)),
of: videoAssetSourceTrack,
at: kCMTimeZero)
try audioCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9, 600)),
of: audioAssetSourceTrack,
at: kCMTimeZero)
//
// try audioCompositionTrack.insertTimeRange(
// CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(3, 600)),
// of: audioAssetSourceTrack2,
// at: CMTimeMakeWithSeconds(7, 600))
let videoScaleFactor = Int64(2.0)
let videoDuration: CMTime = videoAsset.duration
videoCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
audioCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
videoCompositionTrack.preferredTransform = videoAssetSourceTrack.preferredTransform
}catch { print(error) }
asset = comp
}
II
func createFileFromAsset(_ asset: AVAsset){
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsDirectory.appendingPathComponent("rendered-audio.m4v")
deleteFile(filePath)
if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality){
exportSession.canPerformMultiplePassesOverSourceMediaData = true
exportSession.outputURL = filePath
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronously {
_ in
print("finished: \(filePath) : \(exportSession.status.rawValue) ")
}
}
}
func deleteFile(_ filePath:URL) {
guard FileManager.default.fileExists(atPath: filePath.path) else {
return
}
do {
try FileManager.default.removeItem(atPath: filePath.path)
}catch{
fatalError("Unable to delete file: \(error) : \(#function).")
}
}
Swift 5
Here is @TheTiger's code converted to Swift 5:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Scale range is between 1x, 2x, and 3x. Nothing happens if the scale is out of range. The exporter will be nil if the url is invalid or an asset instance cannot be created.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(value: asset.duration.value / scale, timescale: asset.duration.timescale) : CMTimeMake(value: asset.duration.value * scale, timescale: asset.duration.timescale)
let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: CMTime.zero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note: use a documents-directory path instead if you are testing on a device.
if FileManager.default.fileExists(atPath: outputFileURL.path) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
}
With the same use case:
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}
Creating "Slow motion" video in iOS swift is not easy, that I came across many "slow motion" that came to know not working or some of the codes in them are depreciated. And so I finally figured a way to make slow motion in Swift.
note: This code can be used for 120fps are greater than that too.
You can make audio in slow motion in the same way I did
Here is the "code snippet I created for achieving slow motion"
func slowMotion(pathUrl: URL) {
let videoAsset = AVURLAsset.init(url: pathUrl, options: nil)
let currentAsset = AVAsset.init(url: pathUrl)
let vdoTrack = currentAsset.tracks(withMediaType: .video)[0]
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
var videoInsertResult = false
do {
try compositionVideoTrack?.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .video)[0],
at: .zero)
videoInsertResult = true
} catch {
// handle error
}
if !videoInsertResult {
return
}
var duration: CMTime = .zero
duration = CMTimeAdd(duration, currentAsset.duration)
// MARK: This constant (videoScaleFactor) is what produces the slow motion: it stretches the video's time range.
// Just increase the videoScaleFactor value to play the video more slowly.
let videoScaleFactor = 2.0
let videoDuration = videoAsset.duration
compositionVideoTrack?.scaleTimeRange(
CMTimeRangeMake(start: .zero, duration: videoDuration),
toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
compositionVideoTrack?.preferredTransform = vdoTrack.preferredTransform
let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
let docsDir = dirPaths[0]
let outputFilePath = URL(fileURLWithPath: docsDir).appendingPathComponent("slowMotion\(UUID().uuidString).mp4").path
if FileManager.default.fileExists(atPath: outputFilePath) {
do {
try FileManager.default.removeItem(atPath: outputFilePath)
} catch {
}
}
let filePath = URL(fileURLWithPath: outputFilePath)
let assetExport = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
assetExport?.outputURL = filePath
assetExport?.outputFileType = .mp4
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport?.status {
case .failed:
print("asset output media url = \(String(describing: assetExport?.outputURL))")
print("Export session faiied with error: \(String(describing: assetExport?.error))")
DispatchQueue.main.async(execute: {
// completion(nil);
})
case .completed:
print("Successful")
let outputURL = assetExport!.outputURL
print("url path = \(String(describing: outputURL))")
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
}) { saved, error in
if saved {
print("video successfully saved in photos gallery view video in photos gallery")
}
if (error != nil) {
print("error in saing video \(String(describing: error?.localizedDescription))")
}
}
DispatchQueue.main.async(execute: {
// completion(_filePath);
})
case .none:
break
case .unknown:
break
case .waiting:
break
case .exporting:
break
case .cancelled:
break
case .some(_):
break
}
})
}