I have a requirement to send a muted video to the end user in some cases, so I need to remove the audio track from the video.
I pass the video through a single process to mute it.
// Remove audio from video
- (void) RemoveAudioFromVideo:(NSString *)VideoLocalPath {
NSString * initPath1 = VideoLocalPath;
AVMutableComposition *composition = [AVMutableComposition composition];
NSString *inputVideoPath = initPath1;
AVURLAsset * sourceAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:inputVideoPath] options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
BOOL ok = NO;
AVAssetTrack * sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CMTimeRange x = CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]);
ok = [compositionVideoTrack insertTimeRange:x ofTrack:sourceVideoTrack atTime:kCMTimeZero error:nil];
if([[NSFileManager defaultManager] fileExistsAtPath:initPath1]) {
[[NSFileManager defaultManager] removeItemAtPath:initPath1 error:nil];
}
NSURL *url = [[NSURL alloc] initFileURLWithPath: initPath1];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
NSLog(#";%#", [exporter supportedFileTypes]);
exporter.outputFileType = #"com.apple.quicktime-movie";
[exporter exportAsynchronouslyWithCompletionHandler:^{
[self savefinalVideoFileToDocuments:exporter.outputURL];
}];
}
// Write final Video
-(void)savefinalVideoFileToDocuments:(NSURL *)url {
NSString *storePath = [[self applicationCacheDirectory] stringByAppendingPathComponent:@"Videos"];
NSData * movieData = [NSData dataWithContentsOfURL:url];
[movieData writeToFile:storePath atomically:YES];
}
// Directory Path
- (NSString *)applicationCacheDirectory {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return documentsDirectory;
}
In Swift 3,
func removeAudioFromVideo(_ videoPath: String) {
let initPath1: String = videoPath
let composition = AVMutableComposition()
let inputVideoPath: String = initPath1
let sourceAsset = AVURLAsset(url: URL(fileURLWithPath: inputVideoPath), options: nil)
let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let x: CMTimeRange = CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)
_ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: kCMTimeZero)
if FileManager.default.fileExists(atPath: initPath1) {
try? FileManager.default.removeItem(atPath: initPath1)
}
let url = URL(fileURLWithPath: initPath1)
let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = url
exporter?.outputFileType = "com.apple.quicktime-movie"
exporter?.exportAsynchronously(completionHandler: {() -> Void in
self.saveFinalVideoFile(toDocuments: exporter!.outputURL!)
})
}
func saveFinalVideoFile(toDocuments url: URL) {
let fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false).appendingPathComponent("Videos")
let movieData = try? Data(contentsOf: url)
try? movieData?.write(to: fileURL, options: .atomic)
}
Related
I add a watermark to a video. I set the watermark layer's origin to (10, 10), but after export the watermark is located in the lower-left corner. If I increase y, the watermark moves up, as if the y-axis increased from the bottom up.
Can anyone suggest something?
NSString *path = [[NSBundle mainBundle] pathForResource:@"video" ofType:@"mov"];
NSURL *file = [NSURL fileURLWithPath:path];
AVURLAsset *videoAsset = [[AVURLAsset alloc]initWithURL:file options:nil];
self.asset = videoAsset;
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (clipAudioTrack)
{
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipAudioTrack
atTime:kCMTimeZero
error:nil];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero
error:nil];
[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];
CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *aLayer = [CALayer layer];
CGRect frame = CGRectMake(10, 10, CGImageGetWidth(myImage.CGImage), CGImageGetHeight(myImage.CGImage));
aLayer.frame = frame;
aLayer.contents = (id)myImage.CGImage;
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
AVMutableVideoComposition *videoComposition=[AVMutableVideoComposition videoComposition] ;
videoComposition.frameDuration=CMTimeMake(1, 30);
videoComposition.renderSize = videoSize;
videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/utput_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"Export OK");
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, nil, nil);
}
break;
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export Cancelled");
break;
default:
break;
}
}];
In the past I had a lot of problems finding the correct anchor point for adding a watermark. I'll post my code. Keep in mind that it generates a video with a watermark in the bottom-right corner (5 pixel margin).
Pay attention at:
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
Final code:
final class QUWatermarkManager {
static func watermark(video videoAsset:AVAsset, imageLayer : CALayer, saveToLibrary flag : Bool, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
var finalSize = CGSizeMake(imageLayer.frame.size.width, imageLayer.frame.size.width)
var computedVideoSize = finalSize.width
while (computedVideoSize%16>0) { // find the right resolution that can be divided by 16
computedVideoSize++;
}
finalSize = CGSizeMake(computedVideoSize,computedVideoSize)
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let videoSize = clipVideoTrack.naturalSize
let scale = finalSize.width / videoSize.width
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(finalSize.width,finalSize.width)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t4 = CGAffineTransformScale(clipVideoTrack.preferredTransform, scale, scale)
layerInstruction.setTransform(t4, atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
// Parent Layer
let parentLayer = CALayer()
parentLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
parentLayer.geometryFlipped = true
//Video Layer
let videoLayer = CALayer()
videoLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
videoLayer.geometryFlipped = true
let logoImageView = UIImageView(image: UIImage(named: "logo_nav2"))
logoImageView.alpha = 0.5
logoImageView.contentMode = .ScaleAspectFit
logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width/2) + 5),
finalSize.height - ((logoImageView.frame.size.height/2) + 5),
logoImageView.frame.size.width/2,
logoImageView.frame.size.height/2)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(imageLayer)
parentLayer.addSublayer(logoImageView.layer)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
// 4 - Get path
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let dateFormatter = NSDateFormatter()
dateFormatter.dateStyle = .LongStyle
dateFormatter.timeStyle = .FullStyle
let date = dateFormatter.stringFromDate(NSDate())
let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
let url = NSURL(fileURLWithPath: savePath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
exporter.canPerformMultiplePassesOverSourceMediaData = true
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if exporter.status == AVAssetExportSessionStatus.Completed {
let outputURL = exporter.outputURL
if flag {
// Save to library
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
completion!(status: .Completed, session: exporter, outputURL: outputURL)
})
}
} else {
// Don't save to library
completion!(status: .Completed, session: exporter, outputURL: outputURL)
}
} else {
// Error
completion!(status: exporter.status, session: exporter, outputURL: exporter.outputURL)
}
})
}
}
}
I have a query in my application: I want to convert an AVI-formatted video to the MP4 movie format. Is there any way to do this programmatically?
Any code snippet will be appreciated.
You need to use AVAssetExportSession to convert videos to the .mp4 format; the method below converts .avi videos to .mp4.
Check the line exportSession.outputFileType = AVFileTypeMPEG4; it specifies the output format of the video.
Here inputURL is the URL of the video that needs to be converted, and outputURL will be the final destination of the video.
One more thing: don't forget to specify the .mp4 extension in the outputURL video file.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:@"MyVideo.mp4"];
NSURL *outputURL = [NSURL fileURLWithPath:filePath];
[self convertVideoToLowQuailtyWithInputURL:localUrl outputURL:outputURL handler:^(AVAssetExportSession *exportSession)
{
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
// Video conversation completed
}
}];
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL handler:(void (^)(AVAssetExportSession*))handler {
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
handler(exportSession);
}];
}
Swift 4.2
An updated version of the accepted answer in Swift 4.2.
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0]
let filePath = URL(fileURLWithPath: documentsDirectory).appendingPathComponent("MyVideo.mp4").absoluteString
let outputURL = URL(fileURLWithPath: filePath)
convertVideoToLowQuailty(withInputURL: inputUrl, outputURL: outputURL, handler: { exportSession in
if exportSession?.status == .completed {
// Video conversation completed
}
})
func convertVideoToLowQuailty(withInputURL inputURL: URL?, outputURL: URL?, handler: @escaping (AVAssetExportSession?) -> Void) {
if let anURL = outputURL {
try? FileManager.default.removeItem(at: anURL)
}
var asset: AVURLAsset? = nil
if let anURL = inputURL {
asset = AVURLAsset(url: anURL, options: nil)
}
var exportSession: AVAssetExportSession? = nil
if let anAsset = asset {
exportSession = AVAssetExportSession(asset: anAsset, presetName: AVAssetExportPresetPassthrough)
}
exportSession?.outputURL = outputURL
exportSession?.outputFileType = .mp4
exportSession?.exportAsynchronously(completionHandler: {
handler(exportSession)
})
}
Took help from an online conversion tool.
I'm developing an iOS app in Objective-C.
As per my requirement, I want to split a video into multiple parts.
Suppose I have a video of 50 seconds and I want to divide it into 5 parts of 10 seconds each.
Please advise me if you have any idea how to do this.
Swift 5 version of @MilanPatel's answer, which I found very useful:
func splitSecondVideo() {
guard did < splitdivide else { return }
guard let videourl = URL(string: "YOUR_VIDEO_URL_HERE") else {return}
let asset = AVURLAsset(url: videourl, options: nil)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
let documentsDirectory = paths[0]
var myPathDocs: String?
var starttime: CMTime
var duration: CMTime
myPathDocs = URL(fileURLWithPath: documentsDirectory).appendingPathComponent("SplitFinalVideo\(did).mov").path
let endt = CMTimeGetSeconds(asset.duration)
print("All Duration : \(endt)")
let divide = CMTimeGetSeconds(asset.duration) / splitdivide
print("All Duration : \(divide)")
starttime = CMTimeMakeWithSeconds(Float64(divide * did), preferredTimescale: 1)
duration = CMTimeMakeWithSeconds(Float64(divide), preferredTimescale: 1)
let fileManager = FileManager()
var error: Error?
if fileManager.fileExists(atPath: myPathDocs ?? "") == true {
do {
try fileManager.removeItem(atPath: myPathDocs ?? "")
} catch {
}
}
exportSession?.outputURL = URL(fileURLWithPath: myPathDocs ?? "")
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.outputFileType = .mov
// Trim to half duration
let secondrange = CMTimeRangeMake(start: starttime, duration: duration)
exportSession?.timeRange = secondrange
exportSession?.exportAsynchronously(completionHandler: { [self] in
exportDidFinish(exportSession)
did += 1
self.splitSecondVideo()
})
}
func exportDidFinish(_ session: AVAssetExportSession?) {
if session?.status == .completed {
let outputURL = session?.outputURL
print("Before Exported")
saveVideoAtPath(with: outputURL)
}
}
Another update:
session?.status was always coming back as failed. To solve this issue,
I added:
asset.resourceLoader.setDelegate(self, queue: .main)
and conformed to the AVAssetResourceLoaderDelegate protocol, and it started working. Thanks to this answer by @panychyk.dima.
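For reference, here is a minimal hedged sketch of what that conformance might look like; the class name is mine, and whether you actually need to serve data from the delegate depends on your URL scheme, so this version simply declines the request:
import AVFoundation

// Hypothetical delegate owner; in the answer above, `self` is whatever object
// calls asset.resourceLoader.setDelegate(_:queue:).
final class AssetLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate {
    func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                        shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
        // Return true only if you intend to fulfil loadingRequest yourself
        // (for example, for a custom URL scheme); false lets AVFoundation handle it.
        return false
    }
}

// Usage, mirroring the line from the answer:
// asset.resourceLoader.setDelegate(myAssetLoaderDelegate, queue: .main)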
Nice question. The solution is here:
-(void)splitSecondVideo
{
if (did<splitdivide){
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_videourl options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs;
CMTime starttime;
CMTime duration;
myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"SplitFinalVideo%d.mov",did]];
double endt=CMTimeGetSeconds([asset duration]);
NSLog(@"All Duration : %f",endt);
double divide=CMTimeGetSeconds([asset duration])/splitdivide;
NSLog(@"All Duration : %f",divide);
starttime = CMTimeMakeWithSeconds(divide*did, 1);
duration = CMTimeMakeWithSeconds(divide, 1);
NSFileManager *fileManager=[[NSFileManager alloc]init];
NSError *error;
if ([fileManager fileExistsAtPath:myPathDocs] == YES) {
[fileManager removeItemAtPath:myPathDocs error:&error];
}
exportSession.outputURL = [NSURL fileURLWithPath:myPathDocs];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
// Trim to half duration
CMTimeRange secondrange = CMTimeRangeMake(starttime, duration);
exportSession.timeRange = secondrange;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
[self exportDidFinish:exportSession];
did++;
[self splitSecondVideo];
}];
}}
- (void)exportDidFinish:(AVAssetExportSession*)session {
if (session.status == AVAssetExportSessionStatusCompleted) {
NSURL *outputURL = session.outputURL;
NSLog(#"Before Exported");
[self SaveVideoAtPathWithURL:outputURL];
}}
Here did = 0.0 is set in the viewDidLoad method, and splitdivide is the number of parts you want to split the video into; in your question splitdivide = 5.
Note: did and splitdivide are both integer values (a sketch of the assumed declarations follows below).
Hope this is helpful. Enjoy.
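For reference, a hedged sketch of the declarations the split snippets above assume. The note above calls them integers, but the Swift version's arithmetic (divide * did and the string interpolation) reads most naturally with Float64, so treat the exact types as my assumption:
// Hypothetical declarations for the Swift version above; the Objective-C version
// would use the analogous instance variables, set in viewDidLoad.
var did: Float64 = 0          // index of the part currently being exported
let splitdivide: Float64 = 5  // number of parts to split the video into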
In my app, I'm recording small videos and adding them to an NSMutableArray as AVAssets so that I keep a record of what has been captured. When the user presses a button to merge them, the final result is only the first video taken (for example, if three short videos were taken, the final result after merging is only the first video; the others do not appear). My code for iterating over the NSMutableArray and stitching the videos together is here:
if (self.capturedVideos.count != 0) {
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
for (AVAsset *asset in self.capturedVideos) {
//check if the video is the first one captures so that it is placed at time 0.
if ([self.capturedVideos indexOfObject:asset] == 0) {
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
previousAsset = asset;
} else{
AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:previousAsset.duration error:nil];
previousAsset = asset;
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
What comes after the for loop exports the video so it can be saved to the camera roll. So where is my mistake? The durations are right (so there is no overlapping). However, I'm doubting something: there is an instance variable I added in braces after @implementation, previousAsset, which tracks the previous asset added and thus knows where to place the next one. It's of class AVAsset, so I didn't initialize it, because when I try to, it shows me an error:
previousAsset = [[AVAsset alloc] init];
Swift version
func merge(arrayVideos:[AVAsset], completion:@escaping (_ exporter: AVAssetExportSession) -> ()) -> Void {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
var insertTime = kCMTimeZero
for videoAsset in arrayVideos {
try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)
try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let fileManager = FileManager()
fileManager.removeItemIfExisted(outputFileURL)
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter!)
}
}
}
This will work fine
AVMutableComposition *mainComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *compositionVideoTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *soundtrackTrack = [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime insertTime = kCMTimeZero;
for (AVAsset *videoAsset in assets) {
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:insertTime error:nil];
[soundtrackTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:insertTime error:nil];
// Updating the insertTime for the next insert
insertTime = CMTimeAdd(insertTime, videoAsset.duration);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
// Creating a full path and URL to the exported video
NSString *outputVideoPath = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:#"mergeVideo-%d.mov",arc4random() % 1000]];
// NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
current_name];
NSURL *outptVideoUrl = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mainComposition presetName:AVAssetExportPreset640x480];
// Setting attributes of the exporter
exporter.outputURL=outptVideoUrl;
exporter.outputFileType =AVFileTypeMPEG4; //AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//completion(exporter);
[self exportDidFinish:exporter];
// [self exportDidFinish:exporter:assets];
});
}];
this will work fine..
Updated @brenoxp's answer for Swift 5.1:
func merge(arrayVideos:[AVAsset], completion:@escaping (URL?, Error?) -> ()) {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
var insertTime = CMTime.zero
for videoAsset in arrayVideos {
try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)
try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let fileManager = FileManager()
try? fileManager.removeItem(at: outputFileURL)
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
if let url = exporter?.outputURL{
completion(url, nil)
}
if let error = exporter?.error {
completion(nil, error)
}
}
}
There is a great example project on GitHub that is a really good starting point for how to do this in a more reusable way within an app:
https://github.com/khoavd-dev/MergeVideos/blob/master/MergeVideos/VideoManager/KVVideoManager.swift
I have to do "slow motion" in a video file along with audio, in-between some frames and need to store the ramped video as a new video.
Ref: http://www.youtube.com/watch?v=BJ3_xMGzauk (watch from 0 to 10s)
From my analysis, I've found that AVFoundation framework can be helpful.
Ref:
http://developer.apple.com/library/ios/#documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html
Copied and pasted from the above link:
"
Editing
AV Foundation uses compositions to create new assets from existing pieces of media (typically, one or more video and audio tracks). You use a mutable composition to add and remove tracks, and adjust their temporal orderings. You can also set the relative volumes and ramping of audio tracks; and set the opacity, and opacity ramps, of video tracks. A composition is an assemblage of pieces of media held in memory. When you export a composition using an export session, it's collapsed to a file.
On iOS 4.1 and later, you can also create an asset from media such as sample buffers or still images using an asset writer.
"
Questions:
Can I do "slow motion" on the video/audio file using the AVFoundation framework? Or is there any other package available? If I want to handle audio and video separately, please guide me on how to do it.
Update: code for the AVAssetExportSession:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.inputAsset presetName:AVAssetExportPresetLowQuality];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL]; // output path;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
[self writeVideoToPhotoLibrary:[NSURL fileURLWithPath:outputURL]];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputURL] completionBlock:^(NSURL *assetURL, NSError *error){
if (error) {
NSLog(#"Video could not be saved");
}
}];
} else {
NSLog(#"error: %#", [exportSession error]);
}
}];
You could scale video using AVFoundation and CoreMedia frameworks.
Take a look at the AVMutableCompositionTrack method:
- (void)scaleTimeRange:(CMTimeRange)timeRange toDuration:(CMTime)duration;
Sample:
AVURLAsset* videoAsset = nil; //self.inputAsset;
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
(Probably the audio track from videoAsset should also be added to mixComposition.)
Slower + Faster with or without audio track
I have tried this and was able to slow down the asset.
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration) did the trick.
I made a class that will help you generate a slower video from an AVAsset.
A plus point is that you can also make it faster, and another plus is that it handles the audio too.
Here is my custom class sample:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Range is between 1x, 2x and 3x. Nothing will happen if the scale is out of range. Exporter will be nil if the url is invalid or an asset instance can't be made.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(asset.duration.value / scale, asset.duration.timescale) : CMTimeMake(asset.duration.value * scale, asset.duration.timescale)
let timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: kCMTimeZero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: kCMTimeZero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note: use a documents-directory path instead if you are testing on a device.
if FileManager.default.fileExists(atPath: outputFileURL.path) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
I took 1x, 2x and 3x as valid scales. The class contains the proper validation and handling. Below is a sample of how to use this function.
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}
This line will handle the audio scaling
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
I have achieved adding slow motion to a video, including the audio as well, with the proper output orientation.
- (void)SlowMotion:(NSURL *)URl
{
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:URl options:nil]; //self.inputAsset;
AVAsset *currentAsset = [AVAsset assetWithURL:URl];
AVAssetTrack *vdoTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
NSError *audioInsertError =nil;
BOOL audioInsertResult =[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:&audioInsertError];
if (!audioInsertResult || nil != audioInsertError) {
//handle error
return;
}
CMTime duration =kCMTimeZero;
duration=CMTimeAdd(duration, currentAsset.duration);
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionVideoTrack setPreferredTransform:vdoTrack.preferredTransform];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"slowMotion.mov"]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
NSURL *_filePath = [NSURL fileURLWithPath:outputFilePath];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
assetExport.outputURL=_filePath;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^
{
switch ([assetExport status]) {
case AVAssetExportSessionStatusFailed:
{
NSLog(#"Export session faiied with error: %#", [assetExport error]);
dispatch_async(dispatch_get_main_queue(), ^{
// completion(nil);
});
}
break;
case AVAssetExportSessionStatusCompleted:
{
NSLog(#"Successful");
NSURL *outputURL = assetExport.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[self writeExportedVideoToAssetsLibrary:outputURL];
}
dispatch_async(dispatch_get_main_queue(), ^{
// completion(_filePath);
});
}
break;
default:
break;
}
}];
}
- (void)writeExportedVideoToAssetsLibrary :(NSURL *)url {
NSURL *exportURL = url;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
message:[error localizedRecoverySuggestion]
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil];
[alertView show];
}
if(!error)
{
// [activityView setHidden:YES];
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Success"
message:@"Video added to gallery successfully"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
}
#if !TARGET_IPHONE_SIMULATOR
[[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
#endif
});
}];
} else {
NSLog(#"Video could not be exported to assets library.");
}
}
I would extract all frames from the initial video using ffmpeg and then put them back together using AVAssetWriter, but with a lower frame rate. To get more fluid slow motion you may need to apply some blur effect, or even generate frames in between the existing ones, each a blend of two frames.
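As a rough, hedged sketch of the AVAssetWriter half of that idea (skipping the ffmpeg extraction step), this re-times decoded frames with AVAssetReader/AVAssetWriter; the function name, output settings and pixel format are my assumptions, and it only handles the video track:
import AVFoundation

func slowDown(asset: AVAsset, by factor: Double, to outputURL: URL) throws {
    guard let videoTrack = asset.tracks(withMediaType: .video).first else { return }

    // Decode the source frames.
    let reader = try AVAssetReader(asset: asset)
    let readerOutput = AVAssetReaderTrackOutput(
        track: videoTrack,
        outputSettings: [kCVPixelBufferPixelFormatTypeKey as String:
                         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange])
    reader.add(readerOutput)

    // Re-encode them to a new file.
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
    let writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: videoTrack.naturalSize.width,
        AVVideoHeightKey: videoTrack.naturalSize.height])
    writerInput.expectsMediaDataInRealTime = false
    writer.add(writerInput)

    reader.startReading()
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)

    let queue = DispatchQueue(label: "retime")
    writerInput.requestMediaDataWhenReady(on: queue) {
        while writerInput.isReadyForMoreMediaData {
            guard let sample = readerOutput.copyNextSampleBuffer() else {
                writerInput.markAsFinished()
                writer.finishWriting { print("Finished with status \(writer.status.rawValue)") }
                return
            }
            // Stretch each frame's presentation time by `factor` to slow the clip down.
            let pts = CMSampleBufferGetPresentationTimeStamp(sample)
            var timing = CMSampleTimingInfo(duration: CMSampleBufferGetDuration(sample),
                                            presentationTimeStamp: CMTimeMultiplyByFloat64(pts, multiplier: factor),
                                            decodeTimeStamp: .invalid)
            var retimed: CMSampleBuffer?
            CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault,
                                                  sampleBuffer: sample,
                                                  sampleTimingEntryCount: 1,
                                                  sampleTimingArray: &timing,
                                                  sampleBufferOut: &retimed)
            if let retimed = retimed {
                writerInput.append(retimed)
            }
        }
    }
}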
An example in Swift:
I
var asset: AVAsset?
func configureAssets(){
let videoAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4v")!)
let audioAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4a")!)
// let audioAsset2 = AVURLAsset(url: Bundle.main.url(forResource: "audio2", withExtension: "m4a")!)
let comp = AVMutableComposition()
let videoAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first! as AVAssetTrack
let audioAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
// let audioAssetSourceTrack2 = audioAsset2.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
let videoCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try videoCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9 , 600)),
of: videoAssetSourceTrack,
at: kCMTimeZero)
try audioCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9, 600)),
of: audioAssetSourceTrack,
at: kCMTimeZero)
//
// try audioCompositionTrack.insertTimeRange(
// CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(3, 600)),
// of: audioAssetSourceTrack2,
// at: CMTimeMakeWithSeconds(7, 600))
let videoScaleFactor = Int64(2.0)
let videoDuration: CMTime = videoAsset.duration
videoCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
audioCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
videoCompositionTrack.preferredTransform = videoAssetSourceTrack.preferredTransform
}catch { print(error) }
asset = comp
}
II
func createFileFromAsset(_ asset: AVAsset){
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsDirectory.appendingPathComponent("rendered-audio.m4v")
deleteFile(filePath)
if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality){
exportSession.canPerformMultiplePassesOverSourceMediaData = true
exportSession.outputURL = filePath
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronously {
_ in
print("finished: \(filePath) : \(exportSession.status.rawValue) ")
}
}
}
func deleteFile(_ filePath:URL) {
guard FileManager.default.fileExists(atPath: filePath.path) else {
return
}
do {
try FileManager.default.removeItem(atPath: filePath.path)
}catch{
fatalError("Unable to delete file: \(error) : \(#function).")
}
}
Swift 5
Here is @TheTiger's code converted to Swift 5:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Range is between 1x, 2x and 3x. Nothing will happen if the scale is out of range. Exporter will be nil if the url is invalid or an asset instance can't be made.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(value: asset.duration.value / scale, timescale: asset.duration.timescale) : CMTimeMake(value: asset.duration.value * scale, timescale: asset.duration.timescale)
let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: CMTime.zero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note: use a documents-directory path instead if you are testing on a device.
if FileManager.default.fileExists(atPath: outputFileURL.path) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
}
With the same use case:
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}
Creating "Slow motion" video in iOS swift is not easy, that I came across many "slow motion" that came to know not working or some of the codes in them are depreciated. And so I finally figured a way to make slow motion in Swift.
note: This code can be used for 120fps are greater than that too.
You can make audio in slow motion in the same way I did
Here is the "code snippet I created for achieving slow motion"
func slowMotion(pathUrl: URL) {
let videoAsset = AVURLAsset.init(url: pathUrl, options: nil)
let currentAsset = AVAsset.init(url: pathUrl)
let vdoTrack = currentAsset.tracks(withMediaType: .video)[0]
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
var videoInsertError: Error? = nil
var videoInsertResult = false
do {
try compositionVideoTrack?.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .video)[0],
at: .zero)
videoInsertResult = true
} catch {
videoInsertError = error
}
if !videoInsertResult || videoInsertError != nil {
//handle error
return
}
var duration: CMTime = .zero
duration = CMTimeAdd(duration, currentAsset.duration)
//MARK: This constant (videoScaleFactor) is what achieves the slow motion you wanted: it stretches the time range of the video.
// Just increase the videoScaleFactor value to make the video play back more slowly.
let videoScaleFactor = 2.0
let videoDuration = videoAsset.duration
compositionVideoTrack?.scaleTimeRange(
CMTimeRangeMake(start: .zero, duration: videoDuration),
toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
compositionVideoTrack?.preferredTransform = vdoTrack.preferredTransform
let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
let docsDir = dirPaths[0]
let outputFilePath = URL(fileURLWithPath: docsDir).appendingPathComponent("slowMotion\(UUID().uuidString).mp4").path
if FileManager.default.fileExists(atPath: outputFilePath) {
do {
try FileManager.default.removeItem(atPath: outputFilePath)
} catch {
}
}
let filePath = URL(fileURLWithPath: outputFilePath)
let assetExport = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
assetExport?.outputURL = filePath
assetExport?.outputFileType = .mp4
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport?.status {
case .failed:
print("asset output media url = \(String(describing: assetExport?.outputURL))")
print("Export session faiied with error: \(String(describing: assetExport?.error))")
DispatchQueue.main.async(execute: {
// completion(nil);
})
case .completed:
print("Successful")
let outputURL = assetExport!.outputURL
print("url path = \(String(describing: outputURL))")
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
}) { saved, error in
if saved {
print("video successfully saved in photos gallery view video in photos gallery")
}
if (error != nil) {
print("error in saing video \(String(describing: error?.localizedDescription))")
}
}
DispatchQueue.main.async(execute: {
// completion(_filePath);
})
case .none:
break
case .unknown:
break
case .waiting:
break
case .exporting:
break
case .cancelled:
break
case .some(_):
break
}
})
}
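The snippet above only retimes the video track. Since the note above says the audio can be slowed the same way, here is a hedged sketch of that step, written against the variable names used in slowMotion(pathUrl:) and assuming the asset actually has an audio track; add it after the video track is scaled and before the export session is created:
if let audioTrack = currentAsset.tracks(withMediaType: .audio).first {
    let compositionAudioTrack = mixComposition.addMutableTrack(
        withMediaType: .audio,
        preferredTrackID: kCMPersistentTrackID_Invalid)
    try? compositionAudioTrack?.insertTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
        of: audioTrack,
        at: .zero)
    // Stretch the audio by the same factor so it stays in sync with the slowed video.
    compositionAudioTrack?.scaleTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoDuration),
        toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor),
                               timescale: videoDuration.timescale))
}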