I'm trying to play a slow-motion video (filmed on the user's iPhone) in an AVPlayer.
I am retrieving the AVAsset with a request on a PHAsset from a picker:
[manager requestAVAssetForVideo:asset
options:videoRequestOptions
resultHandler:^(AVAsset * avasset, AVAudioMix * audioMix, NSDictionary * info) {}];
The problem is once it plays, I get this error:
-[AVComposition URL]: unrecognized selector sent to instance 0x138d17f40
However, if I set this option on the manager request, it plays as a normal-speed video at 120/240 fps with no crashes:
videoRequestOptions.version = PHVideoRequestOptionsVersionOriginal;
What's going on? The default version property is PHVideoRequestOptionsVersionCurrent, which incorporates slow motion, user edits, trims, etc.
I would like to play that video version. Thanks
So it turns out that slow-motion videos are passed as an AVComposition.
You can export that into a video file/URL, and then handle it like any other video.
Solution here: https://overflow.buffer.com/2016/02/29/slow-motion-video-ios/
//Output URL
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = paths.firstObject;
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeSlowMoVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
//Begin slow mo video export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
if (exporter.status == AVAssetExportSessionStatusCompleted) {
NSURL *URL = exporter.outputURL;
NSData *videoData = [NSData dataWithContentsOfURL:URL];
// Upload
[self uploadSelectedVideo:video data:videoData];
}
});
}];
For those coming here looking for a Swift answer, this is the Swift translation, which I use in my project where I need the URL of the slow-motion video to play it with an AVPlayerViewController:
else if asset is AVComposition {
    //Slow-motion assets are passed as an AVComposition
    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    if let documentsDirectory = paths.first as NSString? {
        let random = Int(arc4random() % 1000)
        let pathToAppend = String(format: "mergeSlowMoVideo-%d.mov", random)
        let myPath = documentsDirectory.appendingPathComponent(pathToAppend)
        let url = URL(fileURLWithPath: myPath)
        if let exporter = AVAssetExportSession(asset: asset!, presetName: AVAssetExportPresetHighestQuality) {
            exporter.outputURL = url
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.shouldOptimizeForNetworkUse = true
            exporter.exportAsynchronously {
                AsyncUtil.asyncMain {
                    if let url = exporter.outputURL {
                        let player = AVPlayer(url: url)
                        let playerViewController = AVPlayerViewController()
                        playerViewController.player = player
                        playerViewController.modalTransitionStyle = .crossDissolve
                        self.present(playerViewController, animated: true) {
                            playerViewController.player!.play()
                        }
                    }
                }
            }
        }
    }
}
Playing slo-mo and library videos in Swift 4 and above with a custom view
var vcPlayer = AVPlayerViewController()
var player = AVPlayer()
func playallVideo(_ customView: UIView, asset: PHAsset) {
guard asset.mediaType == .video
else {
print("Not a valid video media type")
return
}
let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
PHCachingImageManager().requestPlayerItem(forVideo: asset, options: options) { (playerItem, info) in
DispatchQueue.main.async {
self.player = AVPlayer(playerItem: playerItem)
self.vcPlayer.player = self.player
self.vcPlayer.view.frame = customView.bounds
self.vcPlayer.videoGravity = .resizeAspectFill
self.vcPlayer.showsPlaybackControls = true
//self.vcPlayer.allowsPictureInPicturePlayback = true
customView.addSubview(self.vcPlayer.view)
self.player.play()
}
}
}
/**********Function Call ********/
self.playallVideo(self.playerView/*YourCustomView*/, asset: currentAssetArr[currentIndex]/*Current PHAsset Fetched from Library*/)
:) enjoy
Looking for an answer in Swift?
Here's how I did it.
Creating a "slow motion" video in iOS Swift is not easy; I came across many "slow motion" examples that turned out not to work, or used deprecated code. So I finally figured out a way to make slow motion in Swift.
This code can be used for 120 fps and greater too.
Here is the code snippet I created for achieving slow motion.
Give me an upvote if this code works.
//Requires: import AVFoundation, import Photos
func slowMotion(pathUrl: URL) {
    let videoAsset = AVURLAsset(url: pathUrl, options: nil)
    let currentAsset = AVAsset(url: pathUrl)
    let vdoTrack = currentAsset.tracks(withMediaType: .video)[0]
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try compositionVideoTrack?.insertTimeRange(
            CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
            of: videoAsset.tracks(withMediaType: .video)[0],
            at: .zero)
    } catch {
        //handle error
        print(error)
        return
    }
    //MARK: This constant (videoScaleFactor) is what achieves the slow motion. It stretches the video's time range, which slows it down.
    // Just increase the videoScaleFactor value to play the video more slowly.
    let videoScaleFactor = 2.0
    let videoDuration = videoAsset.duration
    compositionVideoTrack?.scaleTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoDuration),
        toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
    compositionVideoTrack?.preferredTransform = vdoTrack.preferredTransform
    let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
    let docsDir = dirPaths[0]
    let outputFilePath = URL(fileURLWithPath: docsDir).appendingPathComponent("slowMotion\(UUID().uuidString).mp4").path
    if FileManager.default.fileExists(atPath: outputFilePath) {
        try? FileManager.default.removeItem(atPath: outputFilePath)
    }
    let filePath = URL(fileURLWithPath: outputFilePath)
    let assetExport = AVAssetExportSession(
        asset: mixComposition,
        presetName: AVAssetExportPresetHighestQuality)
    assetExport?.outputURL = filePath
    assetExport?.outputFileType = .mp4
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport?.status {
        case .failed:
            print("asset output media url = \(String(describing: assetExport?.outputURL))")
            print("Export session failed with error: \(String(describing: assetExport?.error))")
            DispatchQueue.main.async(execute: {
                // completion(nil);
            })
        case .completed:
            print("Successful")
            let outputURL = assetExport!.outputURL
            print("url path = \(String(describing: outputURL))")
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
            }) { saved, error in
                if saved {
                    print("video successfully saved to the Photos gallery")
                }
                if error != nil {
                    print("error in saving video \(String(describing: error?.localizedDescription))")
                }
            }
            DispatchQueue.main.async(execute: {
                // completion(_filePath);
            })
        default:
            break
        }
    })
}
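A minimal usage sketch; recordedVideoURL here is a hypothetical local file URL to the source video (on success the function saves the slowed copy to the photo library):
// `recordedVideoURL` is assumed to be a local file URL to a video you recorded or picked
slowMotion(pathUrl: recordedVideoURL)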
Related
I'm trying to convert an mp4 video file to m4a audio format with AVAssetExportSession in my iOS app.
This is the conversion code:
let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
if FileManager.default.fileExists(atPath: outputUrl.path) {
try? FileManager.default.removeItem(atPath: outputUrl.path)
}
let asset = AVURLAsset(url: inputUrl)
// tried the `AVAssetExportPresetAppleM4A` preset name but the same result
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.m4a
exportSession.outputURL = outputUrl
await exportSession.export()
switch exportSession.status {
case .completed:
return outputUrl
default:
// This becomes `4` which is `.failed`
print("Status: \(exportSession.status)")
throw exportSession.error!
}
Currently, it seems to work on iPhone simulators (confirmed on iOS 16.1/15.5), but it doesn't on my iPhone 7 (iOS 15.7.1) real device. It doesn't work on my colleague's iOS 16.1 real device either, so it shouldn't be a matter of the iOS version.
The mp4 file is located in the iOS Files app and the inputUrl in the above code becomes something like this (I get this URL via UIDocumentPickerViewController):
file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4
and the error is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x2808f30c0 {Error Domain=NSOSStatusErrorDomain Code=-16979 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-16979), NSLocalizedRecoverySuggestion=XXXXDEFAULTVALUEXXXX, NSURL=file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4, NSLocalizedDescription=The operation could not be completed}
It seems to be resolved by calling startAccessingSecurityScopedResource() on the inputUrl before exporting.
inputUrl.startAccessingSecurityScopedResource()
Not sure exactly why, but that's probably because the inputUrl is under the file:///private namespace?
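For completeness, a minimal sketch of how that call is typically balanced, slotted into the function that runs the export (the Bool return value and the matching stopAccessingSecurityScopedResource() are standard Foundation behavior):
// Security-scoped access should be balanced; the call returns false if access wasn't granted
let didStart = inputUrl.startAccessingSecurityScopedResource()
defer {
    if didStart { inputUrl.stopAccessingSecurityScopedResource() }
}
// ... create the AVAssetExportSession and run the export on `inputUrl` here ...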
Use this function to extract the audio from a video.
It exports the audio track from the video URL into a new path:
func extractAudioFromVideo(videoUrl:URL) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionAudioVideoTrack: [AVMutableCompositionTrack] = []
let videoAsset: AVAsset = AVAsset(url: videoUrl)
if let audioVideoTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid){
mutableCompositionAudioVideoTrack.append(audioVideoTrack)
if let audioVideoAssetTrack: AVAssetTrack = videoAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionAudioVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: audioVideoAssetTrack, at: CMTime.zero)
} catch {
print(error)
}
}
}
if let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("extractedAudio.m4a")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetAppleM4A) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.m4a
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .completed:
DispatchQueue.main.async {
print("audio url :---- \(outputURL)")
// -------- play output audio URL in player ------
}
case .failed:
if let _error = exportSession.error {
print(_error.localizedDescription)
}
case .cancelled:
if let _error = exportSession.error {
print(_error.localizedDescription)
}
default:
print("")
}
})
}
}
}
Playing the AVMutableComposition directly:
You can play the AVMutableComposition directly, without exporting the audio track.
The benefit of playing an AVMutableComposition is that the audio starts playing in the player instantly.
var avplayer = AVPlayer()
var playerController : AVPlayerViewController?
@IBAction func btnAudioPlay(sender: UIButton) {
self.playAudioCompositionFromVideo(fromVideoURL: videoURL /* your video file URL */) { composition in
let playerItem = AVPlayerItem(asset: composition)
self.playerController = AVPlayerViewController()
self.avplayer = AVPlayer(playerItem: playerItem)
self.playerController?.player = self.avplayer
self.playerController?.player?.play()
} failure: { error in
print(error as Any)
}
}
func playAudioCompositionFromVideo(fromVideoURL url: URL, success: @escaping ((AVMutableComposition) -> Void), failure: @escaping ((String?) -> Void)) {
let asset = AVPlayerItem(url: url).asset
let mixComposition = AVMutableComposition()
let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
//------------ Get Audio Tracks From Asset ---------
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
// ---- Use audio if video contains the audio track ---
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
// -------- Get First Audio track --------
guard let audioTrack = audioTracks.first else { return }
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
compositionAudioTrack?.preferredTransform = audioTrack.preferredTransform
success(mixComposition)
} catch _ {
failure("audio track insert failed!")
}
} else {
failure("audio track is not available!")
}
}
I am working on a video-based application in Swift. As per the requirement, I have to select multiple videos from the device gallery, set up different CIFilter effects and a volume for each video asset, then merge all the videos and save the final video. As output, when I play the final video, the sound volume should change accordingly.
I have already merged all the selected video assets into one with different CIFilter effects, but my problem is that when I try to set a volume for each video clip, it doesn't work. I get the default volume for my final video. Here is my code:
func addFilerEffectAndVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
let videoFilteredAsset = AVAsset(url: assetURL)
print(videoFilteredAsset)
createVideoComposition(myAsset: videoFilteredAsset, videos: video)
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
let filePath = url.path
let fileManager = FileManager.default
do {
if fileManager.fileExists(atPath: filePath) {
print("FILE AVAILABLE")
try fileManager.removeItem(atPath:filePath)
} else {
print("FILE NOT AVAILABLE")
}
} catch _ {
}
let composition: AVMutableComposition = AVMutableComposition()
let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//Add video to the final record
do {
try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: videoFilteredAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
} catch _ {
}
//Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetVideoTrack: AVAssetTrack = videoFilteredAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
//Set final volume of the audio record and the music
videoParam.setVolume(video.videoClipVolume, at: kCMTimeZero)
//Add setting
audioMixParam.append(videoParam)
//Add audio on final record
do {
try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
} catch _ {
assertionFailure()
}
//Fading volume out for background music
let durationInSeconds = CMTimeGetSeconds(videoFilteredAsset.duration)
let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoClipVolume, timeRange: firstSecond)
videoParam.setVolumeRamp(fromStartVolume: video.videoClipVolume, toEndVolume: 0, timeRange: lastSecond)
//Add parameter
audioMix.inputParameters = audioMixParam
// Export part, left for facility
let exporter = AVAssetExportSession(asset: videoFilteredAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoFilterComposition
exporter.outputURL = url
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.audioMix = audioMix
exporter.exportAsynchronously(completionHandler: { () -> Void in
completion!(exporter, url)
})
}
After that, I use another method to merge all the video clips using AVAssetExportSession; there I am not setting any AudioMixInputParameters.
Note: When I set up the volume in the final merging method using AVAssetExportSession's AudioMixInputParameters, the volume changes for the full video.
My question: Is it possible to set a different volume for each video clip? Please suggest. Thank you!
Here is the working solution for my question:
func addVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
//Create Asset from Url
let filteredVideoAsset: AVAsset = AVAsset(url: assetURL)
video.fileID = String(video.videoID)
//Get the path of App Document Directory
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
let filePath = url.path
let fileManager = FileManager.default
do {
if fileManager.fileExists(atPath: filePath) {
print("FILE AVAILABLE")
try fileManager.removeItem(atPath:filePath)
} else {
print("FILE NOT AVAILABLE")
}
} catch _ {
}
let composition: AVMutableComposition = AVMutableComposition()
let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//Add video to the final record
do {
try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: filteredVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
} catch _ {
}
//Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetVideoTrack: AVAssetTrack = filteredVideoAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
//Set final volume of the audio record and the music
videoParam.setVolume(video.videoVolume, at: kCMTimeZero)
//Add setting
audioMixParam.append(videoParam)
//Add audio on final record
//First: the audio of the record and Second: the music
do {
try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
} catch _ {
assertionFailure()
}
//Fading volume out for background music
let durationInSeconds = CMTimeGetSeconds(filteredVideoAsset.duration)
let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoVolume, timeRange: firstSecond)
videoParam.setVolumeRamp(fromStartVolume: video.videoVolume, toEndVolume: 0, timeRange: lastSecond)
//Add parameter
audioMix.inputParameters = audioMixParam
//Remove the previous temp video if exist
let filemgr = FileManager.default
do {
if filemgr.fileExists(atPath: "\(video.fileID)_\("FilterVideo").mov") {
try filemgr.removeItem(atPath: "\(video.fileID)_\("FilterVideo").mov")
} else {
}
} catch _ {
}
//Export the final record
let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.audioMix = audioMix
exporter.exportAsynchronously(completionHandler: { () -> Void in
completion!(exporter, url)
// self.saveVideoToLibrary(from: filePath)
})
}
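Note the key difference from the code in the question: the exporter is now initialized with the composition that the audio mix input parameters reference, rather than with the source asset.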
I found that exporting an asset with the AVAssetExportPresetPassthrough preset doesn't have an impact on output volume. When I tried AVAssetExportPresetLowQuality, the volume change was applied successfully.
I wish this were better documented somewhere :(
The working code:
// Assume we have:
let composition: AVMutableComposition
var inputParameters = [AVAudioMixInputParameters]()
// We add a track
let trackComposition = composition.addMutableTrack(...)
// Configure volume for this track
let inputParameter = AVMutableAudioMixInputParameters(track: trackComposition)
inputParameter.setVolume(desiredVolume, at: startTime)
// It works even without setting the `trackID`
// inputParameter.trackID = trackComposition.trackID
inputParameters.append(inputParameter)
// Apply gathered `inputParameters` before exporting
let audioMix = AVMutableAudioMix()
audioMix.inputParameters = inputParameters
// I found it's not working, if using `AVAssetExportPresetPassthrough`,
// so try `AVAssetExportPresetLowQuality` first
let export = AVAssetExportSession(..., presetName: AVAssetExportPresetLowQuality)
export.audioMix = audioMix
Tested this with multiple assetTrack insertions to the same compositionTrack, setting different volume for each insertion. Seems to be working.
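To illustrate that last point, here is a small hypothetical sketch (trackA and trackB are assumed AVAssetTracks from source assets) of inserting two ranges into one composition track and giving each insertion its own volume:
import AVFoundation

// Two source audio tracks appended back to back in one composition track,
// with a different volume set at each insertion's start time.
func makeMix(composition: AVMutableComposition, trackA: AVAssetTrack, trackB: AVAssetTrack) throws -> AVAudioMix {
    let compTrack = composition.addMutableTrack(withMediaType: .audio,
                                                preferredTrackID: kCMPersistentTrackID_Invalid)!
    let rangeA = CMTimeRange(start: .zero, duration: trackA.timeRange.duration)
    try compTrack.insertTimeRange(rangeA, of: trackA, at: .zero)
    let startB = rangeA.duration
    let rangeB = CMTimeRange(start: .zero, duration: trackB.timeRange.duration)
    try compTrack.insertTimeRange(rangeB, of: trackB, at: startB)

    let params = AVMutableAudioMixInputParameters(track: compTrack)
    params.setVolume(1.0, at: .zero)   // volume for the first insertion
    params.setVolume(0.3, at: startB)  // volume for the second insertion

    let mix = AVMutableAudioMix()
    mix.inputParameters = [params]
    return mix
}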
I am new to Swift and programming. I tried to concatenate some recorded files, which I did successfully like this:
func concatenateFiles(audioFiles: [URL], completion: @escaping (_ concatenatedFile: NSURL?) -> ()) {
// Result file
var nextClipStartTime = kCMTimeZero
let composition = AVMutableComposition()
let track = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
// Add each track
for audio in audioFiles {
let asset = AVURLAsset(url: NSURL(fileURLWithPath: audio.path) as URL, options: nil)
if let assetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first {
let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
do {
try track.insertTimeRange(timeRange, of: assetTrack, at: nextClipStartTime)
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
} catch {
print("Error concatenating file - \(error)")
completion(nil)
return
}
}
}
// Export the new file
if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let format = DateFormatter()
format.dateFormat = "yyyy:MM:dd-HH:mm:ss"
let currentFileName = "REC:\(format.string(from: Date()))"
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let fileURL = documentsDirectory.appendingPathComponent("\(currentFileName).m4a")
// Remove existing file
do {
print(audioFiles.count)
try FileManager.default.removeItem(atPath: fileURL.path)
print("Removed \(fileURL)")
} catch {
print("Could not remove file - \(error)")
}
// Configure export session output
exportSession.outputURL = fileURL as URL
exportSession.outputFileType = AVFileTypeAppleM4A
// Perform the export
exportSession.exportAsynchronously() { () -> Void in
switch exportSession.status
{
case AVAssetExportSessionStatus.completed:
print("Export complete")
DispatchQueue.main.async(execute: {
if self.concatinatedArray == nil
{
self.concatinatedArray = [URL]()
}
self.concatinatedArray?.append(exportSession.outputURL!)
completion(fileURL as NSURL?)
})
return print("success to Merge Video")
case AVAssetExportSessionStatus.failed:
completion(nil)
return print("failed to merge")
case AVAssetExportSessionStatus.cancelled:
completion(nil)
return print("cancelled merge")
default:
print("complete")
}
}
}
}
But now, when I want to merge it with a video, I get a crash at this moment:
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
I use a standard method of merging; it works with other sounds that I have, just not with the concatenated audio files. Please help me figure out how to get it working.
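The crash suggests the concatenated file's asset simply has no audio track at index 0; guard the track count before indexing into it, as below: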
AVURLAsset* avAsset = [[AVURLAsset alloc]initWithURL:[NSURL fileURLWithPath:path2] options:nil];
if ([[avAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
{
AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}
I am creating an app in which I need to record videos and upload them to a server. My project has an Android version too, and to stay compatible with it I have to record the videos in mp4 format. I followed this tutorial to set the UIImagePicker media type to movie format: imagePicker.mediaTypes = [kUTTypeMovie as String]
The UIImagePickerController is perfect for my requirement, and the only thing I need to change is its saving format to mp4. I tried kUTTypeMPEG4 in mediaTypes, but it throws an error at run time with no error description.
This is my video Capture function
func startCameraFromViewController() {
if UIImagePickerController.isSourceTypeAvailable(.Camera) == false {
return
}
viewBlack.hidden = false
presentViewController(cameraController, animated: false, completion: nil)
cameraController.sourceType = .Camera
cameraController.mediaTypes = [kUTTypeMovie as String]
//cameraController.mediaTypes = [kUTTypeMPEG4 as String]
cameraController.cameraCaptureMode = .Video
cameraController.videoQuality = .TypeMedium
if(getPurchaseId() as! Int == 0)
{
if(txtBenchMark.text?.isEmpty == false)
{
cameraController.videoMaximumDuration = NSTimeInterval(300.0)
}else{
cameraController.videoMaximumDuration = NSTimeInterval(60.0)
}
}else{
cameraController.videoMaximumDuration = NSTimeInterval(600.0)
}
cameraController.allowsEditing = false
}
I am using Swift 2.2 and Xcode 8 with Use Legacy Swift Language Version = Yes.
Any alternative solutions are also appreciated. Thanks in advance.
EDIT:
I found out that there is no way to directly record videos in mp4 format in Swift; the recording can only be converted to the required format from Apple's QuickTime .mov format.
I made some modifications to the following 2 answers to make them compatible with Swift 5:
https://stackoverflow.com/a/40354948/2470084
https://stackoverflow.com/a/39329155/2470084
import AVFoundation
func encodeVideo(videoURL: URL){
let avAsset = AVURLAsset(url: videoURL)
let startDate = Date()
let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
let docDir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let myDocPath = NSURL(fileURLWithPath: docDir).appendingPathComponent("temp.mp4")?.path
let docDir2 = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as NSURL
let filePath = docDir2.appendingPathComponent("rendered-Video.mp4")
deleteFile(filePath!)
if FileManager.default.fileExists(atPath: myDocPath!){
do{
try FileManager.default.removeItem(atPath: myDocPath!)
}catch let error{
print(error)
}
}
exportSession?.outputURL = filePath
exportSession?.outputFileType = AVFileType.mp4
exportSession?.shouldOptimizeForNetworkUse = true
let start = CMTime.zero
let range = CMTimeRange(start: start, duration: avAsset.duration)
exportSession?.timeRange = range
exportSession!.exportAsynchronously{() -> Void in
switch exportSession!.status{
case .failed:
print("\(exportSession!.error!)")
case .cancelled:
print("Export cancelled")
case .completed:
let endDate = Date()
let time = endDate.timeIntervalSince(startDate)
print(time)
print("Successful")
print(exportSession?.outputURL ?? "")
default:
break
}
}
}
func deleteFile(_ filePath:URL) {
guard FileManager.default.fileExists(atPath: filePath.path) else{
return
}
do {
try FileManager.default.removeItem(atPath: filePath.path)
}catch{
fatalError("Unable to delete file: \(error) : \(#function).")
}
}
Here is some code that you can use to convert the recorded video into MP4:
func encodeVideo(videoURL: NSURL) {
let avAsset = AVURLAsset(URL: videoURL, options: nil)
var startDate = NSDate()
//Create Export session
exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
// exportSession = AVAssetExportSession(asset: composition, presetName: mp4Quality)
//Creating temp path to save the converted video
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let myDocumentPath = NSURL(fileURLWithPath: documentsDirectory).URLByAppendingPathComponent("temp.mp4").absoluteString
let url = NSURL(fileURLWithPath: myDocumentPath)
let documentsDirectory2 = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0] as NSURL
let filePath = documentsDirectory2.URLByAppendingPathComponent("rendered-Video.mp4")
deleteFile(filePath)
//Check if the file already exists then remove the previous file
if NSFileManager.defaultManager().fileExistsAtPath(myDocumentPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(myDocumentPath)
}
catch let error {
print(error)
}
}
exportSession!.outputURL = filePath
exportSession!.outputFileType = AVFileTypeMPEG4
exportSession!.shouldOptimizeForNetworkUse = true
var start = CMTimeMakeWithSeconds(0.0, 0)
var range = CMTimeRangeMake(start, avAsset.duration)
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({() -> Void in
switch self.exportSession!.status {
case .Failed:
print(self.exportSession?.error)
case .Cancelled:
print("Export canceled")
case .Completed:
//Video conversion finished
var endDate = NSDate()
var time = endDate.timeIntervalSinceDate(startDate)
print(time)
print("Successful!")
print(self.exportSession!.outputURL)
default:
break
}
})
}
func deleteFile(filePath:NSURL) {
guard NSFileManager.defaultManager().fileExistsAtPath(filePath.path!) else {
return
}
do {
try NSFileManager.defaultManager().removeItemAtPath(filePath.path!)
}catch{
fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
}
}
Source: https://stackoverflow.com/a/39329155/4786204
A quick Swift 4 update to the previous answers:
func encodeVideo(videoUrl: URL, outputUrl: URL? = nil, resultClosure: @escaping (URL?) -> Void ) {
var finalOutputUrl: URL? = outputUrl
if finalOutputUrl == nil {
var url = videoUrl
url.deletePathExtension()
url.appendPathExtension("mp4")
finalOutputUrl = url
}
if FileManager.default.fileExists(atPath: finalOutputUrl!.path) {
print("Converted file already exists \(finalOutputUrl!.path)")
resultClosure(finalOutputUrl)
return
}
let asset = AVURLAsset(url: videoUrl)
if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) {
exportSession.outputURL = finalOutputUrl!
exportSession.outputFileType = AVFileType.mp4
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, asset.duration)
exportSession.timeRange = range
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously() {
switch exportSession.status {
case .failed:
print("Export failed: \(exportSession.error != nil ? exportSession.error!.localizedDescription : "No Error Info")")
case .cancelled:
print("Export canceled")
case .completed:
resultClosure(finalOutputUrl!)
default:
break
}
}
} else {
resultClosure(nil)
}
}
Swift 5.2 Update Solution
// Don't forget to import AVFoundation
func encodeVideo(at videoURL: URL, completionHandler: ((URL?, Error?) -> Void)?) {
let avAsset = AVURLAsset(url: videoURL, options: nil)
let startDate = Date()
//Create Export session
guard let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) else {
completionHandler?(nil, nil)
return
}
//Creating temp path to save the converted video
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsDirectory.appendingPathComponent("rendered-Video.mp4")
//Check if the file already exists then remove the previous file
if FileManager.default.fileExists(atPath: filePath.path) {
do {
try FileManager.default.removeItem(at: filePath)
} catch {
completionHandler?(nil, error)
return
}
}
exportSession.outputURL = filePath
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
let start = CMTime.zero
let range = CMTimeRangeMake(start: start, duration: avAsset.duration)
exportSession.timeRange = range
exportSession.exportAsynchronously(completionHandler: {() -> Void in
switch exportSession.status {
case .failed:
print(exportSession.error ?? "NO ERROR")
completionHandler?(nil, exportSession.error)
case .cancelled:
print("Export canceled")
completionHandler?(nil, nil)
case .completed:
//Video conversion finished
let endDate = Date()
let time = endDate.timeIntervalSince(startDate)
print(time)
print("Successful!")
print(exportSession.outputURL ?? "NO OUTPUT URL")
completionHandler?(exportSession.outputURL, nil)
default: break
}
})
}
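A usage sketch, where recordedVideoURL stands in for whatever .mov URL you got from the recorder:
encodeVideo(at: recordedVideoURL) { outputURL, error in
    if let outputURL = outputURL {
        print("MP4 ready at \(outputURL)")
    } else {
        print("Conversion failed: \(String(describing: error))")
    }
}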
Minor refactoring of previous examples:
import AVFoundation
extension AVURLAsset {
func exportVideo(presetName: String = AVAssetExportPresetHighestQuality,
outputFileType: AVFileType = .mp4,
fileExtension: String = "mp4",
then completion: @escaping (URL?) -> Void)
{
let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
if let session = AVAssetExportSession(asset: self, presetName: presetName) {
session.outputURL = outputURL
session.outputFileType = outputFileType
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, duration)
session.timeRange = range
session.shouldOptimizeForNetworkUse = true
session.exportAsynchronously {
switch session.status {
case .completed:
completion(outputURL)
case .cancelled:
debugPrint("Video export cancelled.")
completion(nil)
case .failed:
let errorMessage = session.error?.localizedDescription ?? "n/a"
debugPrint("Video export failed with error: \(errorMessage)")
completion(nil)
default:
break
}
}
} else {
completion(nil)
}
}
}
Also: the AVAssetExportPresetHighestQuality preset works when the video is played on Android / Chrome.
P.S. Be aware that the completion handler of the exportVideo method might not be called on the main thread.
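For example, hop back to the main queue in the completion before touching UI (asset here is an assumed AVURLAsset):
asset.exportVideo { url in
    DispatchQueue.main.async {
        // Safe to update UI with the exported `url` here
        print("Exported to \(String(describing: url))")
    }
}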
Running on iOS 11, we always receive a nil value for the AVAssetExportSession. Is there any solution for this case?
if let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) {
//work on iOS 9 and 10
} else {
//always on iOS 11
}
I have to apply "slow motion" to a video file, along with its audio, between certain frames, and I need to store the ramped video as a new video.
Ref: http://www.youtube.com/watch?v=BJ3_xMGzauk (watch from 0 to 10s)
From my analysis, I've found that AVFoundation framework can be helpful.
Ref:
http://developer.apple.com/library/ios/#documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html
Copy and pasted from the above link:
"
Editing
AV Foundation uses compositions to create new assets from existing pieces of media (typically, one or more video and audio tracks). You use a mutable composition to add and remove tracks, and adjust their temporal orderings. You can also set the relative volumes and ramping of audio tracks; and set the opacity, and opacity ramps, of video tracks. A composition is an assemblage of pieces of media held in memory. When you export a composition using an export session, it's collapsed to a file.
On iOS 4.1 and later, you can also create an asset from media such as sample buffers or still images using an asset writer.
"
Questions:
Can I apply "slow motion" to the video/audio file using the AVFoundation framework? Or is there any other package available? If I want to handle audio and video separately, please guide me on how to do it.
Update: code for the AV export session:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.inputAsset presetName:AVAssetExportPresetLowQuality];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL]; // output path;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
[self writeVideoToPhotoLibrary:[NSURL fileURLWithPath:outputURL]];
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputURL] completionBlock:^(NSURL *assetURL, NSError *error){
if (error) {
NSLog(@"Video could not be saved");
}
}];
} else {
NSLog(@"error: %@", [exportSession error]);
}
}];
You could scale video using the AVFoundation and CoreMedia frameworks.
Take a look at the AVMutableCompositionTrack method:
- (void)scaleTimeRange:(CMTimeRange)timeRange toDuration:(CMTime)duration;
Sample:
AVURLAsset* videoAsset = nil; //self.inputAsset;
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
(Probably audio track from videoAsset should also be added to mixComposition)
Slower + faster, with or without an audio track
I have tried it and was able to slow down the asset.
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration) did the trick.
I made a class which will help you generate a slower video from an AVAsset.
A plus point is that you can also make it faster, and another plus point is that it handles the audio too.
Here is my custom class sample:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Valid scales are 1x, 2x and 3x. Nothing will happen if the scale is out of range. The exporter will be nil if the url is invalid or an asset instance can't be created.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: @escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(asset.duration.value / scale, asset.duration.timescale) : CMTimeMake(asset.duration.value * scale, asset.duration.timescale)
let timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: kCMTimeZero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: kCMTimeZero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note:- Please use directory path if you are testing with device.
if FileManager.default.fileExists(atPath: outputFileURL.path) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
I took 1x, 2x and 3x as valid scales. The class contains the proper validation and handling. Below is a sample of how to use this function.
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}
This line handles the audio scaling:
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
I have achieved adding slow motion to a video, including its audio, with the proper output orientation.
- (void)SlowMotion:(NSURL *)URl
{
AVURLAsset* videoAsset = [AVURLAsset URLAssetWithURL:URl options:nil]; //self.inputAsset;
AVAsset *currentAsset = [AVAsset assetWithURL:URl];
AVAssetTrack *vdoTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create mutable composition
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *videoInsertError = nil;
BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&videoInsertError];
if (!videoInsertResult || nil != videoInsertError) {
//handle error
return;
}
NSError *audioInsertError =nil;
BOOL audioInsertResult =[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero
error:&audioInsertError];
if (!audioInsertResult || nil != audioInsertError) {
//handle error
return;
}
CMTime duration =kCMTimeZero;
duration=CMTimeAdd(duration, currentAsset.duration);
//slow down whole video by 2.0
double videoScaleFactor = 2.0;
CMTime videoDuration = videoAsset.duration;
[compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
toDuration:CMTimeMake(videoDuration.value*videoScaleFactor, videoDuration.timescale)];
[compositionVideoTrack setPreferredTransform:vdoTrack.preferredTransform];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"slowMotion.mov"]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
NSURL *_filePath = [NSURL fileURLWithPath:outputFilePath];
//export
AVAssetExportSession* assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetLowQuality];
assetExport.outputURL=_filePath;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^
{
switch ([assetExport status]) {
case AVAssetExportSessionStatusFailed:
{
NSLog(@"Export session failed with error: %@", [assetExport error]);
dispatch_async(dispatch_get_main_queue(), ^{
// completion(nil);
});
}
break;
case AVAssetExportSessionStatusCompleted:
{
NSLog(@"Successful");
NSURL *outputURL = assetExport.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[self writeExportedVideoToAssetsLibrary:outputURL];
}
dispatch_async(dispatch_get_main_queue(), ^{
// completion(_filePath);
});
}
break;
default:
break;
}
}];
}
- (void)writeExportedVideoToAssetsLibrary :(NSURL *)url {
NSURL *exportURL = url;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
message:[error localizedRecoverySuggestion]
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
}
if(!error)
{
// [activityView setHidden:YES];
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Success"
message:@"Video added to gallery successfully"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alertView show];
}
#if !TARGET_IPHONE_SIMULATOR
[[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
#endif
});
}];
} else {
NSLog(@"Video could not be exported to assets library.");
}
}
I would extract all frames from the initial video using ffmpeg and then collect them together using AVAssetWriter, but with a lower frame rate. To get more fluid slow motion, you might need to apply a blur effect, or even generate frames between the existing ones by blending two adjacent frames.
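A rough, untested sketch of that reader/writer idea, using AVAssetReader instead of ffmpeg for the frame extraction; the function name, slowdown parameter, and the hard-coded output settings are all assumptions:
import AVFoundation

// Decode each video frame, stretch its presentation timestamp, and write it back out.
func retime(asset: AVAsset, to outputURL: URL, slowdown: Double = 2.0) throws {
    let videoTrack = asset.tracks(withMediaType: .video)[0]

    let reader = try AVAssetReader(asset: asset)
    let readerOutput = AVAssetReaderTrackOutput(
        track: videoTrack,
        outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
    reader.add(readerOutput)

    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
    let writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: videoTrack.naturalSize.width,
        AVVideoHeightKey: videoTrack.naturalSize.height
    ])
    writerInput.transform = videoTrack.preferredTransform
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput,
                                                       sourcePixelBufferAttributes: nil)
    writer.add(writerInput)

    reader.startReading()
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)

    let queue = DispatchQueue(label: "retime")
    writerInput.requestMediaDataWhenReady(on: queue) {
        while writerInput.isReadyForMoreMediaData {
            guard let sample = readerOutput.copyNextSampleBuffer(),
                  let pixelBuffer = CMSampleBufferGetImageBuffer(sample) else {
                writerInput.markAsFinished()
                writer.finishWriting { print("retime finished, status: \(writer.status.rawValue)") }
                return
            }
            // Stretching the timestamp by 2x halves the effective frame rate
            let pts = CMSampleBufferGetPresentationTimeStamp(sample)
            adaptor.append(pixelBuffer, withPresentationTime: CMTimeMultiplyByFloat64(pts, multiplier: slowdown))
        }
    }
}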
An example in Swift:
I
var asset: AVAsset?
func configureAssets(){
let videoAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4v")!)
let audioAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4a")!)
// let audioAsset2 = AVURLAsset(url: Bundle.main.url(forResource: "audio2", withExtension: "m4a")!)
let comp = AVMutableComposition()
let videoAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first! as AVAssetTrack
let audioAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
// let audioAssetSourceTrack2 = audioAsset2.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
let videoCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try videoCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9 , 600)),
of: videoAssetSourceTrack,
at: kCMTimeZero)
try audioCompositionTrack.insertTimeRange(
CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9, 600)),
of: audioAssetSourceTrack,
at: kCMTimeZero)
//
// try audioCompositionTrack.insertTimeRange(
// CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(3, 600)),
// of: audioAssetSourceTrack2,
// at: CMTimeMakeWithSeconds(7, 600))
let videoScaleFactor = Int64(2.0)
let videoDuration: CMTime = videoAsset.duration
videoCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
audioCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
videoCompositionTrack.preferredTransform = videoAssetSourceTrack.preferredTransform
}catch { print(error) }
asset = comp
}
II
func createFileFromAsset(_ asset: AVAsset){
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsDirectory.appendingPathComponent("rendered-audio.m4v")
deleteFile(filePath)
if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality){
exportSession.canPerformMultiplePassesOverSourceMediaData = true
exportSession.outputURL = filePath
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronously {
_ in
print("finished: \(filePath) : \(exportSession.status.rawValue) ")
}
}
}
func deleteFile(_ filePath:URL) {
guard FileManager.default.fileExists(atPath: filePath.path) else {
return
}
do {
try FileManager.default.removeItem(atPath: filePath.path)
}catch{
fatalError("Unable to delete file: \(error) : \(#function).")
}
}
Swift 5
Here is @TheTiger's code converted to Swift 5:
import UIKit
import AVFoundation
enum SpeedoMode {
case Slower
case Faster
}
class VSVideoSpeeder: NSObject {
/// Singleton instance of `VSVideoSpeeder`
static var shared: VSVideoSpeeder = {
return VSVideoSpeeder()
}()
/// Range is b/w 1x, 2x and 3x. Will not happen anything if scale is out of range. Exporter will be nil in case url is invalid or unable to make asset instance.
func scaleAsset(fromURL url: URL, by scale: Int64, withMode mode: SpeedoMode, completion: #escaping (_ exporter: AVAssetExportSession?) -> Void) {
/// Check the valid scale
if scale < 1 || scale > 3 {
/// Can not proceed, Invalid range
completion(nil)
return
}
/// Asset
let asset = AVAsset(url: url)
/// Video Tracks
let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
if videoTracks.count == 0 {
/// Can not find any video track
completion(nil)
return
}
/// Get the scaled video duration
let scaledVideoDuration = (mode == .Faster) ? CMTimeMake(value: asset.duration.value / scale, timescale: asset.duration.timescale) : CMTimeMake(value: asset.duration.value * scale, timescale: asset.duration.timescale)
let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
/// Video track
let videoTrack = videoTracks.first!
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio Tracks
let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
if audioTracks.count > 0 {
/// Use audio if video contains the audio track
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
/// Audio track
let audioTrack = audioTracks.first!
do {
try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
compositionAudioTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
} catch _ {
/// Ignore audio error
}
}
do {
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: CMTime.zero)
compositionVideoTrack?.scaleTimeRange(timeRange, toDuration: scaledVideoDuration)
/// Keep original transformation
compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
/// Initialize Exporter now
let outputFileURL = URL(fileURLWithPath: "/Users/thetiger/Desktop/scaledVideo.mov")
/// Note:- Please use directory path if you are testing with device.
if FileManager.default.fileExists(atPath: outputFileURL.absoluteString) {
try FileManager.default.removeItem(at: outputFileURL)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously(completionHandler: {
completion(exporter)
})
} catch let error {
print(error.localizedDescription)
completion(nil)
return
}
}
}
With the same use case:
let url = Bundle.main.url(forResource: "1", withExtension: "mp4")!
VSVideoSpeeder.shared.scaleAsset(fromURL: url, by: 3, withMode: SpeedoMode.Slower) { (exporter) in
if let exporter = exporter {
switch exporter.status {
case .failed: do {
print(exporter.error?.localizedDescription ?? "Error in exporting..")
}
case .completed: do {
print("Scaled video has been generated successfully!")
}
case .unknown: break
case .waiting: break
case .exporting: break
case .cancelled: break
}
}
else {
/// Error
print("Exporter is not initialized.")
}
}