I want to convert MKV (Matroska) to MP4 in Swift.
When I add a file in MKV format, my code breaks at line 8 (marked below). What should I do to fix that?
This is my code:
let composition = AVMutableComposition()
do {
    let sourceUrl = Bundle.main.url(forResource: "sample", withExtension: "mov")!
    let asset = AVURLAsset(url: sourceUrl)
    // line 8 -> breaks here
    guard let videoAssetTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return }
    guard let videoCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    try videoCompositionTrack.insertTimeRange(videoAssetTrack.timeRange, of: videoAssetTrack, at: CMTime.zero)
} catch {
    print(error)
}
// Create an export session
let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.mp4
exportSession.outputURL = browseURL
// Export file
exportSession.exportAsynchronously {
    guard exportSession.status == .completed else { return }
    DispatchQueue.main.async {
        // Present a UIActivityViewController to share the exported file
        print("completed")
    }
}
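For what it's worth, the guard at line 8 most likely fails because AVFoundation has no Matroska demuxer, so an .mkv AVURLAsset exposes no video tracks; AVAssetExportSession cannot read MKV input, and the file would have to be transcoded by another tool (e.g. FFmpeg) before this code can work. A minimal defensive check under that assumption, not a fix that makes MKV readable:

import AVFoundation

// Hedged sketch: returns nil when AVFoundation cannot read the container
// (assumption: an MKV asset reports isPlayable == false / an empty track list).
func loadableVideoTrack(at url: URL) -> AVAssetTrack? {
    let asset = AVURLAsset(url: url)
    guard asset.isPlayable, let track = asset.tracks(withMediaType: .video).first else {
        print("Unsupported container for AVFoundation (e.g. MKV); transcode it first")
        return nil
    }
    return track
}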
Related
I'm trying to convert an MP4 video file to M4A audio format with AVAssetExportSession in my iOS app.
This is the conversion code:
let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
if FileManager.default.fileExists(atPath: outputUrl.path) {
    try? FileManager.default.removeItem(atPath: outputUrl.path)
}

let asset = AVURLAsset(url: inputUrl)
// tried the `AVAssetExportPresetAppleM4A` preset name but the same result
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.m4a
exportSession.outputURL = outputUrl

await exportSession.export()

switch exportSession.status {
case .completed:
    return outputUrl
default:
    // This becomes `4` which is `.failed`
    print("Status: \(exportSession.status)")
    throw exportSession.error!
}
Currently, it seems to work on iPhone simulators (confirmed on iOS 16.1/15.5), but it doesn't on my iPhone 7 (iOS 15.7.1) real device. It also doesn't seem to work on my colleague's iOS 16.1 real device, so it shouldn't be a matter of the iOS version.
The mp4 file is located in the iOS Files app and the inputUrl in the above code becomes something like this (I get this URL via UIDocumentPickerViewController):
file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4
and the error is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x2808f30c0 {Error Domain=NSOSStatusErrorDomain Code=-16979 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-16979), NSLocalizedRecoverySuggestion=XXXXDEFAULTVALUEXXXX, NSURL=file:///private/var/mobile/Library/Mobile%20Documents/com~apple~CloudDocs/Downloads/%E3%81%8A%E3%81%97%E3%82%83%E3%81%B8%E3%82%99%E3%82%8A%E3%81%B2%E3%82%8D%E3%82%86%E3%81%8D.mp4, NSLocalizedDescription=The operation could not be completed}
It seems to be resolved by calling startAccessingSecurityScopedResource() on the inputUrl before exporting.
inputUrl.startAccessingSecurityScopedResource()
I'm not sure exactly why, but it's probably because the inputUrl is a security-scoped URL from the document picker, living outside the app sandbox under file:///private/....
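A slightly fuller sketch of that fix, pairing the start call with stopAccessingSecurityScopedResource() so the access claim is always released (exportToM4a here is a hypothetical stand-in for the conversion code above):

// Hedged sketch: claim and release security-scoped access around the export.
func convert(_ inputUrl: URL) async throws -> URL {
    let accessGranted = inputUrl.startAccessingSecurityScopedResource()
    defer {
        if accessGranted { inputUrl.stopAccessingSecurityScopedResource() }
    }
    // exportToM4a is a placeholder for the AVAssetExportSession code above.
    return try await exportToM4a(from: inputUrl)
}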
Use this function to extract the audio from a video; it exports the audio track from the video URL to a new path:
func extractAudioFromVideo(videoUrl: URL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionAudioVideoTrack: [AVMutableCompositionTrack] = []
    let videoAsset: AVAsset = AVAsset(url: videoUrl)

    // Add an audio track to the composition and copy the video's audio into it
    if let audioVideoTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionAudioVideoTrack.append(audioVideoTrack)
        if let audioVideoAssetTrack: AVAssetTrack = videoAsset.tracks(withMediaType: .audio).first {
            do {
                try mutableCompositionAudioVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: audioVideoAssetTrack, at: CMTime.zero)
            } catch {
                print(error)
            }
        }
    }

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("extractedAudio.m4a")
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetAppleM4A) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.m4a
            exportSession.shouldOptimizeForNetworkUse = true
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .completed:
                    DispatchQueue.main.async {
                        print("audio url :---- \(outputURL)")
                        // -------- play output audio URL in player ------
                    }
                case .failed:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                case .cancelled:
                    if let _error = exportSession.error {
                        print(_error.localizedDescription)
                    }
                default:
                    print("")
                }
            })
        }
    }
}
AVMutableComposition playback :-
You can play the AVMutableComposition directly, without exporting the audio track first.
The benefit of playing the AVMutableComposition is that audio playback in the player starts instantly.
var avplayer = AVPlayer()
var playerController: AVPlayerViewController?

@IBAction func btnAudioPlay(sender: UIButton) {
    // Pass your real video URL here; the empty string is just a placeholder
    self.playAudioCompositionFromVideo(fromVideoURL: URL(string: "")!) { composition in
        let playerItem = AVPlayerItem(asset: composition)
        self.playerController = AVPlayerViewController()
        self.avplayer = AVPlayer(playerItem: playerItem)
        self.playerController?.player = self.avplayer
        // Present self.playerController if you also want on-screen player controls
        self.playerController?.player?.play()
    } failure: { error in
        print(error as Any)
    }
}
func playAudioCompositionFromVideo(fromVideoURL url: URL, success: @escaping ((AVMutableComposition) -> Void), failure: @escaping ((String?) -> Void)) {
    let asset = AVPlayerItem(url: url).asset
    let mixComposition = AVMutableComposition()
    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)

    // ------------ Get audio tracks from the asset ---------
    let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
    if audioTracks.count > 0 {
        // ---- Use the audio only if the video contains an audio track ---
        let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        // -------- Get the first audio track --------
        guard let audioTrack = audioTracks.first else { return }
        do {
            try compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: CMTime.zero)
            compositionAudioTrack?.preferredTransform = audioTrack.preferredTransform
            success(mixComposition)
        } catch _ {
            failure("audio track insert failed!")
        }
    } else {
        failure("audio track is not available!")
    }
}
My goal is to merge audio (MP3 music) with video captured by the iPhone camera. I am able to merge audio with video using AVMutableComposition, but there is no sound in the final output video.
Below is the code I am using:
open func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)
    }

    let time = CMTimeMakeWithSeconds(Float64(musicTrimmerController.currentPlayerPosition), 1000)

    if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first,
       let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
        do {
            try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
            try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(time, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
        } catch {
            print(error)
        }

        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

        if let compositionV = mixComposition.tracks(withMediaType: AVMediaType.video).last {
            compositionV.preferredTransform = aVideoAssetTrack.preferredTransform
        }
    }

    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("movie.mov")
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }

        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            /// try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .failed:
                    print(exportSession.error as Any)
                case .cancelled:
                    print(exportSession.error as Any)
                default:
                    print("Save video output")
                }
            })
        }
    }
}
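One hedged thing to check when the exported video is silent: if `time` plus the video's duration runs past the end of the MP3, the audio insertTimeRange call can fail and land in the catch block above, leaving the composition's audio track empty. A sketch (current CMTime API; the names `time`, `aAudioAsset`, `aAudioAssetTrack` and `mutableCompositionAudioTrack` come from the code above) that clamps the requested range before inserting, inside the same do/catch:

// Hedged sketch: only insert the part of the audio that actually exists.
let requestedRange = CMTimeRange(start: time, duration: aVideoAssetTrack.timeRange.duration)
let availableRange = CMTimeRange(start: .zero, duration: aAudioAsset.duration)
let safeRange = requestedRange.intersection(availableRange)
try mutableCompositionAudioTrack.first?.insertTimeRange(safeRange, of: aAudioAssetTrack, at: .zero)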
I am new to Swift. I need to remove the audio from video files and play them via URL. I have gone through link1 & link2, but there were many errors when I tried to convert them to Swift.
Any help would be greatly appreciated.
Swift 4.2
var mutableVideoURL: URL! // final video url

func removeAudioFromVideo(_ videoURL: URL) {
    let inputVideoURL: URL = videoURL
    let sourceAsset = AVURLAsset(url: inputVideoURL)
    let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaType.video)[0]
    let composition: AVMutableComposition = AVMutableComposition()
    let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionVideoTrack!.preferredTransform = sourceVideoTrack!.preferredTransform
    let x: CMTimeRange = CMTimeRangeMake(start: CMTime.zero, duration: sourceAsset.duration)
    _ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: CMTime.zero)

    mutableVideoURL = documentsURL.appendingPathComponent("pppppppppp.mp4")
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
    exporter.outputFileType = AVFileType.mp4
    exporter.outputURL = mutableVideoURL
    removeFileAtURLIfExists(url: mutableVideoURL)
    exporter.exportAsynchronously(completionHandler: {
        switch exporter.status {
        case AVAssetExportSession.Status.failed:
            print("1000000000failed \(exporter.error)")
        case AVAssetExportSession.Status.cancelled:
            print("1000000000cancelled \(exporter.error)")
        case AVAssetExportSession.Status.unknown:
            print("1000000000unknown\(exporter.error)")
        case AVAssetExportSession.Status.waiting:
            print("1000000000waiting\(exporter.error)")
        case AVAssetExportSession.Status.exporting:
            print("1000000000exporting\(exporter.error)")
        default:
            print("1000000000-----Mutable video exportation complete.")
        }
    })
}
func removeFileAtURLIfExists(url: URL) {
    let filePath = url.path
    let fileManager = FileManager.default
    if fileManager.fileExists(atPath: filePath) {
        do {
            try fileManager.removeItem(atPath: filePath)
        } catch {
            print("Couldn't remove existing destination file: \(error)")
        }
    }
}
Note: the addition of this line
compositionVideoTrack?.preferredTransform = sourceVideoTrack!.preferredTransform
preserves the orientation of the video.
With the help of this link I wrote the code below, and it worked for me...
var mutableVideoURL = NSURL() // final video url

func removeAudioFromVideo(_ videoURL: URL) {
    let inputVideoURL: URL = videoURL
    let sourceAsset = AVURLAsset(url: inputVideoURL)
    let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let composition: AVMutableComposition = AVMutableComposition()
    let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let x: CMTimeRange = CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)
    _ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: kCMTimeZero)

    mutableVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/FinalVideo.mp4")
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.outputURL = mutableVideoURL as URL
    removeFileAtURLIfExists(url: mutableVideoURL)
    exporter.exportAsynchronously(completionHandler: {
        switch exporter.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(exporter.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(exporter.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(exporter.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(exporter.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(exporter.error)")
        default:
            print("-----Mutable video exportation complete.")
        }
    })
}
func removeFileAtURLIfExists(url: NSURL) {
    if let filePath = url.path {
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("Couldn't remove existing destination file: \(error)")
            }
        }
    }
}
I'm trying to merge N audio tracks with a video file.
The video is in MP4 format, and all the audio files are M4A.
All the preparation works well, but when the export finishes, it always fails.
Here's my code:
func mixAudioAndVideo() {
    self.player?.pause()
    let mixComposition = AVMutableComposition()
    let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
    let docsDirect = paths[0]

    for audioTrack in self.audioTracks {
        let musicFile = docsDirect.URLByAppendingPathComponent(audioTrack.audioName)
        let audioAsset = AVURLAsset(URL: musicFile!, options: nil)
        let audioTimeRange = CMTimeRangeMake(audioTrack.audioTime!, audioAsset.duration)
        let compositionAudioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            try compositionAudioTrack.insertTimeRange(audioTimeRange, ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio).first!, atTime: audioTrack.audioTime!)
        } catch let error {
            print(error)
        }
    }

    let videoAsset = AVURLAsset(URL: video!.movieURL, options: nil)
    let videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
    let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try compositionVideoTrack.insertTimeRange(videoTimeRange, ofTrack: videoAsset.tracksWithMediaType(AVMediaTypeVideo).first!, atTime: kCMTimeZero)
    } catch let error {
        print(error)
    }

    let videoName = "video\(self.audioTracks.count).mov"
    let outputFilePath = docsDirect.URLByAppendingPathComponent(videoName)
    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    assetExport!.outputFileType = AVFileTypeQuickTimeMovie
    assetExport!.outputURL = outputFilePath!
    assetExport?.exportAsynchronouslyWithCompletionHandler({
        dispatch_async(dispatch_get_main_queue()) {
            print("finished exporting \(outputFilePath)")
            print("status \(assetExport?.status)")
            print("error \(assetExport?.error)")
            SVProgressHUD.dismiss()
        }
    })
}
And the error I get:
error Optional(Error Domain=NSURLErrorDomain Code=-1 "unknown error" UserInfo={NSLocalizedDescription=unknown error, NSUnderlyingError=0x170056140 {Error Domain=NSOSStatusErrorDomain Code=-12935 "(null)"}})
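One hedged thing to check before digging deeper: AVAssetExportSession fails rather than overwrite an existing file, so if a previous export already sits at outputFilePath the session will always end in .failed. The Swift 3 answer below clears the destination with removeFileAtURLIfExists; the same idea in current Swift (outputURL stands in for the question's outputFilePath):

// Hedged sketch: make sure nothing already exists at the export destination
// before calling exportAsynchronously.
if FileManager.default.fileExists(atPath: outputURL.path) {
    try? FileManager.default.removeItem(at: outputURL)
}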
Swift: 3
First, merge the N audio tracks:
var mergeAudioURL = NSURL()

func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()

    for i in 0 ..< audioFileUrls.count {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
    }

    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("Merge Audio.m4a")! as URL as NSURL
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = mergeAudioURL as URL
    removeFileAtURLIfExists(url: mergeAudioURL)
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        default:
            print("-----Merge audio exportation complete.\(self.mergeAudioURL)")
        }
    })
}
Then merge the audio with the video:
var mergedAudioVideoURl = NSURL()

func mergeMutableVideoWithAudio(videoUrl: NSURL, audioUrl: NSURL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    // start merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl as URL)
    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)
    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

    mergedAudioVideoURl = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/FinalVideo.mp4")
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = mergedAudioVideoURl as URL
    removeFileAtURLIfExists(url: mergedAudioVideoURl)
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("-----Merge mutable video with trimmed audio exportation complete.\(self.mergedAudioVideoURl)")
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
        }
    }
}
func removeFileAtURLIfExists(url: NSURL) {
    if let filePath = url.path {
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("-----Couldn't remove existing destination file: \(error)")
            }
        }
    }
}
My requirement is that the user has multiple clips and the app offers three features: the user can make a clip slow motion, make it faster, or leave it at normal speed. After changing the speeds, the user can merge the clips into a single video and save it to the device.
For example: clip 1, clip 2 and clip 3 are recorded at normal speed; clip 1 is then converted to slow motion, clip 2 stays at normal speed, and clip 3 is sped up. When the user merges them, the three clips are combined into one video that can be shared on social networks.
Recording video with AVFoundation or selecting video from the gallery:
func convertVideoWithSpeed(completion: () -> ()) {
    if RecordedSegment.segments.count > 0 {
        self.exportVideoWithMode(RecordedSegment.segments.first!, title: "clip_\(counter).mp4", completion: { [unowned self] (path) in
            RecordedSegment.segments.removeFirst()
            self.counter = self.counter + 1
            self.mergedVideArray.append("clip_\(self.counter).mp4")
            self.convertVideoWithSpeed(completion)
        })
    } else {
        var arr1 = [NSURL]()
        for track in self.mergedVideArray {
            let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
            var finalURL = documentsURL.URLByAppendingPathComponent(track)
            arr1.append(finalURL)
        }
        self.mergeVideos(self.mergedVideArray, completion: {
            completion()
        })
    }
}
Converting video frame rates for different clips
func exportVideoWithMode(segment: RecordedSegment, title: String, completion: (path: String) -> ()) {
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let startTimer = kCMTimeZero
    print(RecordedSegment.segments)

    var size = CGSizeZero
    let astTrack = AVAsset(URL: NSURL(string: segment.path!)!)
    size = astTrack.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize

    do {
        try videoTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: startTimer)
        try audioTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeAudio)[0], atTime: startTimer)
    } catch _ {
        print("Failed to load first track")
    }

    let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let finalURL = documentsURL.URLByAppendingPathComponent(title)

    let instruction = AVMutableVideoCompositionInstruction()
    let layerInstruct = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruct]
    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    if segment.mode == .Slow {
        videoComposition.frameDuration = CMTimeMake(1, 15)
    } else if segment.mode == .Fast {
        videoComposition.frameDuration = CMTimeMake(1, 30)
    } else {
        videoComposition.frameDuration = CMTimeMake(1, 30)
    }
    videoComposition.renderSize = size
    videoComposition.renderScale = 1
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, astTrack.duration)

    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = finalURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = videoComposition

    if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtURL(finalURL)
        } catch {
        }
    }

    // 6 - Perform the Export
    exportSession.exportAsynchronouslyWithCompletionHandler() {
        let error = exportSession.error?.code
        print(exportSession.error)
        if exportSession.status == .Cancelled {
            print("Export was cancelled")
            GlobalUtility.hideActivityIndi(self)
        } else if exportSession.status == .Completed {
            print("completed")
            let asset = AVAsset(URL: finalURL)
            let track = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
            print("==============\(track.nominalFrameRate)")
            completion(path: finalURL.path!)
        } else if error == nil {
            completion(path: finalURL.path!)
        } else {
            if exportSession.status == .Cancelled {
                print("Export was cancelled")
                GlobalUtility.hideActivityIndi(self)
            }
            GlobalUtility.hideActivityIndi(self)
        }
    }
}
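Note that frameDuration only sets the frame rate at which the AVMutableVideoComposition is rendered; on its own it does not make a clip play slower or faster. The usual way to retime a clip is scaleTimeRange(_:toDuration:) on the composition. A hedged sketch in current Swift (the factor values are illustrative, not taken from the project above):

// Hedged sketch: stretch or compress the composition's timeline to change
// playback speed. A factor > 1 slows the clip down, a factor < 1 speeds it up.
func applySpeed(factor: Double, to composition: AVMutableComposition) {
    let originalDuration = composition.duration
    let scaledDuration = CMTimeMultiplyByFloat64(originalDuration, multiplier: factor)
    composition.scaleTimeRange(CMTimeRange(start: .zero, duration: originalDuration),
                               toDuration: scaledDuration)
}

For example, applySpeed(factor: 2.0, to: mixComposition) would give a slow-motion clip and 0.5 a fast one, before handing the composition to the export session.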
Merging them into one video:
func mergeVideos(mergePaths: [String], completion: () -> ()) {
    var count = 0
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var startTimer = kCMTimeZero
    print(RecordedSegment.segments)

    for track in mergePaths {
        let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
        var finalURL = documentsURL.URLByAppendingPathComponent(track)
        if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
            let astTrack = AVAsset(URL: finalURL)
            let video = astTrack.tracksWithMediaType(AVMediaTypeVideo)
            let audio = astTrack.tracksWithMediaType(AVMediaTypeAudio)
            if audio.count > 0 && video.count > 0 {
                do {
                    try videoTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: video[0], atTime: startTimer)
                    try audioTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: audio[0], atTime: startTimer)
                    startTimer = (videoTrack.asset?.duration)!
                } catch _ {
                    print("Failed to load first track")
                }
            } else {
                print("tracks not exist")
            }
        } else {
            print("file not exist")
        }
    }

    //let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let finalURL = self.recordSession.outputUrl
    count = count + 1

    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = finalURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true

    if NSFileManager.defaultManager().fileExistsAtPath(self.recordSession.outputUrl.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtURL(self.recordSession.outputUrl)
        } catch {
        }
    }

    // 6 - Perform the Export
    exportSession.exportAsynchronouslyWithCompletionHandler() {
        let error = exportSession.error?.code
        print(exportSession.error)
        if exportSession.status == .Cancelled {
            print("Export was cancelled")
            GlobalUtility.hideActivityIndi(self)
        } else if exportSession.status == .Completed {
            print("completed")
            completion()
        } else if error == nil {
            completion()
        } else {
            if exportSession.status == .Cancelled {
                print("Export was cancelled")
                GlobalUtility.hideActivityIndi(self)
            }
            GlobalUtility.hideActivityIndi(self)
        }
    }
}
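One detail worth double checking in a sequential merge like the one above: insertTimeRange expects the time range within the source asset (normally starting at zero) plus a separate insertion point in the composition. A hedged sketch of that pattern in current Swift (clipURLs is a placeholder name, not from the project above):

// Hedged sketch: append each clip's full video track end-to-end.
func appendClips(_ clipURLs: [URL], into composition: AVMutableComposition) throws {
    guard let videoTrack = composition.addMutableTrack(withMediaType: .video,
                                                       preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    for url in clipURLs {
        let asset = AVURLAsset(url: url)
        guard let sourceTrack = asset.tracks(withMediaType: .video).first else { continue }
        // The source range starts at .zero; the clip is placed at the composition's current end.
        let sourceRange = CMTimeRange(start: .zero, duration: asset.duration)
        try videoTrack.insertTimeRange(sourceRange, of: sourceTrack, at: composition.duration)
    }
}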