I want the user to play audio, change the volume in some parts, and then save that file with the new levels of volume.
I changed the volume of an audio file with AVAssetExportSession and AVMutableAudioMixInputParameters and it worked. The problem is that I need to create an audio loop from this audio, so first I create the loop and then I need to change the volume.
Here is my code
// Audio file extension used for all bundled and generated clips.
let type = "m4a"
// Destination for the final mixed audio file.
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
// Destination for the generated background loop (replaced with the bundle URL
// when no looping is needed — see viewDidLoad).
var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
// Loads the main and background clips, computes how many repetitions of the
// background clip are needed to cover the main clip, then runs the
// loop -> fade -> mix pipeline.
override func viewDidLoad() {
super.viewDidLoad()
playButotn.isEnabled = false
// NOTE(review): both bundle lookups are force-unwrapped below; a missing
// resource crashes here — confirm "Audios" is in the bundle.
let mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
let mainDurations = AVAsset(url: mainUrl!).duration
let secondAudioUrl = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
if mainDurations > backgroundDuration {
// Whole repetitions, plus one more to cover any remainder.
var times = Int(mainDurations.seconds / backgroundDuration.seconds)
let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
if rem > 0 {
times = times + 1
}
createLoopAudio(times: times) {
self.createFade {
self.createMix(mainUrl: mainUrl!, backgroundUrl: self.backgroundUrl)
}
}
}else {
// The background clip already covers the main clip; use it straight from the bundle.
backgroundUrl = secondAudioUrl!
createMix(mainUrl: mainUrl!, backgroundUrl: backgroundUrl)
}
}
// Mixes the main clip and the background clip onto two parallel tracks (both
// clamped to the main clip's duration) and exports the result to
// `documentsDirectory`, enabling the play button on success.
// NOTE(review): the output file is named .m4a but outputFileType is MPEG-4 and
// the preset is Passthrough; Passthrough often cannot write a multi-track
// composition — if this export fails, try the AppleM4A preset and file type
// (as createFade uses).
func createMix(mainUrl: URL, backgroundUrl: URL){
let composition = AVMutableComposition()
let mainAsset = AVAsset(url: mainUrl)
let backgroundAsset = AVAsset(url: backgroundUrl)
let mainDurations = AVAsset(url: mainUrl).duration
let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
do {
try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
}catch {
// Insertion failures are only logged; the export below still runs.
print(error.localizedDescription)
}
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport?.outputFileType = AVFileTypeMPEG4
assetExport?.outputURL = documentsDirectory
assetExport?.shouldOptimizeForNetworkUse = true
// The exporter will not overwrite an existing file.
if FileManager.default.fileExists(atPath: documentsDirectory.path) {
try! FileManager.default.removeItem(atPath: documentsDirectory.path)
}
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport!.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport?.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport?.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(assetExport?.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(assetExport?.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(assetExport?.error)")
case AVAssetExportSessionStatus.completed:
print("complete")
// UI updates must happen on the main queue.
DispatchQueue.main.async {
self.playButotn.isEnabled = true
}
}
})
}
/// Builds a looped background track by concatenating the "walk" clip `times`
/// times into a composition and exporting it to `backgroundUrl`.
/// - Parameters:
///   - times: Number of repetitions of the background clip.
///   - completion: Called from the export session's callback queue on success.
func createLoopAudio(times: Int, completion: @escaping () -> Void) {
    let composition = AVMutableComposition()
    var nextTimeStartClip = kCMTimeZero
    // The source clip is the same every iteration; load it once instead of
    // re-creating the URL and asset inside the loop.
    let url = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
    let audioAsset = AVAsset(url: url!)
    let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    for _ in 1...times {
        print("tracks walk \(audioAsset.tracks.count)")
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: nextTimeStartClip)
        } catch {
            print(error.localizedDescription)
        }
        nextTimeStartClip = CMTimeAdd(nextTimeStartClip, audioAsset.duration)
    }
    // Fix: Passthrough cannot re-encode a multi-track composition into a
    // single stream; use the Apple M4A preset/file type (matching createFade)
    // so the .m4a export can succeed.
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    // The exporter will not overwrite an existing file.
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("loop complete")
            completion()
        }
    })
}
/// Applies a stepped fade-in/fade-out volume ramp to the looped background
/// track and re-exports it in place at `backgroundUrl`.
/// - Parameter completion: Called from the export callback on success.
func createFade(completion: @escaping () -> Void) {
    let audioAsset = AVAsset(url: backgroundUrl)
    // Build the composition first: the mix parameters must target the
    // composition's track, not the source asset's track, otherwise the
    // exporter ignores the ramp entirely.
    let composition = AVMutableComposition()
    print("tracks loop \(audioAsset.tracks.count)")
    let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    do {
        try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
    } catch {
        print(error.localizedDescription)
    }
    let exportAudioMix = AVMutableAudioMix()
    let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
    let start = 2
    let length = 3
    // Step the volume up around `start` and back down around `start + length`.
    // NOTE(review): with length = 3, `start + length - 2` (3s) precedes
    // `start + 2` (4s), so the ramp points are not monotonically increasing —
    // confirm the intended fade window.
    exportAudioMixInputParameters.setVolume(0.0, at: CMTimeMakeWithSeconds(Float64(start - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + 1), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + 2), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + length - 2), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + length - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start + length), 1))
    exportAudioMix.inputParameters = [exportAudioMixInputParameters]
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    // Fix: the audio mix was built but never attached to the export session,
    // so the fade was silently dropped from the exported file.
    assetExport?.audioMix = exportAudioMix
    // The exporter will not overwrite an existing file.
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("faded complete")
            completion()
        }
    })
}
Well, Found a way and works fine: I have 2 audios; A and B, B lasts less than A, so I create a loop of B to fit A, Then I play 2 audios at same time and with a slider I modify the volume of b. Finally I save this volume configuration and mix the audios. This is my code:
import UIKit
import AVFoundation
/// Plays a main ("podcast") track alongside a looped background track, records
/// the user's live slider volume changes during playback, then bakes those
/// changes into the background track and mixes both into a single file.
/// (Restores `@IBOutlet`/`@IBAction`/`@escaping`, which were corrupted to
/// `#IBOutlet`/`#IBAction`/`#escaping` and would not compile.)
class ViewController: UIViewController {

    @IBOutlet weak var playButotn: UIButton!
    // Plays the background loop (and, later, the final mix).
    var player: AVAudioPlayer?
    // Plays the main track.
    var podcastPlayer: AVAudioPlayer?
    // Repetitions of the background clip needed to cover the main track.
    var times = 0
    @IBOutlet weak var volumeSlider: UISlider!
    // Volume changes captured while the user drags the slider during playback.
    var volumeRanges = [VolumeRange]()
    // Audio file extension for bundled and generated clips.
    let type = "m4a"
    // Destination of the final mixed file.
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
    // Destination of the generated background loop.
    var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
    // Bundle URL of the main track (set in viewDidLoad).
    var mainUrl: URL?

    // Decides whether the background clip must be looped to cover the main
    // track; if so, builds the loop, otherwise mixes directly.
    override func viewDidLoad() {
        super.viewDidLoad()
        mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
        playButotn.isEnabled = false
        volumeSlider.value = 1
        let mainDurations = AVAsset(url: mainUrl!).duration
        print(AVAsset(url: mainUrl!).duration.seconds)
        let secondAudioUrl = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
        if mainDurations > backgroundDuration {
            // Whole repetitions, plus one more to cover any remainder.
            times = Int(mainDurations.seconds / backgroundDuration.seconds)
            let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
            if rem > 0 {
                times = times + 1
            }
            createLoop(times: times) {
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        } else {
            backgroundUrl = secondAudioUrl!
            createMix()
        }
    }

    // Lays the main track and the (looped, volume-baked) background track on
    // two parallel tracks, both clamped to the main track's duration, and
    // exports the mix to `documentsDirectory`.
    func createMix() {
        let composition = AVMutableComposition()
        let mainAsset = AVAsset(url: mainUrl!)
        let backgroundAsset = AVAsset(url: backgroundUrl)
        let mainDurations = AVAsset(url: mainUrl!).duration
        let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
        do {
            try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
            try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = documentsDirectory
        assetExport?.shouldOptimizeForNetworkUse = true
        // The exporter will not overwrite an existing file.
        if FileManager.default.fileExists(atPath: documentsDirectory.path) {
            try! FileManager.default.removeItem(atPath: documentsDirectory.path)
        }
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("mix complete")
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        })
    }

    // Builds the background loop at the sample level: reads the clip's PCM
    // data (via the JUMAudioFile helper), repeats it `times` times, and
    // writes the result to `backgroundUrl`.
    func createLoop(times: Int, completion: @escaping () -> Void) {
        let urlFondo = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let acentoFile = try! JUMAudioFile(forReading: urlFondo!)
        let acentoPCM = acentoFile.getPCMArrayBufferFromURL()
        let (_, acentoCompleteData) = JUMAudioFile.convertToPoints(arrayFloatValues: acentoPCM)
        var newDraft = [Float]()
        for _ in 1...times {
            for array in acentoCompleteData {
                for fl in array {
                    newDraft.append(fl)
                }
            }
        }
        let _ = try! JUMAudioFile(createFileFromFloats: [newDraft], url: self.backgroundUrl)
        print("loop complete")
        completion()
    }

    // Re-exports the looped background track with the recorded volume changes
    // applied via AVMutableAudioMix, overwriting `backgroundUrl`.
    func createLoopAudioWithFade(completion: @escaping () -> Void) {
        let composition = AVMutableComposition()
        let exportAudioMix = AVMutableAudioMix()
        var exportAudioMixInputParametersArry = [AVMutableAudioMixInputParameters]()
        let audioAsset = AVAsset(url: self.backgroundUrl)
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }
        // The mix parameters target the composition's track — required for the
        // exporter to honor the volume ramp.
        let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
        for ranges in volumeRanges {
            exportAudioMixInputParameters.setVolume(ranges.volume!, at: CMTimeMakeWithSeconds(ranges.start!, 50000))
        }
        exportAudioMixInputParametersArry.append(exportAudioMixInputParameters)
        exportAudioMix.inputParameters = exportAudioMixInputParametersArry
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = backgroundUrl
        assetExport?.shouldOptimizeForNetworkUse = true
        // The exporter will not overwrite an existing file.
        if FileManager.default.fileExists(atPath: backgroundUrl.path) {
            try! FileManager.default.removeItem(atPath: backgroundUrl.path)
        }
        assetExport?.audioMix = exportAudioMix
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("fade complete")
                completion()
            }
        })
    }

    // Plays the background and main tracks simultaneously so the user can
    // adjust the background volume live.
    @IBAction func play(_ sender: Any) {
        do {
            player = try AVAudioPlayer(contentsOf: backgroundUrl)
            player?.prepareToPlay()
            player?.volume = 1.0
            podcastPlayer = try AVAudioPlayer(contentsOf: mainUrl!)
            podcastPlayer?.prepareToPlay()
            podcastPlayer?.volume = 1
            podcastPlayer?.play()
            player?.play()
        } catch {
            print(error.localizedDescription)
        }
    }

    // Applies the slider value to the background player and records the
    // (volume, time) pair so it can be baked into the file later.
    @IBAction func changeVolume(_ sender: UISlider) {
        if (player?.isPlaying)! {
            player?.volume = sender.value
            let volumeRange = VolumeRange()
            volumeRange.volume = sender.value
            volumeRange.start = player?.currentTime
            volumeRanges.append(volumeRange)
        }
    }

    @IBAction func touchUp(_ sender: UISlider) {
    }

    @IBAction func touchUpOutside(_ sender: UISlider) {
        print("ouside")
    }

    // Bakes the recorded volume changes into the background track, then mixes
    // it with the main track.
    @IBAction func generar(_ sender: Any) {
        playButotn.isEnabled = false
        self.createLoopAudioWithFade() {
            self.createMix()
        }
    }

    // Plays the final mixed file.
    @IBAction func playMix(_ sender: UIButton) {
        do {
            player = try AVAudioPlayer(contentsOf: documentsDirectory)
            player?.prepareToPlay()
            player?.volume = 1.0
            player?.play()
        } catch {
        }
    }
}
/// A volume level captured at a moment of playback; a sequence of these is
/// replayed as an AVMutableAudioMix ramp when exporting.
class VolumeRange {
    /// Playback position, in seconds, at which the volume was set.
    var start: Double?
    /// Volume level (0.0–1.0) chosen by the user.
    var volume: Float?
}
Related
Please help me — I am stuck here.
My audio and video come from the Documents directory. Everything works until I go to save the video into Photos; then this error happens and the video is not saved.
failed Optional(Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x17044a2f0 {Error Domain=NSOSStatusErrorDomain Code=-12842 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-12842), NSLocalizedDescription=The operation could not be completed})
here is my func
// Scans the app's Documents directory and splits its contents into the video
// (.mov) and audio (.caf) URL lists stored on the instance.
// NOTE(review): results are stored as [AnyObject] and force-cast back by
// callers; typed [URL] properties would be safer — confirm declared types.
func getData(){
let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
do {
// Get the directory contents urls (including subfolders urls)
let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
print(directoryContents)
// if you want to filter the directory contents you can do like this:
videoUrlforMarge = directoryContents.filter{ $0.pathExtension == "mov" } as [AnyObject]
//videoUrlforMarge.append(directoryContents[1] as AnyObject)
print("this video \(videoUrlforMarge[0])")
audioUrl = directoryContents.filter{ $0.pathExtension == "caf" } as [AnyObject]
} catch let error as NSError {
print(error.localizedDescription)
}
}
here is my merge func
/// Merges the first video track of `videoUrl` with the first audio track of
/// `audioUrl` (both clamped to the video's duration) into a temporary .mp4,
/// then saves the result to the photo album.
func mergeFilesWithUrl(videoUrl: NSURL, audioUrl: NSURL)
{
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    //start merge
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl as URL)
    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
    // Bail out early if either asset lacks the expected track.
    guard aVideoAsset.tracks(withMediaType: AVMediaTypeVideo).count > 0 && aAudioAsset.tracks(withMediaType: AVMediaTypeAudio).count > 0 else {
        return
    }
    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
    do {
        // Both insertions use the video's duration so the output ends with the picture.
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
        // Fix: was an empty catch that continued with a broken composition.
        print("mergeFilesWithUrl: track insertion failed - \(error.localizedDescription)")
        return
    }
    // Fix: build the output location as a real file URL. The original code
    // round-tripped through absoluteString ("file://..."), which made the
    // fileExists(atPath:) check always fail, so stale output was never removed
    // and the export could fail. (Unused AVMutableVideoCompositionInstruction /
    // AVMutableVideoComposition objects — never attached to the export — removed.)
    let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent("mergeVideo\(arc4random() % 1000)d")
        .appendingPathExtension("mp4")
    if FileManager.default.fileExists(atPath: outputURL.path) {
        try? FileManager.default.removeItem(at: outputURL)
    }
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = outputURL
    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status
        {
        case AVAssetExportSessionStatus.completed:
            DispatchQueue.main.async(execute: {
                do
                {
                    self.userreponsevideoData = try NSData(contentsOf: outputURL, options: NSData.ReadingOptions())
                    print("MB - \(self.userreponsevideoData.length) byte")
                    // NOTE(review): ALAssetsLibrary is deprecated; PHPhotoLibrary
                    // is the modern replacement for saving to Photos.
                    let assetsLib = ALAssetsLibrary()
                    assetsLib.writeVideoAtPath(toSavedPhotosAlbum: outputURL, completionBlock: nil)
                }
                catch
                {
                    print(error)
                }
            })
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
        }
    }
}
And in another function I call the merge function:
// Precondition: getData() must already have populated both arrays.
guard videoUrlforMarge.count > 0 && audioUrl.count > 0 else{
return
}
let videoUrl = videoUrlforMarge[0]
// NOTE(review): `.absoluteString!!` double-unwraps a value pulled from an
// [AnyObject] array; a non-URL element crashes here. It also converts a URL
// string (with scheme) into a file path — confirm this round-trip is intended.
let url = NSURL(fileURLWithPath: videoUrl.absoluteString!!)
let audio = audioUrl[0]
let urla = NSURL(fileURLWithPath: audio.absoluteString!!)
self.mergeFilesWithUrl(videoUrl: url as NSURL , audioUrl:
urla as NSURL
I am new to Swift. I need to remove the audio from video files and play them via URL. I have gone through link1 & link2, but there were many errors when I tried to convert them to Swift.
Any help would be greatly appreciated.
Swift 4.2
// Final destination of the audio-stripped video export.
var mutableVideoURL: URL! //final video url

/// Re-exports `videoURL` with only its video track (audio removed) to
/// Documents/pppppppppp.mp4, preserving the source orientation.
/// Failures are logged; nothing is thrown to the caller.
func removeAudioFromVideo(_ videoURL: URL) {
    let sourceAsset = AVURLAsset(url: videoURL)
    // Fix: guard instead of force-indexing [0] — an asset without a video
    // track (audio-only or unreadable file) no longer crashes.
    guard let sourceVideoTrack = sourceAsset.tracks(withMediaType: AVMediaType.video).first else {
        print("removeAudioFromVideo: no video track in \(videoURL)")
        return
    }
    let composition: AVMutableComposition = AVMutableComposition()
    guard let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
        print("removeAudioFromVideo: could not create composition track")
        return
    }
    // Preserve orientation; without this, portrait videos export rotated.
    compositionVideoTrack.preferredTransform = sourceVideoTrack.preferredTransform
    let fullRange: CMTimeRange = CMTimeRangeMake(start: CMTime.zero, duration: sourceAsset.duration)
    do {
        // Fix: was `try?`, which silently ignored insertion failures.
        try compositionVideoTrack.insertTimeRange(fullRange, of: sourceVideoTrack, at: CMTime.zero)
    } catch {
        print("removeAudioFromVideo: insertTimeRange failed - \(error.localizedDescription)")
        return
    }
    mutableVideoURL = documentsURL.appendingPathComponent("pppppppppp.mp4")
    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
        print("removeAudioFromVideo: could not create export session")
        return
    }
    exporter.outputFileType = AVFileType.mp4
    exporter.outputURL = mutableVideoURL
    // The exporter will not overwrite an existing file.
    removeFileAtURLIfExists(url: mutableVideoURL)
    exporter.exportAsynchronously(completionHandler:
    {
        switch exporter.status
        {
        case AVAssetExportSession.Status.failed:
            print("1000000000failed \(String(describing: exporter.error))")
        case AVAssetExportSession.Status.cancelled:
            print("1000000000cancelled \(String(describing: exporter.error))")
        case AVAssetExportSession.Status.unknown:
            print("1000000000unknown\(String(describing: exporter.error))")
        case AVAssetExportSession.Status.waiting:
            print("1000000000waiting\(String(describing: exporter.error))")
        case AVAssetExportSession.Status.exporting:
            print("1000000000exporting\(String(describing: exporter.error))")
        default:
            print("1000000000-----Mutable video exportation complete.")
        }
    })
}
/// Deletes the file at `url` if one exists; failures are logged, not rethrown.
func removeFileAtURLIfExists(url: URL) {
    let manager = FileManager.default
    let targetPath = url.path
    // Nothing to do when no file is present.
    guard manager.fileExists(atPath: targetPath) else { return }
    do {
        try manager.removeItem(atPath: targetPath)
    } catch {
        print("Couldn't remove existing destination file: \(error)")
    }
}
Note : the addition of this line
compositionVideoTrack?.preferredTransform = sourceVideoTrack!.preferredTransform
that preserve orientation of video
with the help of this link I wrote this code & this worked for me...
// Final destination of the audio-stripped export (Swift 3-era snippet).
var mutableVideoURL = NSURL() //final video url
// Copies only the video track of `videoURL` into a new composition and
// exports it to Documents/FinalVideo.mp4, effectively removing the audio.
// NOTE(review): unlike the Swift 4.2 version above, this one does not copy
// preferredTransform, so portrait videos may come out rotated.
func removeAudioFromVideo(_ videoURL: URL) {
let inputVideoURL: URL = videoURL
let sourceAsset = AVURLAsset(url: inputVideoURL)
// Force-indexing [0] crashes if the asset has no video track.
let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let composition : AVMutableComposition = AVMutableComposition()
let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let x: CMTimeRange = CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)
// try? silently discards insertion failures.
_ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: kCMTimeZero)
mutableVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/FinalVideo.mp4")
let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputFileType = AVFileTypeMPEG4
exporter.outputURL = mutableVideoURL as URL
// The exporter will not overwrite an existing file.
removeFileAtURLIfExists(url: mutableVideoURL)
exporter.exportAsynchronously(completionHandler:
{
switch exporter.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(exporter.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(exporter.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(exporter.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(exporter.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(exporter.error)")
default:
print("-----Mutable video exportation complete.")
}
})
}
/// Removes the file backing `url`, if any. A nil path (non-file NSURL) is a no-op;
/// deletion failures are logged, not rethrown.
func removeFileAtURLIfExists(url: NSURL) {
    guard let existingPath = url.path else { return }
    let fm = FileManager.default
    guard fm.fileExists(atPath: existingPath) else { return }
    do {
        try fm.removeItem(atPath: existingPath)
    } catch let error as NSError {
        print("Couldn't remove existing destination file: \(error)")
    }
}
I'm trying to merge N audio tracks into a video file.
The video is in MP4 format, and all the audio files are m4a.
All the preparation works well, but the export always fails when it finishes.
Here's my code:
// Pauses playback, lays every recorded audio clip plus the captured video onto
// one AVMutableComposition, and exports it to Documents as video<N>.mov
// (Swift 2-era API names).
func mixAudioAndVideo() {
self.player?.pause()
let mixComposition = AVMutableComposition()
let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
let docsDirect = paths[0]
// One composition track per recorded clip, inserted at the clip's own start
// time so clips can overlap the video timeline.
for audioTrack in self.audioTracks {
let musicFile = docsDirect.URLByAppendingPathComponent(audioTrack.audioName)
let audioAsset = AVURLAsset(URL: musicFile!, options: nil)
let audioTimeRange = CMTimeRangeMake(audioTrack.audioTime!, audioAsset.duration)
let compositionAudioTrack:AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try compositionAudioTrack.insertTimeRange(audioTimeRange, ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio).first!, atTime: audioTrack.audioTime!)
} catch let error {
print(error)
}
}
let videoAsset = AVURLAsset(URL: video!.movieURL, options: nil)
let videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try compositionVideoTrack.insertTimeRange(videoTimeRange, ofTrack: videoAsset.tracksWithMediaType(AVMediaTypeVideo).first!, atTime: kCMTimeZero)
} catch let error {
print(error)
}
let videoName = "video\(self.audioTracks.count).mov"
let outputFilePath = docsDirect.URLByAppendingPathComponent(videoName)
// NOTE(review): no removal of an existing file at outputFilePath before
// exporting; AVAssetExportSession will not overwrite, which is a likely
// cause of the reported export failure — confirm by deleting first.
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = outputFilePath!
assetExport?.exportAsynchronouslyWithCompletionHandler({
dispatch_async(dispatch_get_main_queue()){
// Logs the outcome unconditionally; success and failure are not
// handled separately.
print("finished exporting \(outputFilePath)")
print("status \(assetExport?.status)")
print("error \(assetExport?.error)")
SVProgressHUD.dismiss()
}
})
}
And the error I get:
error Optional(Error Domain=NSURLErrorDomain Code=-1 "unknown error" UserInfo={NSLocalizedDescription=unknown error, NSUnderlyingError=0x170056140 {Error Domain=NSOSStatusErrorDomain Code=-12935 "(null)"}})
Swift: 3
first merge N audio tracks
// Destination of the concatenated audio export.
var mergeAudioURL = NSURL()

/// Concatenates the audio files in `audioFileUrls` end-to-end (each clip on
/// its own composition track, appended at the running composition duration)
/// and exports the result to Documents/"Merge Audio.m4a"; the destination is
/// kept in `mergeAudioURL`.
func mergeAudioFiles(audioFileUrls: NSArray) {
    let composition = AVMutableComposition()
    for i in 0 ..< audioFileUrls.count {
        // kCMPersistentTrackID_Invalid (auto-assign) for consistency with the
        // other composition code in this file.
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
        let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        do {
            // Inserting at composition.duration appends each clip after the previous one.
            try compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        } catch {
            // Fix: was try! — a single unreadable clip no longer crashes the merge.
            print("mergeAudioFiles: could not insert clip \(i): \(error.localizedDescription)")
        }
    }
    let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
    self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("Merge Audio.m4a")! as URL as NSURL
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = mergeAudioURL as URL
    // The exporter will not overwrite an existing file.
    removeFileAtURLIfExists(url: mergeAudioURL)
    assetExport?.exportAsynchronously(completionHandler:
    {
        switch assetExport!.status
        {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        default:
            print("-----Merge audio exportation complete.\(self.mergeAudioURL)")
        }
    })
}
then merge audio with video
// Destination of the merged audio+video export.
var mergedAudioVideoURl = NSURL()
// Merges the first video track of `videoUrl` with the first audio track of
// `audioUrl` (e.g. the output of mergeAudioFiles) into Documents/FinalVideo.mp4.
func mergeMutableVideoWithAudio(videoUrl:NSURL, audioUrl:NSURL){
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
// NOTE(review): this instruction and the AVMutableVideoComposition below are
// configured but never attached to the export session, so they have no effect.
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(url: videoUrl as URL)
let aAudioAsset : AVAsset = AVAsset(url: audioUrl as URL)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
// Force-indexing [0] crashes when either asset lacks the expected track.
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do{
// Both insertions use the video's duration, so audio is clamped to it.
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
mergedAudioVideoURl = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/FinalVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = mergedAudioVideoURl as URL
// Remove any previous output; the exporter will not overwrite.
removeFileAtURLIfExists(url: mergedAudioVideoURl)
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
print("-----Merge mutable video with trimmed audio exportation complete.\(self.mergedAudioVideoURl)")
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
}
}
}
// Deletes the file at `url` if its path resolves and a file exists there;
// deletion failures are logged, not rethrown.
func removeFileAtURLIfExists(url: NSURL) {
if let filePath = url.path {
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath) {
do{
try fileManager.removeItem(atPath: filePath)
} catch let error as NSError {
print("-----Couldn't remove existing destination file: \(error)")
}
}
}
}
My requirement is that the user has multiple clips and the app offers three features: the user can make a clip slow motion, make it faster, and, after changing the speeds, merge the clips into a single video and save it to the device.
For example:
Clips 1, 2 and 3 are recorded at normal speed; clip 1 is then converted to slow motion, clip 2 stays at normal speed, and clip 3 is sped up. When the user merges them, the three clips are combined into one clip that can be shared on social networks.
recording video with AVFoundation or select video from gallery
// Recursively re-exports each pending RecordedSegment at its chosen speed as
// clip_<n>.mp4, then merges the generated clips into one video (Swift 2-era
// snippet).
// NOTE(review): mutates shared state (RecordedSegment.segments, counter,
// mergedVideArray) on every recursive step.
func convertVideoWithSpeed(completion:()->()) {
if RecordedSegment.segments.count > 0 {
self.exportVideoWithMode(RecordedSegment.segments.first!,title: "clip_\(counter).mp4", completion: { [unowned self] (path) in
RecordedSegment.segments.removeFirst()
self.counter = self.counter + 1
self.mergedVideArray.append("clip_\(self.counter).mp4")
self.convertVideoWithSpeed(completion)
})
} else {
var arr1 = [NSURL]()
// Resolve each exported clip name to a Documents-directory URL.
for track in self.mergedVideArray {
let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
var finalURL = documentsURL.URLByAppendingPathComponent(track)
arr1.append(finalURL)
}
// NOTE(review): arr1 is built but never used — mergeVideos receives the
// file-name array, not the URLs; confirm which the callee expects.
self.mergeVideos(self.mergedVideArray, completion: {
completion()
})
}
}
Converting video frame rates for different clips
// Exports one recorded segment to Documents/<title>, applying a frame duration
// chosen from the segment's speed mode, and reports the written path.
// - Parameter segment: the clip to export; `segment.path` must be a valid URL string.
// - Parameter title: output file name inside the Documents directory.
// - Parameter completion: called with the output path on success.
// NOTE(review): setting videoComposition.frameDuration alone does not retime a
// clip — real slow/fast motion normally needs scaleTimeRange(_:toDuration:) on
// the composition tracks. Confirm this produces the intended speed change.
func exportVideoWithMode(segment:RecordedSegment,title:String,completion:(path:String)->()) {
let mixComposition = AVMutableComposition()
// One video and one audio track in the output composition.
let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
let startTimer = kCMTimeZero
print(RecordedSegment.segments)
var size = CGSizeZero
// NOTE(review): NSURL(string:) assumes segment.path is a URL string, not a
// plain file path — verify against how segments are recorded.
let astTrack = AVAsset(URL: NSURL(string: segment.path!)!)
// Render at the source clip's natural size.
size = astTrack.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize
do {
// Copy the entire source video and audio into the composition at t = 0.
try videoTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeVideo)[0] , atTime: startTimer)
try audioTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeAudio)[0] , atTime: startTimer)
} catch _ {
print("Failed to load first track")
}
// Output lives in the app's Documents directory under `title`.
let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let finalURL = documentsURL.URLByAppendingPathComponent(title)
let instruction = AVMutableVideoCompositionInstruction()
let layerInstruct = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
instruction.layerInstructions = [layerInstruct]
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [instruction]
// Pick the frame duration by speed mode: 15 fps for slow, 30 fps otherwise.
if segment.mode == .Slow {
videoComposition.frameDuration = CMTimeMake(1, 15)
} else if segment.mode == .Fast {
videoComposition.frameDuration = CMTimeMake(1, 30)
} else {
videoComposition.frameDuration = CMTimeMake(1, 30)
}
videoComposition.renderSize = size
videoComposition.renderScale = 1
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, astTrack.duration)
guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
exportSession.outputURL = finalURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.shouldOptimizeForNetworkUse = true
exportSession.videoComposition = videoComposition
// Overwrite any earlier export with the same title.
if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
do {
try NSFileManager.defaultManager().removeItemAtURL(finalURL)
} catch {
}
}
// 6 - Perform the Export
exportSession.exportAsynchronouslyWithCompletionHandler() {
let error = exportSession.error?.code
print(exportSession.error)
if exportSession.status == .Cancelled {
print("Export was cancelled")
GlobalUtility.hideActivityIndi(self)
} else if exportSession.status == .Completed {
print("completed")
// Log the achieved frame rate of the exported file for debugging.
let asset = AVAsset(URL: finalURL)
let track = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
print("==============\(track.nominalFrameRate)")
completion(path: finalURL.path!)
} else if error == nil{
// NOTE(review): status is neither Completed nor Cancelled here but there is
// no error either — treated as success. Confirm this branch is intended.
completion(path: finalURL.path!)
}else{
if exportSession.status == .Cancelled {
print("Export was cancelled")
GlobalUtility.hideActivityIndi(self)
}
GlobalUtility.hideActivityIndi(self)
}
}
}
merging them to one video
// Concatenates the already-exported clips into one composition and exports it
// to recordSession.outputUrl.
// - Parameter mergePaths: file names inside the Documents directory, in play order.
// - Parameter completion: called when the merged file has been exported.
func mergeVideos(mergePaths:[String],completion:()->()) {
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    // Running insertion point: where the next clip starts in the composition.
    var startTimer = kCMTimeZero
    print(RecordedSegment.segments)
    for track in mergePaths {
        let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
        let finalURL = documentsURL.URLByAppendingPathComponent(track)
        if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
            let astTrack = AVAsset(URL: finalURL)
            let video = astTrack.tracksWithMediaType(AVMediaTypeVideo)
            let audio = astTrack.tracksWithMediaType(AVMediaTypeAudio)
            if audio.count > 0 && video.count > 0 {
                do {
                    // Take the WHOLE source clip (range starting at zero) and
                    // append it at the current end of the composition.
                    // BUG FIX: the old code used `startTimer` as the SOURCE range
                    // start too, which read past the end of every clip after the
                    // first one.
                    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, astTrack.duration), ofTrack: video[0] , atTime: startTimer)
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, astTrack.duration), ofTrack: audio[0] , atTime: startTimer)
                    // Next clip starts where the composition currently ends.
                    startTimer = (videoTrack.asset?.duration)!
                } catch _ {
                    print("Failed to load first track")
                }
            } else {
                print("file not exist")
            }
        } else {
            print("tracks not exist")
        }
    }
    let finalURL = self.recordSession.outputUrl
    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = finalURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true
    // Overwrite any previous merged output.
    if NSFileManager.defaultManager().fileExistsAtPath(self.recordSession.outputUrl.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtURL(self.recordSession.outputUrl)
        } catch {
        }
    }
    // 6 - Perform the Export
    exportSession.exportAsynchronouslyWithCompletionHandler() {
        let error = exportSession.error?.code
        print(exportSession.error)
        if exportSession.status == .Cancelled {
            print("Export was cancelled")
            GlobalUtility.hideActivityIndi(self)
        } else if exportSession.status == .Completed {
            print("completed")
            completion()
        } else if error == nil{
            // No terminal status but no error either — treated as success,
            // matching exportVideoWithMode's branching.
            completion()
        }else{
            if exportSession.status == .Cancelled {
                print("Export was cancelled")
                GlobalUtility.hideActivityIndi(self)
            }
            GlobalUtility.hideActivityIndi(self)
        }
    }
}
I am working on a task in which I have to trim a recorded video from a particular start point to a particular end point, as entered or selected by the user.
How am I supposed to do that? I used UIVideoEditorController before, but I don't want to use the default view — I want to trim the video directly.
// Trim the asset to [starttime, endTime] seconds and export it.
let FinalUrlTosave = NSURL(string: "\(newURL)")
// The -1100 ("URL not found") / -3000 ("Cannot create file") export errors
// occur when the destination directory is missing or a file already exists at
// the output URL — make sure the folder exists and clear any previous file.
if let savePath = FinalUrlTosave?.path {
    let manager = NSFileManager.defaultManager()
    _ = try? manager.createDirectoryAtPath((savePath as NSString).stringByDeletingLastPathComponent, withIntermediateDirectories: true, attributes: nil)
    _ = try? manager.removeItemAtPath(savePath)
}
exportSession!.outputURL = FinalUrlTosave
exportSession!.shouldOptimizeForNetworkUse = true
exportSession!.outputFileType = AVFileTypeQuickTimeMovie;
let st = starttime.doubleValue
let ed = endTime.doubleValue
let start = CMTimeMakeWithSeconds(st, 600)
// BUG FIX: CMTimeRangeMake takes (start, DURATION). The old code passed the
// absolute end time as the duration, so the range extended past the intended
// end point. The duration is end minus start.
let duration = CMTimeMakeWithSeconds(ed - st, 600)
let range = CMTimeRangeMake(start, duration);
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({
    switch exportSession!.status{
    case AVAssetExportSessionStatus.Failed:
        print("failed \(exportSession!.error)")
    case AVAssetExportSessionStatus.Cancelled:
        print("cancelled \(exportSession!.error)")
    default:
        print("complete....complete")
        // self.SaveVideoToPhotoLibrary(destinationURL1!)
    }
})
I am trying to achieve my goal using this but not succeeding.
Error message:
failed Optional(Error Domain=NSURLErrorDomain Code=-1100 "The
requested URL was not found on this server."
UserInfo={NSErrorFailingURLStringKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSErrorFailingURLKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSLocalizedDescription=The requested URL was not found on this
server., NSUnderlyingError=0x1553c220 {Error Domain=N
Error occured second time:
failed Optional(Error Domain=NSURLErrorDomain Code=-3000 "Cannot
create file" UserInfo={NSUnderlyingError=0x14e00000 {Error
Domain=NSOSStatusErrorDomain Code=-12124 "(null)"},
NSLocalizedDescription=Cannot create file})
I found my solution using this method and it works like a charm....
/// Trims the movie at `sourceURL1` to [statTime, endTime] seconds, writes the
/// result to Documents/output/<name>.mp4 and hands it to the timeline on success.
/// - Parameter sourceURL1: file URL of the source movie.
/// - Parameter statTime: trim start, in seconds.
/// - Parameter endTime: trim end, in seconds.
func cropVideo(sourceURL1: NSURL, statTime:Float, endTime:Float)
{
    let manager = NSFileManager.defaultManager()
    guard let documentDirectory = try? manager.URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) else {return}
    // Cleanup from the original: `"mp4" as? String` and `sourceURL1 as? NSURL`
    // were conditional casts of non-optionals (always succeed, compiler
    // warnings), and the surrounding `if` was therefore always taken.
    let asset = AVAsset(URL: sourceURL1)
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")
    var outputURL = documentDirectory.URLByAppendingPathComponent("output")
    do {
        // Ensure the output folder exists, then pick a fresh file name.
        try manager.createDirectoryAtURL(outputURL, withIntermediateDirectories: true, attributes: nil)
        let name = Moment.newName()
        outputURL = outputURL.URLByAppendingPathComponent("\(name).mp4")
    }catch let error {
        print(error)
    }
    //Remove existing file
    _ = try? manager.removeItemAtURL(outputURL)
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileTypeMPEG4
    // CMTimeRange(start:end:) derives the duration from the two absolute times.
    let startTime = CMTime(seconds: Double(statTime), preferredTimescale: 1000)
    let stopTime = CMTime(seconds: Double(endTime), preferredTimescale: 1000)
    exportSession.timeRange = CMTimeRange(start: startTime, end: stopTime)
    exportSession.exportAsynchronouslyWithCompletionHandler{
        switch exportSession.status {
        case .Completed:
            print("exported at \(outputURL)")
            self.saveVideoTimeline(outputURL)
        case .Failed:
            print("failed \(exportSession.error)")
        case .Cancelled:
            print("cancelled \(exportSession.error)")
        default: break
        }
    }
}
Swift 5
/// Trims the movie at `sourceURL1` to [statTime, endTime] seconds and exports
/// it as MP4 into Documents/output/.
/// - Parameters:
///   - sourceURL1: file URL of the source movie.
///   - statTime: trim start, in seconds.
///   - endTime: trim end, in seconds.
///   - completion: called with the output URL when the export succeeds.
///     Defaults to nil so existing call sites keep compiling unchanged
///     (matches the Swift 4 variant's callback style).
func cropVideo(sourceURL1: URL, statTime:Float, endTime:Float, completion: ((URL) -> Void)? = nil)
{
    let manager = FileManager.default
    guard let documentDirectory = try? manager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) else {return}
    let mediaType = "mp4"
    if mediaType == kUTTypeMovie as String || mediaType == "mp4" as String {
        let asset = AVAsset(url: sourceURL1 as URL)
        let length = Float(asset.duration.value) / Float(asset.duration.timescale)
        print("video length: \(length) seconds")
        let start = statTime
        let end = endTime
        var outputURL = documentDirectory.appendingPathComponent("output")
        do {
            // Ensure the output folder exists, then pick a unique file name.
            try manager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
            outputURL = outputURL.appendingPathComponent("\(UUID().uuidString).\(mediaType)")
        }catch let error {
            print(error)
        }
        //Remove existing file
        _ = try? manager.removeItem(at: outputURL)
        guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
        exportSession.outputURL = outputURL
        exportSession.outputFileType = .mp4
        let startTime = CMTime(seconds: Double(start ), preferredTimescale: 1000)
        let endTime = CMTime(seconds: Double(end ), preferredTimescale: 1000)
        let timeRange = CMTimeRange(start: startTime, end: endTime)
        exportSession.timeRange = timeRange
        exportSession.exportAsynchronously{
            switch exportSession.status {
            case .completed:
                print("exported at \(outputURL)")
                // Hand the result back so callers can actually use the file.
                completion?(outputURL)
            case .failed:
                print("failed \(exportSession.error)")
            case .cancelled:
                print("cancelled \(exportSession.error)")
            default: break
            }
        }
    }
}
A swift4 version for this.
/// Exports the [startTime, endTime] slice of `sourceURL` as an MP4 file in
/// Documents/output/ and reports the output URL on success.
/// - Parameters:
///   - sourceURL: file URL of the source movie.
///   - startTime: trim start, in seconds.
///   - endTime: trim end, in seconds.
///   - completion: invoked with the exported file's URL when the export completes.
static func cropVideo(sourceURL: URL, startTime: Double, endTime: Double, completion: ((_ outputUrl: URL) -> Void)? = nil)
{
    let fm = FileManager.default
    let docsDir = fm.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let sourceAsset = AVAsset(url: sourceURL)
    let length = Float(sourceAsset.duration.value) / Float(sourceAsset.duration.timescale)
    print("video length: \(length) seconds")

    var destination = docsDir.appendingPathComponent("output")
    do {
        try fm.createDirectory(at: destination, withIntermediateDirectories: true, attributes: nil)
        destination = destination.appendingPathComponent("\(sourceURL.lastPathComponent).mp4")
    } catch let error {
        print(error)
    }
    //Remove existing file
    try? fm.removeItem(at: destination)

    guard let session = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetHighestQuality) else { return }
    session.outputURL = destination
    session.outputFileType = .mp4
    session.timeRange = CMTimeRange(start: CMTime(seconds: startTime, preferredTimescale: 1000),
                                    end: CMTime(seconds: endTime, preferredTimescale: 1000))
    session.exportAsynchronously {
        switch session.status {
        case .completed:
            print("exported at \(destination)")
            completion?(destination)
        case .failed:
            print("failed \(session.error.debugDescription)")
        case .cancelled:
            print("cancelled \(session.error.debugDescription)")
        default: break
        }
    }
}
This one does the job and it fixes the rotation problem.
extension AVAsset {
/// Returns a new composition containing only the [startTime, endTime] slice
/// of every track in this asset, preserving each track's preferred transform
/// (which keeps the video's rotation intact).
/// - Throws: `TrimError` wrapping any failure from `insertTimeRange`.
func assetByTrimming(startTime: CMTime, endTime: CMTime) throws -> AVAsset {
    let sliceRange = CMTimeRange(start: startTime, duration: CMTimeSubtract(endTime, startTime))
    let trimmed = AVMutableComposition()
    do {
        for sourceTrack in tracks {
            let destinationTrack = trimmed.addMutableTrack(withMediaType: sourceTrack.mediaType, preferredTrackID: sourceTrack.trackID)
            // Carry the transform over so orientation survives the trim.
            destinationTrack?.preferredTransform = sourceTrack.preferredTransform
            try destinationTrack?.insertTimeRange(sliceRange, of: sourceTrack, at: CMTime.zero)
        }
    } catch let error {
        throw TrimError("error during composition", underlyingError: error)
    }
    return trimmed
}
// Error type thrown by assetByTrimming when building the trimmed composition fails.
struct TrimError: Error {
// Human-readable context; the initializer prefixes it with "TrimVideo: ".
let description: String
// The originating error (e.g. from insertTimeRange), if any.
let underlyingError: Error?
init(_ description: String, underlyingError: Error? = nil) {
self.description = "TrimVideo: " + description
self.underlyingError = underlyingError
}
}
/// Trims the movie at `sourceURL1` to [statTime, endTime] seconds, writes the
/// result to Documents/<timestamp>.mp4, and forwards the finished session to
/// `exportDidFinish` on the main queue.
/// - Parameter sourceURL1: file URL of the source movie.
/// - Parameter statTime: trim start, in seconds.
/// - Parameter endTime: trim end, in seconds.
func cropVideo1(_ sourceURL1: URL, statTime:Float, endTime:Float){
    let videoAsset: AVAsset = AVAsset(url: sourceURL1) as AVAsset
    let length = Float(videoAsset.duration.value)
    print(length)
    // Cleanup from the original: it built an AVMutableComposition and an
    // AVMutableVideoComposition (renderSize, frameDuration, instruction) that
    // were never attached to the export session — dead code, removed.
    let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
    // CMTimeRange(start:end:) derives the trim duration from the absolute times.
    let startTime = CMTime(seconds: Double(statTime), preferredTimescale: 1000)
    let stopTime = CMTime(seconds: Double(endTime), preferredTimescale: 1000)
    exportSession.timeRange = CMTimeRange(start: startTime, end: stopTime)
    // Unique, sortable output name inside Documents.
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
    let date = Date()
    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
    let outputPath = "\(documentsPath)/\(formatter.string(from: date)).mp4"
    exportSession.outputURL = URL(fileURLWithPath: outputPath)
    // BUG FIX: the file extension is .mp4, so the container type must be MPEG-4.
    // The original assigned AVFileTypeMPEG4 and then overwrote it with
    // AVFileTypeQuickTimeMovie, which mismatches the .mp4 path and fails.
    exportSession.outputFileType = AVFileTypeMPEG4
    print("sucess")
    exportSession.exportAsynchronously(completionHandler: { () -> Void in
        DispatchQueue.main.async(execute: {
            self.exportDidFinish(exportSession)
            print("sucess")
        })
    })
}
/// Export completion handler: on success, copies the exported movie into the
/// Saved Photos album and stops the activity indicator.
/// NOTE(review): ALAssetsLibrary is deprecated — consider migrating to
/// PHPhotoLibrary.performChanges.
func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.completed {
        let outputURL = session.outputURL
        let library = ALAssetsLibrary()
        if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: outputURL) {
            library.writeVideoAtPath(toSavedPhotosAlbum: outputURL) { alAssetURL, error in
                if error != nil {
                    DispatchQueue.main.async(execute: {
                        print("Failed to save video")
                    })
                } else {
                    DispatchQueue.main.async(execute: {
                        // BUG FIX: was `Print(...)` (capital P), which is not a
                        // defined function and would not compile.
                        print("Sucessfully saved Video")
                    })
                }
                self.activityIndicator.stopAnimating()
            }
        }
    }
}