How to merge multiple videos with different frame rates? - iOS

My requirement is that the user has multiple clips and the app offers three options: make a clip slow motion, make it faster, or leave it at normal speed. After changing the speeds, the user can merge the clips into a single video and save it to the device.
For example:
Clip 1, clip 2 and clip 3 are recorded at normal speed. The user then converts clip 1 to slow motion, leaves clip 2 at normal speed, and converts clip 3 to fast speed. When the user merges them, the three clips are combined into one video which can be shared on social networks.
Recording video with AVFoundation, or selecting a video from the gallery:
func convertVideoWithSpeed(completion: () -> ()) {
    if RecordedSegment.segments.count > 0 {
        self.exportVideoWithMode(RecordedSegment.segments.first!, title: "clip_\(counter).mp4", completion: { [unowned self] (path) in
            RecordedSegment.segments.removeFirst()
            // Record the exported file name *before* bumping the counter,
            // so the stored name matches the file that was just written.
            self.mergedVideArray.append("clip_\(self.counter).mp4")
            self.counter = self.counter + 1
            self.convertVideoWithSpeed(completion)
        })
    } else {
        var arr1 = [NSURL]()
        for track in self.mergedVideArray {
            let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
            let finalURL = documentsURL.URLByAppendingPathComponent(track)
            arr1.append(finalURL)
        }
        self.mergeVideos(self.mergedVideArray, completion: {
            completion()
        })
    }
}
Converting video frame rates for different clips
func exportVideoWithMode(segment: RecordedSegment, title: String, completion: (path: String) -> ()) {
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let startTimer = kCMTimeZero
    print(RecordedSegment.segments)
    var size = CGSizeZero
    let astTrack = AVAsset(URL: NSURL(string: segment.path!)!)
    size = astTrack.tracksWithMediaType(AVMediaTypeVideo)[0].naturalSize
    do {
        try videoTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: startTimer)
        try audioTrack.insertTimeRange(CMTimeRangeMake(startTimer, astTrack.duration), ofTrack: astTrack.tracksWithMediaType(AVMediaTypeAudio)[0], atTime: startTimer)
    } catch _ {
        print("Failed to load first track")
    }
    let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let finalURL = documentsURL.URLByAppendingPathComponent(title)
    let instruction = AVMutableVideoCompositionInstruction()
    let layerInstruct = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruct]
    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    // NOTE: frameDuration only sets the rate at which the composition is
    // rendered; it does not retime the media, so by itself this produces
    // neither slow motion nor fast motion.
    if segment.mode == .Slow {
        videoComposition.frameDuration = CMTimeMake(1, 15)
    } else if segment.mode == .Fast {
        videoComposition.frameDuration = CMTimeMake(1, 30)
    } else {
        videoComposition.frameDuration = CMTimeMake(1, 30)
    }
    videoComposition.renderSize = size
    videoComposition.renderScale = 1
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, astTrack.duration)
    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = finalURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = videoComposition
    if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtURL(finalURL)
        } catch {
        }
    }
    // 6 - Perform the export
    exportSession.exportAsynchronouslyWithCompletionHandler() {
        print(exportSession.error)
        switch exportSession.status {
        case .Completed:
            print("completed")
            let asset = AVAsset(URL: finalURL)
            let track = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
            print("==============\(track.nominalFrameRate)")
            completion(path: finalURL.path!)
        case .Cancelled:
            print("Export was cancelled")
            GlobalUtility.hideActivityIndi(self)
        default:
            if exportSession.error == nil {
                completion(path: finalURL.path!)
            } else {
                GlobalUtility.hideActivityIndi(self)
            }
        }
    }
}
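Note that frameDuration above only controls how often the video composition renders a frame; it does not retime the media, so on its own it will not produce slow or fast motion. The retiming is normally done with scaleTimeRange(_:toDuration:) on the composition tracks, right after the insertTimeRange calls. A minimal sketch of that step (Swift 2 syntax to match the code above; the multiplier value is hypothetical):
// Stretch (slow motion) or squeeze (fast motion) the range just inserted.
let originalRange = CMTimeRangeMake(kCMTimeZero, astTrack.duration)
let speedMultiplier: Float64 = 2.0 // hypothetical: 2.0 = half speed, 0.5 = double speed
let newDuration = CMTimeMultiplyByFloat64(astTrack.duration, speedMultiplier)
videoTrack.scaleTimeRange(originalRange, toDuration: newDuration)
audioTrack.scaleTimeRange(originalRange, toDuration: newDuration)
Scaling both tracks keeps the audio in sync with the video; drop the audio line if pitch-shifted sound is not wanted.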
Merging them into one video:
func mergeVideos(mergePaths: [String], completion: () -> ()) {
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    var startTimer = kCMTimeZero
    print(RecordedSegment.segments)
    for track in mergePaths {
        let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
        let finalURL = documentsURL.URLByAppendingPathComponent(track)
        if NSFileManager.defaultManager().fileExistsAtPath(finalURL.path!) {
            let astTrack = AVAsset(URL: finalURL)
            let video = astTrack.tracksWithMediaType(AVMediaTypeVideo)
            let audio = astTrack.tracksWithMediaType(AVMediaTypeAudio)
            if audio.count > 0 && video.count > 0 {
                do {
                    // Read each source clip from its own time zero, append it at
                    // the current cursor, then advance the cursor to the end of
                    // the composition.
                    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, astTrack.duration), ofTrack: video[0], atTime: startTimer)
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, astTrack.duration), ofTrack: audio[0], atTime: startTimer)
                    startTimer = mixComposition.duration
                } catch _ {
                    print("Failed to insert track")
                }
            } else {
                print("tracks not exist")
            }
        } else {
            print("file not exist")
        }
    }
    let finalURL = self.recordSession.outputUrl
    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = finalURL
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.shouldOptimizeForNetworkUse = true
    if NSFileManager.defaultManager().fileExistsAtPath(self.recordSession.outputUrl.path!) {
        do {
            try NSFileManager.defaultManager().removeItemAtURL(self.recordSession.outputUrl)
        } catch {
        }
    }
    // 6 - Perform the export
    exportSession.exportAsynchronouslyWithCompletionHandler() {
        print(exportSession.error)
        switch exportSession.status {
        case .Completed:
            print("completed")
            completion()
        case .Cancelled:
            print("Export was cancelled")
            GlobalUtility.hideActivityIndi(self)
        default:
            if exportSession.error == nil {
                completion()
            } else {
                GlobalUtility.hideActivityIndi(self)
            }
        }
    }
}
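As an aside, when no per-clip processing is needed, AVMutableComposition can copy all of an asset's tracks in one call, which avoids the manual video/audio bookkeeping above. A minimal sketch in the same Swift 2 syntax, assuming the arr1 array of file URLs built earlier:
let composition = AVMutableComposition()
var cursor = kCMTimeZero
for url in arr1 {
    let asset = AVAsset(URL: url)
    do {
        // Inserts every track of the asset (video and audio) at the cursor.
        try composition.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), ofAsset: asset, atTime: cursor)
        cursor = CMTimeAdd(cursor, asset.duration)
    } catch {
        print("Could not append \(url)")
    }
}
The resulting composition can then be handed to the same AVAssetExportSession code as above.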

Related

How to get video and audio from MKV? (Swift)

I want to convert MKV (Matroska) to MP4 in Swift. When I add a file in the MKV format, my code breaks at line 8 (marked below). What should I do to fix that?
This is my code:
let composition = AVMutableComposition()
do {
    let sourceUrl = Bundle.main.url(forResource: "sample", withExtension: "mov")!
    let asset = AVURLAsset(url: sourceUrl)
    // line 8 -> breaks here
    guard let videoAssetTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return }
    guard let videoCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    try videoCompositionTrack.insertTimeRange(videoAssetTrack.timeRange, of: videoAssetTrack, at: CMTime.zero)
} catch {
    print(error)
}
// Create an export session
let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.mp4
exportSession.outputURL = browseURL
// Export the file
exportSession.exportAsynchronously {
    guard exportSession.status == .completed else { return }
    DispatchQueue.main.async {
        // Present a UIActivityViewController to share the file
        print("completed")
    }
}
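The break at line 8 is not really a bug in the composition code: AVFoundation cannot demux Matroska containers, so for an MKV input asset.tracks comes back empty and the guard falls through. A quick diagnostic sketch (assuming the same sourceUrl):
let asset = AVURLAsset(url: sourceUrl)
// For an unsupported container such as MKV these typically print false / 0.
print("readable: \(asset.isReadable), exportable: \(asset.isExportable)")
print("track count: \(asset.tracks.count)")
Converting MKV therefore needs a third-party demuxer/transcoder (an FFmpeg build, for example) rather than AVAssetExportSession.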

How to merge audio with video without losing the original sound of the video - iOS Swift

My goal is to merge audio (mp3 music) with video captured by the iPhone camera. I am able to merge the audio with the video using AVMutableComposition, but in the final output the video's original sound is missing.
Below is the code I am using:
open func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
        let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack.append(videoTrack)
        mutableCompositionAudioTrack.append(audioTrack)
    }
    let time = CMTimeMakeWithSeconds(Float64(musicTrimmerController.currentPlayerPosition), 1000)
    if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first,
        let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
        do {
            try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
            try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(time, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
        } catch {
            print(error)
        }
        totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)
        if let compositionV = mixComposition.tracks(withMediaType: AVMediaType.video).last {
            compositionV.preferredTransform = aVideoAssetTrack.preferredTransform
        }
    }
    if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
        let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("movie.mov")
        do {
            if FileManager.default.fileExists(atPath: outputURL.path) {
                try FileManager.default.removeItem(at: outputURL)
            }
        } catch { }
        if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = outputURL
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            // Try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                switch exportSession.status {
                case .failed:
                    print(exportSession.error as Any)
                case .cancelled:
                    print(exportSession.error as Any)
                default:
                    print("Save video output")
                }
            })
        }
    }
}
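For the missing sound: only the music file's audio ever reaches the composition; the video's own audio track is never inserted. A minimal sketch of the missing step, reusing the names above:
// Carry the camera video's own sound on its own composition track,
// parallel to the music track that is already being inserted.
if let originalAudio = aVideoAsset.tracks(withMediaType: .audio).first,
    let videoSoundTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
    do {
        try videoSoundTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAsset.duration), of: originalAudio, at: kCMTimeZero)
    } catch {
        print(error)
    }
}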

What's causing AVMutableComposition to increase the size of video drastically? - iOS, Swift, AVFoundation

Assuming that we have two video assets (AVAsset objects), let's call them blank and main, where main is a video of random limited length (say 2-5 minutes) and blank is always a 4-second video, we want to merge the videos in the following order:
blank - main - blank
// Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
let mixComposition = AVMutableComposition()
let assets = [blank, main, blank]
var totalTime: CMTime = kCMTimeZero
var atTimeM: CMTime = kCMTimeZero
Utils.log([blank.duration, main.duration])
// VIDEO TRACK
let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
for (index, asset) in assets.enumerated() {
    do {
        if index == 0 {
            atTimeM = kCMTimeZero
        } else {
            atTimeM = totalTime // <-- Use the total time of all the videos seen so far.
        }
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)
    } catch let error as NSError {
        Utils.log("error: \(error)")
    }
    totalTime = CMTimeAdd(totalTime, asset.duration)
}
// AUDIO TRACK
let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, main.duration), of: main.tracks(withMediaType: AVMediaTypeAudio)[0], at: blank.duration)
} catch _ {
    completionHandler(nil, ErrorType(rawValue: "Unable to add audio in composition."))
    return
}
let outputURL = mainVideoObject.getDirectoryURL()?.appendingPathComponent("video-with-blank.mp4")
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else {
    completionHandler(nil, ErrorType(rawValue: "Unable to create export session."))
    return
}
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(blank.duration, CMTimeAdd(main.duration, blank.duration)))
// Fixing orientation
let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
let firstAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
firstLayerInstruction.setTransform(firstAssetTrack.preferredTransform, at: kCMTimeZero)
firstLayerInstruction.setOpacity(0.0, at: blank.duration)
let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
let secondAssetTrack = main.tracks(withMediaType: AVMediaTypeVideo)[0]
var isSecondAssetPortrait = false
let secondTransform = secondAssetTrack.preferredTransform
if secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0 {
    isSecondAssetPortrait = true
}
if secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0 {
    isSecondAssetPortrait = true
}
secondLayerInstruction.setTransform(secondAssetTrack.preferredTransform, at: blank.duration)
secondLayerInstruction.setOpacity(0.0, at: CMTimeAdd(blank.duration, main.duration))
let thirdLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
let thirdAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
thirdLayerInstruction.setTransform(thirdAssetTrack.preferredTransform, at: CMTimeAdd(blank.duration, main.duration))
mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction, thirdLayerInstruction]
var naturalSize = CGSize()
if isSecondAssetPortrait {
    naturalSize = CGSize(width: secondAssetTrack.naturalSize.height, height: secondAssetTrack.naturalSize.width)
} else {
    naturalSize = secondAssetTrack.naturalSize
}
let renderWidth = naturalSize.width
let renderHeight = naturalSize.height
let mainCompositionInst = AVMutableVideoComposition()
mainCompositionInst.instructions = [mainInstruction]
mainCompositionInst.frameDuration = CMTimeMake(1, 30)
mainCompositionInst.renderSize = CGSize(width: renderWidth, height: renderHeight)
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeMPEG4
exporter.videoComposition = mainCompositionInst
//exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronously {
    if exporter.status == .completed {
        completionHandler(AVAsset(url: outputURL!), nil)
    } else {
        completionHandler(nil, ErrorType(rawValue: "Unable to export video."))
        if let error = exporter.error {
            Utils.log("Unable to export video. \(error)")
        }
    }
}
Assuming that the original video, recorded for 5 minutes at 720p quality, takes around 200MB of space, adding the 4-second blank video at the beginning and end of the main video should not drastically change the size, and the processing should finish quickly.
The result, however, is a video that is 2 to 2.5x the size of the original (400-500 MB) and takes too long to process.
Please advise,
Thanks
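A note on the size before the answer below: attaching a videoComposition forces the exporter to re-encode every frame (passthrough is impossible), so the output bitrate is decided by the preset, not by the source file. The expected output size can be checked before exporting (a small sketch, reusing mixComposition from above):
if let probe = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) {
    probe.outputFileType = AVFileTypeMPEG4
    probe.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    // Rough estimate, in bytes, of what this preset will produce.
    print("estimated bytes: \(probe.estimatedOutputFileLength)")
}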
Here I have prepared a custom class where you can just pass the names of your videos and keep those videos in the bundle. Once you run the app it will generate a new video file as per your requirement and drop it into the application's documents directory path.
I have prepared this demo using Swift 4.
//
//  ViewController.swift
//  SOVideoMergingDemo
//
//  Created by iOS Test User on 03/01/18.
//  Copyright © 2018 Test User. Ltd. All rights reserved.
//

import UIKit
import AVFoundation
import MediaPlayer
import Photos
import AssetsLibrary
import AVKit

class ViewController: UIViewController {

    //--------------------------------------------------
    //MARK:
    //MARK: - IBOutlets
    //--------------------------------------------------

    //--------------------------------------------------
    //MARK:
    //MARK: - Properties
    //--------------------------------------------------

    var videoUrls: [URL] = []
    var arrVideoAsset: [AVAsset] = []
    let video1 = "1"
    let video2 = "2"
    let outPutVideo = "MergedVideo.mp4"
    let semaphore = DispatchSemaphore(value: 1)

    //--------------------------------------------------
    //MARK:
    //MARK: - Custom Methods
    //--------------------------------------------------

    func getVideoURL(forVideo: String) -> URL {
        let videoPath = Bundle.main.path(forResource: forVideo, ofType: "mp4")
        let vidURL = URL(fileURLWithPath: videoPath!)
        return vidURL
    }

    //--------------------------------------------------

    func mergeVideos(arrVideoAsset: [AVAsset]) {
        let mixComposition = AVMutableComposition()

        // Tracks to insert into the composition for merging:
        // one video track per clip.
        let firstTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let thirdTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Each clip starts where the previous one ends.
        let secondClipStart = arrVideoAsset[0].duration
        let thirdClipStart = CMTimeAdd(arrVideoAsset[0].duration, arrVideoAsset[1].duration)

        do {
            try firstTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[0].duration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }
        do {
            try secondTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[1].duration), of: arrVideoAsset[1].tracks(withMediaType: .video)[0], at: secondClipStart)
        } catch _ {
            print("Failed to load second track")
        }
        do {
            try thirdTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[2].duration), of: arrVideoAsset[2].tracks(withMediaType: .video)[0], at: thirdClipStart)
        } catch _ {
            print("Failed to load third track")
        }

        // This instruction is created for merging the video tracks.
        let compositionInstruction = AVMutableVideoCompositionInstruction()
        compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(arrVideoAsset[0].duration, CMTimeAdd(arrVideoAsset[1].duration, arrVideoAsset[2].duration)))

        // Creating layer instructions for the videos. Each clip is hidden
        // (opacity 0) once its segment of the timeline has passed.
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack!, asset: arrVideoAsset[0])
        firstInstruction.setOpacity(0.0, at: secondClipStart)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack!, asset: arrVideoAsset[1])
        secondInstruction.setOpacity(0.0, at: thirdClipStart)
        let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack!, asset: arrVideoAsset[2])
        compositionInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]

        // By changing this height and width you can affect the size of the
        // merged video. Calculate it carefully and as per your needs.
        let height = (Float((firstTrack?.naturalSize.height)!) < Float((secondTrack?.naturalSize.height)!)) ? firstTrack?.naturalSize.height : secondTrack?.naturalSize.height
        let width = (Float((firstTrack?.naturalSize.width)!) < Float((secondTrack?.naturalSize.width)!)) ? firstTrack?.naturalSize.width : secondTrack?.naturalSize.width

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [compositionInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: width!, height: height!)

        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = URL(fileURLWithPath: getDocumentDirectoryPath() + "/" + outPutVideo)
        exporter?.outputFileType = AVFileType.mp4
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = mainComposition
        print(self.getDocumentDirectoryPath())

        exporter?.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                if exporter?.status == AVAssetExportSessionStatus.completed {
                    // The exporter has already written the file to outputURL in the
                    // documents directory; no extra copy of the data is needed.
                    print("Merged video saved at \(exporter!.outputURL!)")

                    // Uncomment this if you want to save the video to the Photos library:
                    // PHPhotoLibrary.shared().performChanges({
                    //     PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (exporter?.outputURL)!)
                    // }, completionHandler: { (success, error) in
                    //     if success {
                    //         let fetchOptions = PHFetchOptions()
                    //         fetchOptions.sortDescriptors = [NSSortDescriptor.init(key: "creationDate", ascending: false)]
                    //         _ = PHAsset.fetchAssets(with: .video, options: fetchOptions).firstObject
                    //     } else {
                    //         print("Error in saving file to the Photos library -> \(String(describing: error?.localizedDescription))")
                    //     }
                    // })
                } else {
                    print("Error -> \(String(describing: exporter?.error?.localizedDescription))")
                }
            }
        })
    }

    //--------------------------------------------------
    // This method is used to make the layer instruction for a particular video.
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        let scale: CGAffineTransform = CGAffineTransform(scaleX: 1, y: 1)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scale), at: kCMTimeZero)
        return instruction
    }

    //--------------------------------------------------

    func getDocumentDirectoryPath() -> String {
        let arrPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        return arrPaths[0]
    }

    //--------------------------------------------------
    //MARK:
    //MARK: - View Life Cycle Methods
    //--------------------------------------------------

    override func viewDidLoad() {
        super.viewDidLoad()
        // Prepare the video assets.
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video1)))
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video2)))
        arrVideoAsset.append(AVAsset(url: getVideoURL(forVideo: video1)))
        // Merge these videos.
        mergeVideos(arrVideoAsset: arrVideoAsset)
    }
}

Audio file with different levels of volume

I want the user to play audio, change the volume in some parts, and then save the file with the new volume levels.
I was able to change the volume of an audio file with AVAssetExportSession and AVMutableAudioMixInputParameters, and it worked. The problem is that I need to create a loop of this audio, so first I create the loop and then I change the volume.
Here is my code:
let type = "m4a"
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
override func viewDidLoad() {
super.viewDidLoad()
playButotn.isEnabled = false
let mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
let mainDurations = AVAsset(url: mainUrl!).duration
let secondAudioUrl = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
if mainDurations > backgroundDuration {
var times = Int(mainDurations.seconds / backgroundDuration.seconds)
let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
if rem > 0 {
times = times + 1
}
createLoopAudio(times: times) {
self.createFade {
self.createMix(mainUrl: mainUrl!, backgroundUrl: self.backgroundUrl)
}
}
}else {
backgroundUrl = secondAudioUrl!
createMix(mainUrl: mainUrl!, backgroundUrl: backgroundUrl)
}
}
func createMix(mainUrl: URL, backgroundUrl: URL){
let composition = AVMutableComposition()
let mainAsset = AVAsset(url: mainUrl)
let backgroundAsset = AVAsset(url: backgroundUrl)
let mainDurations = AVAsset(url: mainUrl).duration
let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
do {
try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
}catch {
print(error.localizedDescription)
}
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport?.outputFileType = AVFileTypeMPEG4
assetExport?.outputURL = documentsDirectory
assetExport?.shouldOptimizeForNetworkUse = true
if FileManager.default.fileExists(atPath: documentsDirectory.path) {
try! FileManager.default.removeItem(atPath: documentsDirectory.path)
}
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport!.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport?.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport?.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(assetExport?.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(assetExport?.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(assetExport?.error)")
case AVAssetExportSessionStatus.completed:
print("complete")
DispatchQueue.main.async {
self.playButotn.isEnabled = true
}
}
})
}
func createLoopAudio(times: Int, completion: #escaping () -> Void){
let composition = AVMutableComposition()
var nextTimeStartClip = kCMTimeZero
for _ in 1...times {
let url = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
let audioAsset = AVAsset(url: url!)
print("tracks walk \(audioAsset.tracks.count)")
let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
do {
try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: nextTimeStartClip)
}catch {
print(error.localizedDescription)
}
nextTimeStartClip = CMTimeAdd(nextTimeStartClip, audioAsset.duration)
}
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport?.outputFileType = AVFileTypeMPEG4
assetExport?.outputURL = backgroundUrl
assetExport?.shouldOptimizeForNetworkUse = true
if FileManager.default.fileExists(atPath: backgroundUrl.path) {
try! FileManager.default.removeItem(atPath: backgroundUrl.path)
}
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport!.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport?.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport?.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(assetExport?.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(assetExport?.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(assetExport?.error)")
case AVAssetExportSessionStatus.completed:
print("loop complete")
completion()
}
})
}
func createFade(completion: #escaping () -> Void) {
let exportAudioMix = AVMutableAudioMix()
let audioAsset = AVAsset(url: backgroundUrl)
let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0])
let start = 2
let length = 3
exportAudioMixInputParameters.setVolume(0.0, at: CMTimeMakeWithSeconds(Float64(start - 1), 1))
exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start), 1))
exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + 1), 1))
exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + 2), 1))
exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + length - 2), 1))
exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + length - 1), 1))
exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start + length), 1))
exportAudioMix.inputParameters = [exportAudioMixInputParameters]
let composition = AVMutableComposition()
print("tracks loop \(audioAsset.tracks.count)")
let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
do {
try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
}catch {
print(error.localizedDescription)
}
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
assetExport?.outputFileType = AVFileTypeAppleM4A
assetExport?.outputURL = backgroundUrl
assetExport?.shouldOptimizeForNetworkUse = true
if FileManager.default.fileExists(atPath: backgroundUrl.path) {
try! FileManager.default.removeItem(atPath: backgroundUrl.path)
}
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport!.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport?.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport?.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(assetExport?.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(assetExport?.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(assetExport?.error)")
case AVAssetExportSessionStatus.completed:
print("faded complete")
completion()
}
})
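As an aside, the stepped setVolume(_:at:) calls in createFade can usually be collapsed into volume ramps, which interpolate linearly over a time range instead of jumping between levels. A minimal sketch against the same input parameters object (the 2-second fade-in is a hypothetical value):
let fadeInRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(2, 1))
exportAudioMixInputParameters.setVolumeRamp(fromStartVolume: 0.0, toEndVolume: 1.0, timeRange: fadeInRange)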
Well, I found a way and it works fine: I have two audios, A and B, where B is shorter than A. I create a loop of B to fit A, then I play the two audios at the same time, and with a slider I modify the volume of B. Finally I save that volume configuration and mix the audios. This is my code:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet weak var playButotn: UIButton!
    @IBOutlet weak var volumeSlider: UISlider!

    var player: AVAudioPlayer?
    var podcastPlayer: AVAudioPlayer?
    var times = 0
    var volumeRanges = [VolumeRange]()
    let type = "m4a"
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
    var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
    var mainUrl: URL?

    override func viewDidLoad() {
        super.viewDidLoad()
        mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
        playButotn.isEnabled = false
        volumeSlider.value = 1
        let mainDurations = AVAsset(url: mainUrl!).duration
        print(AVAsset(url: mainUrl!).duration.seconds)
        let secondAudioUrl = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
        if mainDurations > backgroundDuration {
            times = Int(mainDurations.seconds / backgroundDuration.seconds)
            let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
            if rem > 0 {
                times = times + 1
            }
            createLoop(times: times) {
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        } else {
            backgroundUrl = secondAudioUrl!
            createMix()
        }
    }

    func createMix() {
        let composition = AVMutableComposition()
        let mainAsset = AVAsset(url: mainUrl!)
        let backgroundAsset = AVAsset(url: backgroundUrl)
        let mainDurations = AVAsset(url: mainUrl!).duration
        let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
        do {
            try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
            try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = documentsDirectory
        assetExport?.shouldOptimizeForNetworkUse = true
        if FileManager.default.fileExists(atPath: documentsDirectory.path) {
            try! FileManager.default.removeItem(atPath: documentsDirectory.path)
        }
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("mix complete")
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        })
    }

    func createLoop(times: Int, completion: @escaping () -> Void) {
        // JUMAudioFile is a third-party helper; it reads the PCM samples so
        // the loop can be written out as a single file.
        let urlFondo = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let acentoFile = try! JUMAudioFile(forReading: urlFondo!)
        let acentoPCM = acentoFile.getPCMArrayBufferFromURL()
        let (_, acentoCompleteData) = JUMAudioFile.convertToPoints(arrayFloatValues: acentoPCM)
        var newDraft = [Float]()
        for _ in 1...times {
            for array in acentoCompleteData {
                for fl in array {
                    newDraft.append(fl)
                }
            }
        }
        let _ = try! JUMAudioFile(createFileFromFloats: [newDraft], url: self.backgroundUrl)
        print("loop complete")
        completion()
    }

    func createLoopAudioWithFade(completion: @escaping () -> Void) {
        let composition = AVMutableComposition()
        let exportAudioMix = AVMutableAudioMix()
        var exportAudioMixInputParametersArry = [AVMutableAudioMixInputParameters]()
        let audioAsset = AVAsset(url: self.backgroundUrl)
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        } catch {
            print(error.localizedDescription)
        }
        // Replay every slider move that was recorded during playback.
        let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
        for ranges in volumeRanges {
            exportAudioMixInputParameters.setVolume(ranges.volume!, at: CMTimeMakeWithSeconds(ranges.start!, 50000))
        }
        exportAudioMixInputParametersArry.append(exportAudioMixInputParameters)
        exportAudioMix.inputParameters = exportAudioMixInputParametersArry
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = backgroundUrl
        assetExport?.shouldOptimizeForNetworkUse = true
        if FileManager.default.fileExists(atPath: backgroundUrl.path) {
            try! FileManager.default.removeItem(atPath: backgroundUrl.path)
        }
        assetExport?.audioMix = exportAudioMix
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("fade complete")
                completion()
            }
        })
    }

    @IBAction func play(_ sender: Any) {
        do {
            player = try AVAudioPlayer(contentsOf: backgroundUrl)
            player?.prepareToPlay()
            player?.volume = 1.0
            podcastPlayer = try AVAudioPlayer(contentsOf: mainUrl!)
            podcastPlayer?.prepareToPlay()
            podcastPlayer?.volume = 1
            podcastPlayer?.play()
            player?.play()
        } catch {
            print(error.localizedDescription)
        }
    }

    @IBAction func changeVolume(_ sender: UISlider) {
        if player?.isPlaying == true {
            player?.volume = sender.value
            // Remember the slider value and the playback time it was set at.
            let volumeRange = VolumeRange()
            volumeRange.volume = sender.value
            volumeRange.start = player?.currentTime
            volumeRanges.append(volumeRange)
        }
    }

    @IBAction func touchUp(_ sender: UISlider) {
    }

    @IBAction func touchUpOutside(_ sender: UISlider) {
        print("outside")
    }

    @IBAction func generar(_ sender: Any) {
        playButotn.isEnabled = false
        self.createLoopAudioWithFade() {
            self.createMix()
        }
    }

    @IBAction func playMix(_ sender: UIButton) {
        do {
            player = try AVAudioPlayer(contentsOf: documentsDirectory)
            player?.prepareToPlay()
            player?.volume = 1.0
            player?.play()
        } catch {
        }
    }
}

class VolumeRange {
    var volume: Float?
    var start: Double?
}

AVAssetTrack does not see AVMediaTypeAudio

I am new to Swift and programming. I tried to concatenate some recorded files, which I did successfully like this:
func concatenateFiles(audioFiles: [URL], completion: @escaping (_ concatenatedFile: NSURL?) -> ()) {
    // Result file
    var nextClipStartTime = kCMTimeZero
    let composition = AVMutableComposition()
    let track = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    // Add each track
    for audio in audioFiles {
        let asset = AVURLAsset(url: NSURL(fileURLWithPath: audio.path) as URL, options: nil)
        if let assetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first {
            let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
            do {
                try track.insertTimeRange(timeRange, of: assetTrack, at: nextClipStartTime)
                nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
            } catch {
                print("Error concatenating file - \(error)")
                completion(nil)
                return
            }
        }
    }
    // Export the new file
    if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
        let format = DateFormatter()
        format.dateFormat = "yyyy:MM:dd-HH:mm:ss"
        let currentFileName = "REC:\(format.string(from: Date()))"
        let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let fileURL = documentsDirectory.appendingPathComponent("\(currentFileName).m4a")
        // Remove any existing file
        do {
            print(audioFiles.count)
            try FileManager.default.removeItem(atPath: fileURL.path)
            print("Removed \(fileURL)")
        } catch {
            print("Could not remove file - \(error)")
        }
        // Configure export session output
        exportSession.outputURL = fileURL as URL
        exportSession.outputFileType = AVFileTypeAppleM4A
        // Perform the export
        exportSession.exportAsynchronously() { () -> Void in
            switch exportSession.status {
            case AVAssetExportSessionStatus.completed:
                print("Export complete")
                DispatchQueue.main.async(execute: {
                    if self.concatinatedArray == nil {
                        self.concatinatedArray = [URL]()
                    }
                    self.concatinatedArray?.append(exportSession.outputURL!)
                    completion(fileURL as NSURL?)
                })
                return print("success to merge audio")
            case AVAssetExportSessionStatus.failed:
                completion(nil)
                return print("failed to merge")
            case AVAssetExportSessionStatus.cancelled:
                completion(nil)
                return print("cancelled merge")
            default:
                print("complete")
            }
        }
    }
}
But now, when I want to merge it with a video, I get a crash at this point:
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
I use a standard merging method; it works with other sounds I have, and it fails only with the concatenated audio files. Please help me work out how to get it working.
AVURLAsset *avAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path2] options:nil];
if ([[avAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}
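The crash at tracks(withMediaType:)[0] means the concatenated file has no audio track at the moment the merge runs, typically because the merge used a path built up front instead of waiting for exportAsynchronously to finish. A defensive sketch in Swift (recordings is a hypothetical array of source URLs):
concatenateFiles(audioFiles: recordings) { concatenatedFile in
    guard let url = concatenatedFile as URL? else { return }
    let asset = AVURLAsset(url: url)
    // Indexing [0] on an empty array is the crash; .first makes it a testable failure.
    guard let audioTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first else {
        print("No audio track in \(url.lastPathComponent)")
        return
    }
    // Safe to insert audioTrack into the merge composition here.
    _ = audioTrack
}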
