iOS audio conversion from AVMutableComposition to AVAudioPlayer

I've been searching for a solution to this, but so far I haven't found one.
Right now I'm creating an audio file with AVMutableComposition and writing it to disk with AVAssetExportSession so I can load it with AVAudioPlayer and play it at a certain time. Writing it to disk takes too much time, which slows things down and in some cases makes my app very slow.
The creation of the composition takes very little time, so I was wondering whether, instead of exporting the file to disk, I could export it as NSData or something similar, so I can use it in AVAudioPlayer without writing to or reading from disk.
The code:
let soundPath = "temp"
let filepath = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(soundPath, ofType: "wav")!)
var songAsset = AVURLAsset(URL: filepath, options: nil)
var track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
var source: AnyObject = songAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
var startTime = CMTimeMakeWithSeconds(0, 44100)
var time_insert = CMTimeMakeWithSeconds(atTime, 44100)
var trackDuration = songAsset.duration
var longestTime = CMTimeMake(882000, 44100) // 20 seconds at 44.1 kHz
var timeRange = CMTimeRangeMake(startTime, longestTime)
// Per-track volume settings for the audio mix
var trackMix = AVMutableAudioMixInputParameters(track: track)
trackMix.setVolume(volume, atTime: startTime)
audioMixParams.insert(trackMix, atIndex: audioMixParams.count)
// Insert the source clip into the composition track at the requested time
track.insertTimeRange(timeRange, ofTrack: source as AVAssetTrack, atTime: time_insert, error: nil)
This happens several times in a loop, generating a whole track.
Finally, I use:
var exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
exporter.audioMix = audioMix
exporter.outputFileType = "com.apple.m4a-audio" // AVFileTypeAppleM4A
exporter.outputURL = NSURL(fileURLWithPath: fileName)
exporter.exportAsynchronouslyWithCompletionHandler({
    switch exporter.status {
    case AVAssetExportSessionStatus.Failed:
        println("failed \(exporter.error)")
    case AVAssetExportSessionStatus.Cancelled:
        println("cancelled \(exporter.error)")
    default:
        println("complete")
        callback()
    }
})
to export the file.
After that, it calls a callback that loads the sound and schedules each repetition at the right time.
The reason I don't loop with the normal numberOfLoops property is that each file might not have the same length as loopDuration.
callback: {
    self.fillSoundArray()
    self.fillSoundArray()
    self.fillSoundArray()
}

private func fillSoundArray() {
    // Grab the current loop index, then advance it
    var currentloop = self.current_loop++
    let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
    var fileName = documentsPath + "/temp.mp4"
    var tempAudio = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: fileName), error: nil)
    if (self.start_time == 0) {
        // Anchor the schedule to the audio device clock on the first call
        self.start_time = tempAudio.deviceCurrentTime
    }
    queue.insert(tempAudio, atIndex: queue.count)
    tempAudio.prepareToPlay()
    tempAudio.delegate = self
    // Schedule each instance back-to-back on the device clock
    tempAudio.playAtTime(start_time + currentloop * self.loopDuration)
}

Use AVPlayerItem & AVPlayer to make a quick preview of your audio composition
AVPlayerItem *previewPlayerItem = [[AVPlayerItem alloc] initWithAsset:mutableCompositionMain
                                         automaticallyLoadedAssetKeys:@[@"tracks", @"duration"]];
previewPlayerItem.videoComposition = mutableVideoComposition;
AVPlayer *previewPlayer = [[AVPlayer alloc] initWithPlayerItem:previewPlayerItem];
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(dismisPreviewPlayer:)
                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                           object:[previewPlayer currentItem]];
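In Swift, the same no-export preview of the composition from the question might look like this (a minimal sketch, assuming composition and audioMix are the objects built above; AVPlayerItem has its own audioMix property, so the per-track volume settings still apply):

let previewItem = AVPlayerItem(asset: composition)
previewItem.audioMix = audioMix // the AVMutableAudioMix built from audioMixParams
let previewPlayer = AVPlayer(playerItem: previewItem)
previewPlayer.play()

Since nothing is written to disk, this avoids the export delay entirely; the trade-off is that you schedule playback through AVPlayer instead of AVAudioPlayer's playAtTime.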

Related

Setting multiple Volumes to each Video tracks using AudioMixInputParameters AVFoundation is not working in Swift iOS

I am working on a video-based application in Swift. As per the requirement, I have to select multiple videos from the device gallery, set different CIFilter effects and a volume for each video asset, and then merge all the videos and save the final video. As an output, when I play the final video, the sound volume should change accordingly.
I have already merged all the selected video assets into one with different CIFilter effects, but my problem is that when I try to set a volume for each video clip, it doesn't work. I get the default volume for my final video. Here is my code:
func addFilerEffectAndVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
    let videoFilteredAsset = AVAsset(url: assetURL)
    print(videoFilteredAsset)
    createVideoComposition(myAsset: videoFilteredAsset, videos: video)
    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
    let filePath = url.path
    let fileManager = FileManager.default
    do {
        if fileManager.fileExists(atPath: filePath) {
            print("FILE AVAILABLE")
            try fileManager.removeItem(atPath: filePath)
        } else {
            print("FILE NOT AVAILABLE")
        }
    } catch _ {
    }
    let composition: AVMutableComposition = AVMutableComposition()
    let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    //Add video to the final record
    do {
        try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: videoFilteredAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
    } catch _ {
    }
    //Extract audio from the video and the music
    let audioMix: AVMutableAudioMix = AVMutableAudioMix()
    var audioMixParam: [AVMutableAudioMixInputParameters] = []
    let assetVideoTrack: AVAssetTrack = videoFilteredAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
    let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
    videoParam.trackID = compositionAudioVideo.trackID
    //Set final volume of the audio record and the music
    videoParam.setVolume(video.videoClipVolume, at: kCMTimeZero)
    //Add setting
    audioMixParam.append(videoParam)
    //Add audio on final record
    do {
        try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }
    //Fading volume out for background music
    let durationInSeconds = CMTimeGetSeconds(videoFilteredAsset.duration)
    let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
    let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
    videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoClipVolume, timeRange: firstSecond)
    videoParam.setVolumeRamp(fromStartVolume: video.videoClipVolume, toEndVolume: 0, timeRange: lastSecond)
    //Add parameter
    audioMix.inputParameters = audioMixParam
    // Export part, left for facility
    let exporter = AVAssetExportSession(asset: videoFilteredAsset, presetName: AVAssetExportPresetHighestQuality)!
    exporter.videoComposition = videoFilterComposition
    exporter.outputURL = url
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.audioMix = audioMix
    exporter.exportAsynchronously(completionHandler: { () -> Void in
        completion!(exporter, url)
    })
}
After that, I again use a method to merge all the video clips using AVAssetExportSession; there I am not setting any AudioMixInputParameters.
Note: when I set the volume in the final merging method using AVAssetExportSession's AudioMixInputParameters, then the volume changes for the full video.
My question: is it possible to set a separate volume for each video clip? Please suggest. Thank you!
Here is the working solution for my question:
func addVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
    //Create Asset from Url
    let filteredVideoAsset: AVAsset = AVAsset(url: assetURL)
    video.fileID = String(video.videoID)
    //Get the path of App Document Directory
    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
    let filePath = url.path
    let fileManager = FileManager.default
    do {
        if fileManager.fileExists(atPath: filePath) {
            print("FILE AVAILABLE")
            try fileManager.removeItem(atPath: filePath)
        } else {
            print("FILE NOT AVAILABLE")
        }
    } catch _ {
    }
    let composition: AVMutableComposition = AVMutableComposition()
    let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    //Add video to the final record
    do {
        try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: filteredVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
    } catch _ {
    }
    //Extract audio from the video and the music
    let audioMix: AVMutableAudioMix = AVMutableAudioMix()
    var audioMixParam: [AVMutableAudioMixInputParameters] = []
    let assetVideoTrack: AVAssetTrack = filteredVideoAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
    let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
    videoParam.trackID = compositionAudioVideo.trackID
    //Set final volume of the audio record and the music
    videoParam.setVolume(video.videoVolume, at: kCMTimeZero)
    //Add setting
    audioMixParam.append(videoParam)
    //Add audio on final record
    //First: the audio of the record and Second: the music
    do {
        try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }
    //Fading volume out for background music
    let durationInSeconds = CMTimeGetSeconds(filteredVideoAsset.duration)
    let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
    let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
    videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoVolume, timeRange: firstSecond)
    videoParam.setVolumeRamp(fromStartVolume: video.videoVolume, toEndVolume: 0, timeRange: lastSecond)
    //Add parameter
    audioMix.inputParameters = audioMixParam
    //Remove the previous temp video if it exists
    let filemgr = FileManager.default
    do {
        if filemgr.fileExists(atPath: "\(video.fileID)_\("FilterVideo").mov") {
            try filemgr.removeItem(atPath: "\(video.fileID)_\("FilterVideo").mov")
        } else {
        }
    } catch _ {
    }
    //Export the final record
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
    exporter.outputURL = url
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.audioMix = audioMix
    exporter.exportAsynchronously(completionHandler: { () -> Void in
        completion!(exporter, url)
        // self.saveVideoToLibrary(from: filePath)
    })
}
I found that exporting an asset with the AVAssetExportPresetPassthrough preset doesn't have an impact on output volume. When I tried AVAssetExportPresetLowQuality, the volume change was applied successfully.
I wish this were better documented somewhere :(
The working code:
// Assume we have:
let composition: AVMutableComposition
var inputParameters = [AVAudioMixInputParameters]()
// We add a track
let trackComposition = composition.addMutableTrack(...)
// Configure volume for this track
let inputParameter = AVMutableAudioMixInputParameters(track: trackComposition)
inputParameter.setVolume(desiredVolume, at: startTime)
// It works even without setting the `trackID`
// inputParameter.trackID = trackComposition.trackID
inputParameters.append(inputParameter)
// Apply gathered `inputParameters` before exporting
let audioMix = AVMutableAudioMix()
audioMix.inputParameters = inputParameters
// I found it's not working, if using `AVAssetExportPresetPassthrough`,
// so try `AVAssetExportPresetLowQuality` first
let export = AVAssetExportSession(..., presetName: AVAssetExportPresetLowQuality)
export.audioMix = audioMix
Tested this with multiple assetTrack insertions into the same compositionTrack, setting a different volume for each insertion. It seems to work.
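To illustrate that, here is a hypothetical sketch of two clips inserted back-to-back into the same composition track, each with its own level (firstClipDuration is a made-up CMTime marking where the second clip starts):

let params = AVMutableAudioMixInputParameters(track: trackComposition)
params.setVolume(0.8, at: kCMTimeZero)       // level for the first inserted clip
params.setVolume(0.2, at: firstClipDuration) // level from the start of the second clip
audioMix.inputParameters = [params]

Each setVolume(_:at:) call holds until the next volume setting, so one parameters object can carry a different level for every inserted segment.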

Unable to combine multiple audio files into one in a keyboard extension running on an iOS device

The keyboard extension I built uses audio files to play audio feedback when keys are pressed. At some point, the user has the ability to combine multiple audio files into a single audio file. Combining multiple audio files works in the simulator but does not work on the device.
// Requires MobileCoreServices (for kUTTypeMPEG4Audio) in addition to AVFoundation
func createSound(myNotes: [String], outputFile: String) {
    // CMTime represents a length of time stored as a rational number
    var startTime: CMTime = kCMTimeZero
    // AVMutableComposition creates a new composition
    let composition: AVMutableComposition = AVMutableComposition()
    // AVMutableCompositionTrack - a mutable track in a composition used to insert, remove, and scale track segments
    if let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        for url in allFilesForCharacters() {
            let avAsset: AVURLAsset = AVURLAsset(url: url)
            let timeRange: CMTimeRange = CMTimeRangeMake(kCMTimeZero, avAsset.duration)
            let audioTrack: AVAssetTrack = avAsset.tracks(withMediaType: AVMediaType.audio)[0]
            try! compositionAudioTrack.insertTimeRange(timeRange, of: audioTrack, at: startTime)
            startTime = CMTimeAdd(startTime, timeRange.duration)
        }
    }
    let exportPath: String = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].path + "/" + outputFile + ".m4a"
    try? FileManager.default.removeItem(atPath: exportPath)
    if let export: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
        export.outputURL = URL(fileURLWithPath: exportPath)
        export.outputFileType = AVFileType.m4a
        export.exportAsynchronously {
            if export.status == AVAssetExportSessionStatus.completed {
                NSLog("All done")
                if let data = try? Data(contentsOf: export.outputURL!) {
                    let board = UIPasteboard.general
                    board.setData(data, forPasteboardType: kUTTypeMPEG4Audio as String)
                }
            }
            else {
                print(export.error?.localizedDescription ?? "")
            }
        }
    }
}
So I was able to solve the issue after realizing that the app had an "Allow Full Access" switch in its settings. After turning it on, everything worked as expected. The app couldn't carry out its functionality because the device was blocking it from accessing the device's data.
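If you want to detect this at runtime, UIInputViewController exposes a hasFullAccess property (iOS 11+) you can check before attempting the export. A small sketch, assuming createSound lives in your keyboard's input view controller (the argument values are illustrative):

if hasFullAccess {
    createSound(myNotes: myNotes, outputFile: "combined")
} else {
    // Ask the user to enable "Allow Full Access" for this keyboard in Settings
}

On earlier systems, a common fallback was testing whether UIPasteboard.general was usable, since the pasteboard is one of the things open access gates.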

iOS AVPlayer can't play 240 fps video

I recorded a 240 fps video after changing the AVCaptureDeviceFormat. If I save that video to the photo library, the slow-mo effect is there. But if I play that file from the documents directory using an AVPlayer, I can't see the slow-mo effect.
Code to play the video:
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:[AVAsset assetWithURL:[NSURL fileURLWithPath:fullPath]]];
AVPlayer *feedVideoPlayer = [AVPlayer playerWithPlayerItem:playerItem];
AVPlayerViewController *playerController = [[AVPlayerViewController alloc] init];
playerController.view.frame = CGRectMake(0, 0, videoPreviewView.frame.size.width, videoPreviewView.frame.size.height);
playerController.player = feedVideoPlayer;
It's a bit annoying, but I believe you'll need to re-create the video in an AVComposition if you don't want to lose quality. I'd love to know if there is another way, but this is what I've come up with. You can technically export the video via AVAssetExportSession, but using a passthrough quality will result in the same video file, which won't be slow motion; you'll need to transcode it, which loses quality (AFAIK; see Issue playing slow-mo AVAsset in AVPlayer for that solution).
The first thing you'll need to do is grab the source media's original time mapping objects. You can do that like so:
let options = PHVideoRequestOptions()
options.version = PHVideoRequestOptionsVersion.current
options.deliveryMode = .highQualityFormat
PHImageManager().requestAVAsset(forVideo: phAsset, options: options, resultHandler: { (avAsset, mix, info) in
    guard let avAsset = avAsset else { return }
    let originalTimeMaps = avAsset.tracks(withMediaType: AVMediaTypeVideo)
        .first?
        .segments
        .flatMap { $0.timeMapping } ?? []
})
Once you have timeMappings of the original media (the one sitting in your documents directory), you can pass in the URL of that media and the original CMTimeMapping objects that you would like to recreate. Then create a new AVComposition that is ready to play in an AVPlayer. You'll need a class similar to this:
class CompositionMapper {
    let url: URL
    let timeMappings: [CMTimeMapping]
    init(for url: URL, with timeMappings: [CMTimeMapping]) {
        self.url = url
        self.timeMappings = timeMappings
    }
    init(with asset: AVAsset, and timeMappings: [CMTimeMapping]) {
        guard let asset = asset as? AVURLAsset else {
            print("cannot get a base URL from this asset.")
            fatalError()
        }
        self.timeMappings = timeMappings
        self.url = asset.url
    }
    func compose() -> AVComposition {
        let composition = AVMutableComposition(urlAssetInitializationOptions: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
        let emptyTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let asset = AVAsset(url: url)
        guard let videoAssetTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else { return composition }
        var segments: [AVCompositionTrackSegment] = []
        for map in timeMappings {
            let segment = AVCompositionTrackSegment(url: url, trackID: kCMPersistentTrackID_Invalid, sourceTimeRange: map.source, targetTimeRange: map.target)
            segments.append(segment)
        }
        emptyTrack.preferredTransform = videoAssetTrack.preferredTransform
        emptyTrack.segments = segments
        // Only map audio segments if the source actually has an audio track
        if let _ = asset.tracks(withMediaType: AVMediaTypeAudio).first {
            audioTrack.segments = segments
        }
        return composition.copy() as! AVComposition
    }
}
You can then use the compose() function of your CompositionMapper class to give you an AVComposition that is ready to play in an AVPlayer, which should respect the CMTimeMapping objects that you've passed in.
let compositionMapper = CompositionMapper(for: someAVAssetURL, with: originalTimeMaps)
let mappedComposition = compositionMapper.compose()
let playerItem = AVPlayerItem(asset: mappedComposition)
let player = AVPlayer(playerItem: playerItem)
playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
Let me know if you need help converting this to Objective-C, but it should be relatively straightforward.

Issue playing slow-mo AVAsset in AVPlayer

I'm trying to play a slow motion video (filmed by the user's iPhone) in an AVPlayer.
I am retrieving the AVAsset with a request on a PHAsset from a picker:
[manager requestAVAssetForVideo:PHAsset
options:videoRequestOptions
resultHandler:^(AVAsset * avasset, AVAudioMix * audioMix, NSDictionary * info) {}];
The problem is once it plays, I get this error:
-[AVComposition URL]: unrecognized selector sent to instance 0x138d17f40
However, if I set this option on the manager request, it will play as a normal-speed video at 120/240 fps with no crashes:
videoRequestOptions.version = PHVideoRequestOptionsVersionOriginal;
What's going on? The default version property is PHVideoRequestOptionsVersionCurrent, which incorporates slow motion, user edits and trims, etc.
I would like to play that video version. Thanks.
So it turns out that slow motion videos are passed as an AVComposition.
You can export that into a video file / URL, and then handle it like any other video.
Solution here: https://overflow.buffer.com/2016/02/29/slow-motion-video-ios/
//Output URL
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = paths.firstObject;
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeSlowMoVideo-%d.mov", arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
//Begin slow mo video export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            NSURL *URL = exporter.outputURL;
            NSData *videoData = [NSData dataWithContentsOfURL:URL];
            // Upload
            [self uploadSelectedVideo:video data:videoData];
        }
    });
}];
For those coming here looking for a Swift answer, this is the Swift translation, which I use in my project where I need the URL of the slow-motion video to play it with the AVPlayerViewController:
else if asset is AVComposition {
    //Slow-Motion Assets are passed as AVComposition
    let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let documentsDirectory: NSString? = paths.first as NSString?
    if documentsDirectory != nil {
        let random = Int(arc4random() % 1000)
        let pathToAppend = String(format: "mergeSlowMoVideo-%d.mov", random)
        let myPathDocs = documentsDirectory!.strings(byAppendingPaths: [pathToAppend])
        let myPath = myPathDocs.first
        if myPath != nil {
            let url = URL(fileURLWithPath: myPath!)
            let exporter = AVAssetExportSession(asset: asset!, presetName: AVAssetExportPresetHighestQuality)
            if exporter != nil {
                exporter!.outputURL = url
                exporter!.outputFileType = AVFileTypeQuickTimeMovie
                exporter!.shouldOptimizeForNetworkUse = true
                exporter!.exportAsynchronously(completionHandler: {
                    AsyncUtil.asyncMain {
                        let url = exporter!.outputURL
                        if url != nil {
                            let player = AVPlayer(url: url!)
                            let playerViewController = AVPlayerViewController()
                            playerViewController.player = player
                            playerViewController.modalTransitionStyle = .crossDissolve
                            view.present(playerViewController, animated: true) {
                                playerViewController.player!.play()
                            }
                        }
                    }
                })
            }
        }
    }
}
Playing slo-mo & library video in Swift 4 and above with a custom view:
var vcPlayer = AVPlayerViewController()
var player = AVPlayer()
func playallVideo(_ customView: UIView, asset: PHAsset) {
    guard asset.mediaType == .video
    else {
        print("Not a valid video media type")
        return
    }
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    PHCachingImageManager().requestPlayerItem(forVideo: asset, options: options) { (playerItem, info) in
        DispatchQueue.main.async {
            self.player = AVPlayer(playerItem: playerItem)
            self.vcPlayer.player = self.player
            self.vcPlayer.view.frame = customView.bounds
            self.vcPlayer.videoGravity = .resizeAspectFill
            self.vcPlayer.showsPlaybackControls = true
            //self.vcPlayer.allowsPictureInPicturePlayback = true
            self.playerView.addSubview(self.vcPlayer.view)
            self.player.play()
        }
    }
}
/**********Function Call ********/
self.playallVideo(self.playerView/*YourCustomView*/, asset: currentAssetArr[currentIndex]/*Current PHAsset Fetched from Library*/)
:) enjoy
Looking for an answer in Swift?
Here's how I did it.
Creating a "slow motion" video in iOS Swift is not easy; I came across many "slow motion" solutions that either didn't work or used deprecated code. So I finally figured out a way to make slow motion in Swift.
This code can be used for 120 fps and greater frame rates too.
Here is the code snippet I created for achieving slow motion.
Give me an upvote if this code works.
func slowMotion(pathUrl: URL) {
    let videoAsset = AVURLAsset.init(url: pathUrl, options: nil)
    let currentAsset = AVAsset.init(url: pathUrl)
    let vdoTrack = currentAsset.tracks(withMediaType: .video)[0]
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try compositionVideoTrack?.insertTimeRange(
            CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
            of: videoAsset.tracks(withMediaType: .video)[0],
            at: .zero)
    } catch {
        //handle error
        return
    }
    var duration: CMTime = .zero
    duration = CMTimeAdd(duration, currentAsset.duration)
    //MARK: This constant (videoScaleFactor) is what achieves the slow motion: scaling the time range stretches the video over a longer duration.
    // Just increase the videoScaleFactor value to play the video at a slower rate.
    let videoScaleFactor = 2.0
    let videoDuration = videoAsset.duration
    compositionVideoTrack?.scaleTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoDuration),
        toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
    compositionVideoTrack?.preferredTransform = vdoTrack.preferredTransform
    let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
    let docsDir = dirPaths[0]
    let outputFilePath = URL(fileURLWithPath: docsDir).appendingPathComponent("slowMotion\(UUID().uuidString).mp4").path
    if FileManager.default.fileExists(atPath: outputFilePath) {
        do {
            try FileManager.default.removeItem(atPath: outputFilePath)
        } catch {
        }
    }
    let filePath = URL(fileURLWithPath: outputFilePath)
    let assetExport = AVAssetExportSession(
        asset: mixComposition,
        presetName: AVAssetExportPresetHighestQuality)
    assetExport?.outputURL = filePath
    assetExport?.outputFileType = .mp4
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport?.status {
        case .failed:
            print("asset output media url = \(String(describing: assetExport?.outputURL))")
            print("Export session failed with error: \(String(describing: assetExport?.error))")
            DispatchQueue.main.async(execute: {
                // completion(nil);
            })
        case .completed:
            print("Successful")
            let outputURL = assetExport!.outputURL
            print("url path = \(String(describing: outputURL))")
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
            }) { saved, error in
                if saved {
                    print("video successfully saved in photos gallery")
                }
                if error != nil {
                    print("error in saving video \(String(describing: error?.localizedDescription))")
                }
            }
            DispatchQueue.main.async(execute: {
                // completion(_filePath);
            })
        default:
            break
        }
    })
}
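Note that the snippet above only inserts and scales the video track, so the exported file will have no sound. A sketch of the matching audio handling, placed before the export (assuming the source asset actually has an audio track; videoDuration and videoScaleFactor are the values from the function above):

if let audioAssetTrack = currentAsset.tracks(withMediaType: .audio).first {
    let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
    try? compositionAudioTrack?.insertTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
        of: audioAssetTrack,
        at: .zero)
    // Scale the audio by the same factor so it stays in sync with the video
    compositionAudioTrack?.scaleTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoDuration),
        toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
}

When playing the result, setting audioTimePitchAlgorithm on the AVPlayerItem (for example to AVAudioTimePitchAlgorithmVarispeed, as in an earlier answer) controls how the stretched audio sounds.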

AVMutableAudioMixInputParameters: setVolume() doesn't work with audio file iOS 9

I'm trying to mix a video recording with an audio file from the iPod library of the device.
I want to set the volume of each audio track (the audio of the video and the audio file).
I'm trying to use an AVMutableAudioMixInputParameters object with the setVolume() method.
I don't have any problem with the volume of the video, but the volume of the audio file on the final recording is always set at max. For testing I replaced the audio file with the audio track of another video, and that works fine.
import UIKit
import AVFoundation
class AVTools: NSObject {
    /**
     volume: between 1.0 and 0.0
     */
    class func mergeVideoAndMusicWithVolume(videoURL: NSURL, audioURL: NSURL, startAudioTime: Float64, volumeVideo: Float, volumeAudio: Float, complete: (NSURL?) -> Void) -> Void {
        //The goal is merging a video and music from the iPod library, and setting a volume on each
        //Get the path of the App Document Directory
        let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let docsDir = dirPaths[0] as String
        //Create Assets from the record and the music
        let assetVideo: AVURLAsset = AVURLAsset(URL: videoURL)
        let assetMusic: AVURLAsset = AVURLAsset(URL: audioURL)
        let composition: AVMutableComposition = AVMutableComposition()
        let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let compositionAudioMusic: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        //Add video to the final record
        do {
            try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideo.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
        } catch _ {
        }
        //Extract audio from the video and the music
        let audioMix: AVMutableAudioMix = AVMutableAudioMix()
        var audioMixParam: [AVMutableAudioMixInputParameters] = []
        let assetVideoTrack: AVAssetTrack = assetVideo.tracksWithMediaType(AVMediaTypeAudio)[0]
        let assetMusicTrack: AVAssetTrack = assetMusic.tracksWithMediaType(AVMediaTypeAudio)[0]
        let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
        videoParam.trackID = assetVideoTrack.trackID
        let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
        musicParam.trackID = assetMusicTrack.trackID
        //Set final volume of the audio record and the music
        videoParam.setVolume(volumeVideo, atTime: kCMTimeZero)
        musicParam.setVolume(volumeAudio, atTime: kCMTimeZero) // <----- This doesn't work on the audio file
        //Add settings
        audioMixParam.append(musicParam)
        audioMixParam.append(videoParam)
        //Add audio on final record
        //First: the audio of the record and Second: the music
        do {
            try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideoTrack, atTime: kCMTimeZero)
        } catch _ {
            assertionFailure()
        }
        do {
            try compositionAudioMusic.insertTimeRange(CMTimeRangeMake(CMTimeMake(Int64(startAudioTime * 10000), 10000), assetVideo.duration), ofTrack: assetMusicTrack, atTime: kCMTimeZero)
        } catch _ {
            assertionFailure()
        }
        //Add parameters
        audioMix.inputParameters = audioMixParam
        //Remove the previous temp video if it exists
        let filemgr = NSFileManager.defaultManager()
        do {
            if filemgr.fileExistsAtPath("\(docsDir)/movie-merge-music.mov") {
                try filemgr.removeItemAtPath("\(docsDir)/movie-merge-music.mov")
            } else {
            }
        } catch _ {
        }
        //Export the final record
        let completeMovie = "\(docsDir)/movie-merge-music.mov"
        let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
        let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
        exporter.outputURL = completeMovieUrl
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.audioMix = audioMix
        exporter.exportAsynchronouslyWithCompletionHandler({
            switch exporter.status {
            case AVAssetExportSessionStatus.Failed:
                print("failed \(exporter.error)")
                complete(nil)
            case AVAssetExportSessionStatus.Cancelled:
                print("cancelled \(exporter.error)")
                complete(nil)
            default:
                print("complete")
                complete(completeMovieUrl)
            }
        })
    }
}
OK, I've found the problem.
The problem is that I assigned the trackID of the asset track and not the trackID of the composition track.
To fix it, just replace:
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = assetVideoTrack.trackID
let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
musicParam.trackID = assetMusicTrack.trackID
to:
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
musicParam.trackID = compositionAudioMusic.trackID
the final result:
/**
 volume: between 1.0 and 0.0
 */
class func mergeVideoAndMusicWithVolume(videoURL: NSURL, audioURL: NSURL, startAudioTime: Float64, volumeVideo: Float, volumeAudio: Float, complete: (NSURL?) -> Void) -> Void {
    //The goal is merging a video and music from the iPod library, and setting a volume on each
    //Get the path of the App Document Directory
    let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    let docsDir = dirPaths[0] as String
    //Create Assets from the record and the music
    let assetVideo: AVURLAsset = AVURLAsset(URL: videoURL)
    let assetMusic: AVURLAsset = AVURLAsset(URL: audioURL)
    let composition: AVMutableComposition = AVMutableComposition()
    let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    let compositionAudioMusic: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    //Add video to the final record
    do {
        try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideo.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
    } catch _ {
    }
    //Extract audio from the video and the music
    let audioMix: AVMutableAudioMix = AVMutableAudioMix()
    var audioMixParam: [AVMutableAudioMixInputParameters] = []
    let assetVideoTrack: AVAssetTrack = assetVideo.tracksWithMediaType(AVMediaTypeAudio)[0]
    let assetMusicTrack: AVAssetTrack = assetMusic.tracksWithMediaType(AVMediaTypeAudio)[0]
    // Point the mix parameters at the composition tracks' IDs, not the source assets'
    let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
    videoParam.trackID = compositionAudioVideo.trackID
    let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
    musicParam.trackID = compositionAudioMusic.trackID
    //Set final volume of the audio record and the music
    videoParam.setVolume(volumeVideo, atTime: kCMTimeZero)
    musicParam.setVolume(volumeAudio, atTime: kCMTimeZero)
    //Add settings
    audioMixParam.append(musicParam)
    audioMixParam.append(videoParam)
    //Add audio on final record
    //First: the audio of the record and Second: the music
    do {
        try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideoTrack, atTime: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }
    do {
        try compositionAudioMusic.insertTimeRange(CMTimeRangeMake(CMTimeMake(Int64(startAudioTime * 10000), 10000), assetVideo.duration), ofTrack: assetMusicTrack, atTime: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }
    //Add parameters
    audioMix.inputParameters = audioMixParam
    //Remove the previous temp video if it exists
    let filemgr = NSFileManager.defaultManager()
    do {
        if filemgr.fileExistsAtPath("\(docsDir)/movie-merge-music.mov") {
            try filemgr.removeItemAtPath("\(docsDir)/movie-merge-music.mov")
        } else {
        }
    } catch _ {
    }
    //Export the final record
    let completeMovie = "\(docsDir)/movie-merge-music.mov"
    let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
    exporter.outputURL = completeMovieUrl
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.audioMix = audioMix
    exporter.exportAsynchronouslyWithCompletionHandler({
        switch exporter.status {
        case AVAssetExportSessionStatus.Failed:
            print("failed \(exporter.error)")
            complete(nil)
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(exporter.error)")
            complete(nil)
        default:
            print("complete")
            complete(completeMovieUrl)
        }
    })
}
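A hypothetical call site for the fixed method (videoURL and musicURL are made-up NSURLs, and the volume levels are illustrative; the completion receives nil on failure):

AVTools.mergeVideoAndMusicWithVolume(videoURL, audioURL: musicURL,
    startAudioTime: 0, volumeVideo: 1.0, volumeAudio: 0.3) { outputURL in
    // Play or share outputURL here
}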
