I am trying to merge a processed audio URL with a video URL. I processed the audio to change its pitch using the audioEngine.renderOffline method, but the output audio file returns nil for audioAsset.tracks(withMediaType: .audio).first (audioAsset.metadata is empty as well). Because of this nil value, I am not able to merge the video and audio.
Note: I am able to share the processed audio file and it plays fine there, but the same file does not play in AVAudioPlayer.
I also tried processing the audio using the installTap(onBus:) method, but I didn't get a proper output file every time.
Please help me fix the above error.
Code is given below:
func extractAudio(url: URL) {
    // Create a composition
    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: url)
    do {
        guard let audioAssetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else { return }
        guard let audioCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
        try audioCompositionTrack.insertTimeRange(audioAssetTrack.timeRange, of: audioAssetTrack, at: CMTime.zero)
    } catch {
        print(error)
    }
    // Get url for output
    let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
    if FileManager.default.fileExists(atPath: outputUrl.path) {
        try? FileManager.default.removeItem(atPath: outputUrl.path)
    }
    // Create an export session
    let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)!
    exportSession.outputFileType = AVFileType.m4a
    exportSession.outputURL = outputUrl
    exportSession.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
    // Export file
    exportSession.exportAsynchronously {
        guard exportSession.status == .completed else { return }
        self.sourceFile = try! AVAudioFile(forReading: outputUrl)
        self.format = self.sourceFile.processingFormat
        self.playAndRecord(pitch: -500, rate: 1.0, reverb: 10, echo: 1.0)
    }
}
func playAndRecord(pitch: Float, rate: Float, reverb: Float, echo: Float) {
    let engine = AVAudioEngine()
    let player = AVAudioPlayerNode()
    let reverbEffect = AVAudioUnitReverb()
    let pitchEffect = AVAudioUnitTimePitch()
    let playbackRateEffect = AVAudioUnitVarispeed()
    engine.attach(player)
    engine.attach(reverbEffect)
    engine.attach(pitchEffect)
    engine.attach(playbackRateEffect)
    // Set the desired reverb parameters.
    reverbEffect.loadFactoryPreset(.mediumHall)
    reverbEffect.wetDryMix = reverb
    pitchEffect.pitch = pitch
    playbackRateEffect.rate = rate
    // Connect the nodes.
    engine.connect(player, to: reverbEffect, format: format)
    engine.connect(reverbEffect, to: pitchEffect, format: format)
    engine.connect(pitchEffect, to: playbackRateEffect, format: format)
    engine.connect(playbackRateEffect, to: engine.mainMixerNode, format: format)
    // Schedule the source file.
    player.scheduleFile(sourceFile, at: nil)
    do {
        // The maximum number of frames the engine renders in any single render call.
        let maxFrames: AVAudioFrameCount = 4096
        try engine.enableManualRenderingMode(.offline, format: format,
                                             maximumFrameCount: maxFrames)
    } catch {
        fatalError("Enabling manual rendering mode failed: \(error).")
    }
    do {
        try engine.start()
        player.play()
    } catch {
        fatalError("Unable to start audio engine: \(error).")
    }
    // The output buffer to which the engine renders the processed data.
    let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                  frameCapacity: engine.manualRenderingMaximumFrameCount)!
    var outputFile: AVAudioFile
    do {
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let outputURL = documentsURL.appendingPathComponent("EDittedFile.m4a")
        outputFile = try AVAudioFile(forWriting: outputURL, settings: sourceFile.fileFormat.settings, commonFormat: .pcmFormatInt32, interleaved: true)
    } catch {
        fatalError("Unable to open output audio file: \(error).")
    }
    // Process the file.
    while engine.manualRenderingSampleTime < sourceFile.length {
        do {
            let frameCount = sourceFile.length - engine.manualRenderingSampleTime
            let framesToRender = min(AVAudioFrameCount(frameCount), buffer.frameCapacity)
            let status = try engine.renderOffline(framesToRender, to: buffer)
            switch status {
            case .success:
                // The data rendered successfully. Write it to the output file.
                try outputFile.write(from: buffer)
            case .insufficientDataFromInputNode:
                // Applicable only when using the input node as one of the sources.
                break
            case .cannotDoInCurrentContext:
                // The engine couldn't render in the current render call.
                // Retry in the next iteration.
                break
            case .error:
                // An error occurred while rendering the audio.
                fatalError("The manual rendering failed.")
            }
        } catch {
            fatalError("The manual rendering failed: \(error).")
        }
    }
    print("finished")
    let asset = AVURLAsset(url: outputFile.url)
    print(asset.tracks(withMediaType: .audio))
    player.stop()
    engine.stop()
}
Thanks in advance.
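A likely cause, judging by the similar .m4a problem answered further down this page: AVAudioFile finalizes its container header only when the file object is deallocated. In playAndRecord the asset is inspected while outputFile is still alive, so the .m4a on disk has no valid header yet and AVURLAsset reports no audio tracks. Below is a minimal sketch of the fix, assuming the engine is already in offline manual rendering mode with the source file scheduled on a player node as in the question (the function name is illustrative):
import AVFoundation

// Minimal sketch: render the engine's offline output to outputURL, then release
// the AVAudioFile so its header is finalized before the asset is read back.
func renderToClosedFile(engine: AVAudioEngine, sourceFile: AVAudioFile, outputURL: URL) throws {
    let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                  frameCapacity: engine.manualRenderingMaximumFrameCount)!
    // Optional, so the last reference can be dropped explicitly.
    var outputFile: AVAudioFile? = try AVAudioFile(forWriting: outputURL,
                                                   settings: sourceFile.fileFormat.settings)
    while engine.manualRenderingSampleTime < sourceFile.length {
        let framesLeft = AVAudioFrameCount(sourceFile.length - engine.manualRenderingSampleTime)
        let status = try engine.renderOffline(min(framesLeft, buffer.frameCapacity), to: buffer)
        if status == .success {
            try outputFile?.write(from: buffer)
        }
    }
    outputFile = nil // releasing the file flushes the header and closes it
    print(AVURLAsset(url: outputURL).tracks(withMediaType: .audio)) // should list one track now
}
The same applies to any AVAudioFile held in a property: drop the reference before handing the URL to AVURLAsset or AVAudioPlayer.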
I want to convert MKV (Matroska) to MP4 in Swift.
When I add a file in MKV format, my code breaks at line 8. What should I do to fix that?
This is my code:
let composition = AVMutableComposition()
do {
    let sourceUrl = Bundle.main.url(forResource: "sample", withExtension: "mov")!
    let asset = AVURLAsset(url: sourceUrl)
    // line 8 -> here
    guard let videoAssetTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return }
    guard let videoCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    try videoCompositionTrack.insertTimeRange(videoAssetTrack.timeRange, of: videoAssetTrack, at: CMTime.zero)
} catch {
    print(error)
}
// Create an export session
let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputFileType = AVFileType.mp4
exportSession.outputURL = browseURL
// Export file
exportSession.exportAsynchronously {
    guard exportSession.status == .completed else { return }
    DispatchQueue.main.async {
        // Present a UIActivityViewController to share the file
        print("completed")
    }
}
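As for why line 8 breaks with an .mkv input: AVFoundation does not read Matroska containers, so asset.tracks(withMediaType: .video) comes back empty and the guard returns. The file has to be transcoded by something else (FFmpeg, for example) before AVFoundation can compose or export it. A small hedged check, with inputURL standing in for the .mkv file's URL:
import AVFoundation

// Sketch: confirm AVFoundation can read the container before building a composition.
func canCompose(inputURL: URL) -> Bool {
    let asset = AVURLAsset(url: inputURL)
    guard asset.isReadable else {
        print("AVFoundation cannot read this container; transcode it first (e.g. with FFmpeg).")
        return false
    }
    return !asset.tracks(withMediaType: .video).isEmpty
}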
I'd like to merge two audio files without too much overhead time. The following code successfully merges the audio but takes way too long (over 30 seconds for more than a couple of minutes of audio), and I'm wondering whether there is any way to speed that process up. I read in a couple of places to use AVAssetExportPresetPassthrough, but I can't get that preset to work with any file type. The only settings I've been able to get working are AVAssetExportPresetAppleM4A, exporting as an .m4a.
Code to create AVAssetExportSession
if audioHasBeenRecorded {
    // Merge recordings
    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
    guard let start = recorder.timeStart else { return }
    compositionAudioTrack?.insert(originalRecording: FileManagerHelper.recordingLocalURL(secondRecording: false),
                                  insertedRecording: FileManagerHelper.recordingLocalURL(secondRecording: true),
                                  startTime: CMTime(seconds: start, preferredTimescale: 1000000))
    if let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
        print(assetExport.supportedFileTypes)
        assetExport.outputFileType = AVFileType.m4a
        assetExport.outputURL = FileManagerHelper.recordingLocalURL(secondRecording: false)
        do { // Delete the old audio files
            try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: false))
            try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: true))
        } catch { log(error.localizedDescription, msgType: .error) }
        assetExport.exportAsynchronously(completionHandler: {
            if let error = assetExport.error {
                log(error, msgType: .error)
            } else {
                log("Successfully merged recordings!", msgType: .error)
                self.idea.numberOfPoints = self.audioVisualizer.count
                self.idea.save()
                self.setupPlayer() // Prepare to play the recorded audio file
                self.seekTo(TimeInterval((recorder.timeStart ?? 0) + (recorder.timeEnd ?? 0)))
                DispatchQueue.main.async { [weak self] in
                    guard let self = self else { return }
                    self.audioVisualizer.visualize(self.idea)
                }
            }
        })
    }
}
Insert code:
extension AVMutableCompositionTrack {
    func insert(originalRecording: URL, insertedRecording: URL, startTime: CMTime) {
        let originalAsset = AVURLAsset(url: originalRecording)
        let insertedAsset = AVURLAsset(url: insertedRecording)
        let range1 = CMTimeRangeMake(start: CMTime.zero, duration: startTime)
        let range2 = CMTimeRangeMake(start: CMTime.zero, duration: insertedAsset.duration)
        let range3 = CMTimeRangeMake(start: startTime + insertedAsset.duration, duration: originalAsset.duration - startTime)
        if let originalTrack = originalAsset.tracks(withMediaType: AVMediaType.audio).first,
           let insertedTrack = insertedAsset.tracks(withMediaType: AVMediaType.audio).first {
            try? insertTimeRange(range1, of: originalTrack, at: CMTime.zero)
            try? insertTimeRange(range2, of: insertedTrack, at: startTime)
            try? insertTimeRange(range3, of: originalTrack, at: startTime + insertedAsset.duration)
        }
    }
}
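One way to attack the passthrough problem without guessing: ask AVFoundation which containers the passthrough preset actually supports for this composition, then export to one of those. A minimal sketch follows (exportWithPassthrough is an illustrative name; my assumption is that passthrough over an AAC composition typically supports QuickTime-style containers rather than .m4a, which would explain the failures):
import AVFoundation

// Sketch: discover a compatible output container for the passthrough preset,
// then export without re-encoding, which is much faster than AppleM4A.
func exportWithPassthrough(composition: AVMutableComposition, outputURL: URL) {
    AVAssetExportSession.determineCompatibleFileTypes(withExportPreset: AVAssetExportPresetPassthrough,
                                                      asset: composition) { fileTypes in
        guard let fileType = fileTypes.first,
              let session = AVAssetExportSession(asset: composition,
                                                 presetName: AVAssetExportPresetPassthrough) else {
            print("Passthrough is not available for this composition")
            return
        }
        session.outputFileType = fileType
        session.outputURL = outputURL
        session.exportAsynchronously {
            print("Passthrough export status: \(session.status.rawValue)")
        }
    }
}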
TLDR: Skip to the updates. I am looking for a way to compress or lower the quality of video output, preferably after creation rather than during it, but if that is the only way then so be it.
Also, if you know of any good CocoaPods that can accomplish this, that would be good.
Update 3:
I am looking for a function which can output the compressed URL, and I should be able to control the compression quality...
Update 2:
After trying the function in its current state, it does not work, yielding nil. I think this is a result of the following:
let outputURL = urlToCompress
assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
I am trying to compress video in Swift. So far, all the solutions I have found apply during creation. I am wondering whether there is a way to compress after creation, using only the video URL?
If not, how can I write a compression function that compresses the video and returns the compressed URL?
The code I have been working with:
func compressVideo(videoURL: URL) -> URL {
    let data = NSData(contentsOf: videoURL as URL)!
    print("File size before compression: \(Double(data.length / 1048576)) mb")
    let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".mov")
    compressVideoHelperMethod(inputURL: videoURL, outputURL: compressedURL) { (exportSession) in
    }
    // Note: this returns before the asynchronous export finishes, so the file at
    // compressedURL may not exist yet when the caller uses it.
    return compressedURL
}
func compressVideoHelperMethod(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let urlAsset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
        handler(nil)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mov
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.exportAsynchronously { () -> Void in
        handler(exportSession)
    }
}
Update:
So I have found the code below. I have yet to test it, but I don't know how to make it so that I can choose the quality of the compression:
var assetWriter:AVAssetWriter?
var assetReader:AVAssetReader?
let bitrate:NSNumber = NSNumber(value:250000)
func compressFile(urlToCompress: URL, outputURL: URL, completion:#escaping (URL)->Void){
//video file to make the asset
var audioFinished = false
var videoFinished = false
let asset = AVAsset(url: urlToCompress);
let duration = asset.duration
let durationTime = CMTimeGetSeconds(duration)
print("Video Actual Duration -- \(durationTime)")
//create asset reader
do{
assetReader = try AVAssetReader(asset: asset)
} catch{
assetReader = nil
}
guard let reader = assetReader else{
fatalError("Could not initalize asset reader probably failed its try catch")
}
let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first!
let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first!
let videoReaderSettings: [String:Any] = [(kCVPixelBufferPixelFormatTypeKey as String?)!:kCVPixelFormatType_32ARGB ]
// ADJUST BIT RATE OF VIDEO HERE
if #available(iOS 11.0, *) {
let videoSettings:[String:Any] = [
AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey:self.bitrate],
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoHeightKey: videoTrack.naturalSize.height,
AVVideoWidthKey: videoTrack.naturalSize.width
]
} else {
// Fallback on earlier versions
}
let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
if reader.canAdd(assetReaderVideoOutput){
reader.add(assetReaderVideoOutput)
}else{
fatalError("Couldn't add video output reader")
}
if reader.canAdd(assetReaderAudioOutput){
reader.add(assetReaderAudioOutput)
}else{
fatalError("Couldn't add audio output reader")
}
let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoReaderSettings)
videoInput.transform = videoTrack.preferredTransform
//we need to add samples to the video input
let videoInputQueue = DispatchQueue(label: "videoQueue")
let audioInputQueue = DispatchQueue(label: "audioQueue")
do{
assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
}catch{
assetWriter = nil
}
guard let writer = assetWriter else{
fatalError("assetWriter was nil")
}
writer.shouldOptimizeForNetworkUse = true
writer.add(videoInput)
writer.add(audioInput)
writer.startWriting()
reader.startReading()
writer.startSession(atSourceTime: CMTime.zero)
let closeWriter:()->Void = {
if (audioFinished && videoFinished){
self.assetWriter?.finishWriting(completionHandler: {
print("------ Finish Video Compressing")
completion((self.assetWriter?.outputURL)!)
})
self.assetReader?.cancelReading()
}
}
audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
while(audioInput.isReadyForMoreMediaData){
let sample = assetReaderAudioOutput.copyNextSampleBuffer()
if (sample != nil){
audioInput.append(sample!)
}else{
audioInput.markAsFinished()
DispatchQueue.main.async {
audioFinished = true
closeWriter()
}
break;
}
}
}
videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
//request data here
while(videoInput.isReadyForMoreMediaData){
let sample = assetReaderVideoOutput.copyNextSampleBuffer()
if (sample != nil){
let timeStamp = CMSampleBufferGetPresentationTimeStamp(sample!)
let timeSecond = CMTimeGetSeconds(timeStamp)
let per = timeSecond / durationTime
print("Duration --- \(per)")
videoInput.append(sample!)
}else{
videoInput.markAsFinished()
DispatchQueue.main.async {
videoFinished = true
closeWriter()
}
break;
}
}
}
}
How can I change this so that I can set the quality? I am looking for compression of about 0.6.
I am now playing around with the following code; the issue is that it keeps printing the error (it does not seem to work):
func convertVideoToLowQuality(withInputURL inputURL: URL?, outputURL: URL?, handler: @escaping (AVAssetExportSession?) -> Void) {
    do {
        if let outputURL = outputURL {
            try FileManager.default.removeItem(at: outputURL)
        }
    } catch {
    }
    var asset: AVURLAsset? = nil
    if let inputURL = inputURL {
        asset = AVURLAsset(url: inputURL, options: nil)
    }
    var exportSession: AVAssetExportSession? = nil
    if let asset = asset {
        exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)
    }
    exportSession?.outputURL = outputURL
    exportSession?.outputFileType = .mov
    exportSession?.exportAsynchronously(completionHandler: {
        handler(exportSession)
    })
}
func compressVideo(videoURL: URL) -> URL {
    var outputURL = URL(fileURLWithPath: "/Users/alexramirezblonski/Desktop/output.mov")
    convertVideoToLowQuality(withInputURL: videoURL, outputURL: outputURL, handler: { exportSession in
        print("fdshljfhdlasjkfdhsfsdljk")
        if exportSession?.status == .completed {
            print("completed\n", exportSession!.outputURL!)
            outputURL = exportSession!.outputURL!
        } else {
            print("error\n")
            outputURL = exportSession!.outputURL! // this needs to be fixed and may cause errors
        }
    })
    // Note: this returns before the asynchronous export completes.
    return outputURL
}
I have looked at your code. You are compressing the video with the medium-quality preset, which produces output around the same quality as your original video. To compress it further, change the presetName in the export session initialization as follows:
exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality)
Passing AVAssetExportPresetLowQuality instead of AVAssetExportPresetMediumQuality should compress the video as you expect.
The following presets are available for compressing video:
1. Available from iOS 11.0
AVAssetExportPresetHEVCHighestQuality
AVAssetExportPresetHEVC1920x1080
AVAssetExportPresetHEVC3840x2160
2. Available from iOS 4.0
AVAssetExportPresetLowQuality
AVAssetExportPresetMediumQuality
AVAssetExportPresetHighestQuality
AVAssetExportPreset640x480
AVAssetExportPreset960x540
AVAssetExportPreset1280x720
AVAssetExportPreset1920x1080
AVAssetExportPreset3840x2160
AVAssetExportPresetAppleM4A
You can use any of the above presets to compress your video, based on your requirements.
I hope this helps.
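Putting that together, here is a minimal sketch of a preset-based helper (the function name and completion shape are illustrative, not from the question; pick whichever preset from the list above matches your size/quality target):
import AVFoundation

// Sketch: export inputURL with a chosen quality preset and hand back the result.
func compress(inputURL: URL,
              preset: String = AVAssetExportPresetLowQuality,
              completion: @escaping (URL?) -> Void) {
    let asset = AVURLAsset(url: inputURL)
    guard let session = AVAssetExportSession(asset: asset, presetName: preset) else {
        completion(nil) // the preset is not supported for this asset
        return
    }
    let outputURL = FileManager.default.temporaryDirectory
        .appendingPathComponent(UUID().uuidString)
        .appendingPathExtension("mov")
    session.outputURL = outputURL
    session.outputFileType = .mov
    session.shouldOptimizeForNetworkUse = true
    session.exportAsynchronously {
        // Deliver the URL only after the export has actually finished.
        completion(session.status == .completed ? outputURL : nil)
    }
}
Handing the URL back in a completion like this also sidesteps the earlier compressVideo bug of returning the URL before the asynchronous export completes.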
If you want more customizable compression filters using AVAssetWriter, consider this library that I wrote. You can compress the video with general quality settings or with more detailed filters like bitrate, fps, scale, and more.
FYVideoCompressor().compressVideo(yourVideoPath, quality: .lowQuality) { result in
    switch result {
    case .success(let compressedVideoURL):
        print(compressedVideoURL)
    case .failure(let error):
        print(error)
    }
}
or with more custom configuration:
let config = FYVideoCompressor.CompressionConfig(videoBitrate: 1000_000,
                                                 videomaxKeyFrameInterval: 10,
                                                 fps: 24,
                                                 audioSampleRate: 44100,
                                                 audioBitrate: 128_000,
                                                 fileType: .mp4,
                                                 scale: CGSize(width: 640, height: 480))
FYVideoCompressor().compressVideo(yourVideoPath, config: config) { result in
    switch result {
    case .success(let compressedVideoURL):
        print(compressedVideoURL)
    case .failure(let error):
        print(error)
    }
}
More: Batch compression is now supported.
I want to record an audio file and save it, applying some effects.
Recording is okay, and playing the audio with the effect applied is okay too.
The problem is that when I try to save such audio offline, it produces an empty audio file.
Here is my code:
let effect = AVAudioUnitTimePitch()
effect.pitch = -300
self.addSomeEffect(effect)
func addSomeEffect(_ effect: AVAudioUnit) {
    try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
    let format = self.audioFile.processingFormat
    self.audioEngine.stop()
    self.audioEngine.reset()
    self.audioEngine = AVAudioEngine()
    let audioPlayerNode = AVAudioPlayerNode()
    self.audioEngine.attach(audioPlayerNode)
    self.audioEngine.attach(effect)
    self.audioEngine.connect(audioPlayerNode, to: self.audioEngine.mainMixerNode, format: format)
    self.audioEngine.connect(effect, to: self.audioEngine.mainMixerNode, format: format)
    audioPlayerNode.scheduleFile(self.audioFile, at: nil)
    do {
        let maxNumberOfFrames: AVAudioFrameCount = 8096
        try self.audioEngine.enableManualRenderingMode(.offline,
                                                       format: format,
                                                       maximumFrameCount: maxNumberOfFrames)
    } catch {
        fatalError()
    }
    do {
        try audioEngine.start()
        audioPlayerNode.play()
    } catch {
    }
    let outputFile: AVAudioFile
    do {
        let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("temp.m4a")
        if FileManager.default.fileExists(atPath: url.path) {
            try? FileManager.default.removeItem(at: url)
        }
        let recordSettings = self.audioFile.fileFormat.settings
        outputFile = try AVAudioFile(forWriting: url, settings: recordSettings)
    } catch {
        fatalError()
    }
    let buffer = AVAudioPCMBuffer(pcmFormat: self.audioEngine.manualRenderingFormat,
                                  frameCapacity: self.audioEngine.manualRenderingMaximumFrameCount)!
    while self.audioEngine.manualRenderingSampleTime < self.audioFile.length {
        do {
            let framesToRender = min(buffer.frameCapacity,
                                     AVAudioFrameCount(self.audioFile.length - self.audioEngine.manualRenderingSampleTime))
            let status = try self.audioEngine.renderOffline(framesToRender, to: buffer)
            switch status {
            case .success:
                print("Write to file")
                try outputFile.write(from: buffer)
            case .error:
                fatalError()
            default:
                break
            }
        } catch {
            fatalError()
        }
    }
    print("Finish write")
    audioPlayerNode.stop()
    audioEngine.stop()
    self.outputFile = outputFile
    self.audioPlayer = try? AVAudioPlayer(contentsOf: outputFile.url)
}
AVAudioPlayer fails to open the file at the output URL. I looked at the file through the file system, and it is empty and can't be played.
Picking different categories for AVAudioSession does not work either.
Thanks for your help!
UPDATE
I switched to the .caf file extension for the output file and it worked. Any idea why .m4a is not working?
You need to nil outputFile to flush the header and close the m4a file.
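Expanded into a minimal sketch (the function and parameter names are illustrative): keep the AVAudioFile in a scope that ends before you open the file for playback, so deallocation flushes the header.
import AVFoundation

// Sketch: the output file must be released before AVAudioPlayer opens it.
func writeThenPlay(url: URL, settings: [String: Any], buffers: [AVAudioPCMBuffer]) throws -> AVAudioPlayer {
    do {
        let outputFile = try AVAudioFile(forWriting: url, settings: settings)
        for buffer in buffers {
            try outputFile.write(from: buffer)
        }
    } // outputFile is deallocated here, finalizing the .m4a header
    return try AVAudioPlayer(contentsOf: url)
}
This would also explain why .caf worked: CAF is designed to be written incrementally, while the MPEG-4 container presumably needs its header finalized when the file closes.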
I'm trying to concatenate two audio files with the following code:
func concatenateFiles(audioFiles: [NSURL], completion: (concatenatedFile: NSURL?) -> ()) {
    // Result file
    var nextClipStartTime = kCMTimeZero
    let composition = AVMutableComposition()
    let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    // Add each track
    for audio in audioFiles {
        let asset = AVURLAsset(URL: NSURL(fileURLWithPath: audio.path!), options: nil)
        if let assetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first {
            let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
            do {
                try track.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: nextClipStartTime)
                nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
            } catch {
                print("Error concatenating file - \(error)")
                completion(concatenatedFile: nil)
                return
            }
        }
    }
    // Export the new file
    if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let documents = NSURL(string: paths.first!)
        if let fileURL = documents?.URLByAppendingPathComponent("\(stringFromDate(NSDate())).m4a") {
            // Remove existing file
            do {
                try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
                print("Removed \(fileURL)")
            } catch {
                print("Could not remove file - \(error)")
            }
            // Configure export session output
            exportSession.outputURL = fileURL
            exportSession.outputFileType = AVFileTypeAppleM4A
            // Perform the export
            exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
                if exportSession.status == .Completed {
                    print("Export complete")
                    dispatch_async(dispatch_get_main_queue(), {
                        completion(concatenatedFile: fileURL)
                    })
                    return
                } else if exportSession.status == .Failed {
                    print("Export failed - \(exportSession.error)")
                }
                completion(concatenatedFile: nil)
                return
            }
        }
    }
}
but I've received this error exporting the file:
Export failed - Optional(Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.})
I've tried changing the format but it doesn't work. I have no more ideas; is there someone who can help me?
I really don't know why, but the problem was solved when I changed the assignment of the outputURL in the exportSession.
before:
exportSession.outputURL = fileURL
now:
exportSession.outputURL = NSURL.fileURLWithPath(fileURL.path!)
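The likely reason this works: documents was created with NSURL(string:), which yields a URL without the file:// scheme, and AVAssetExportSession requires a genuine file URL, so the export stops with -11838 until the URL is rebuilt via fileURLWithPath. In modern Swift the safe pattern is to let FileManager hand out the destination, since it always returns file URLs. A hedged sketch (using the AppleM4A preset here rather than passthrough, since passthrough's supported containers vary by composition):
import AVFoundation

// Sketch: export a composition to Documents using a guaranteed file:// URL.
func export(composition: AVMutableComposition, completion: @escaping (URL?) -> Void) {
    guard let session = AVAssetExportSession(asset: composition,
                                             presetName: AVAssetExportPresetAppleM4A) else {
        completion(nil)
        return
    }
    // FileManager returns proper file URLs, unlike NSURL(string:) on a bare path.
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let fileURL = documents.appendingPathComponent(UUID().uuidString).appendingPathExtension("m4a")
    session.outputURL = fileURL
    session.outputFileType = .m4a
    session.exportAsynchronously {
        completion(session.status == .completed ? fileURL : nil)
    }
}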