I have set up an audio multitrack player using Apple's AVFoundation. I use nine AVAudioPlayerNodes attached to an AVAudioEngine, and they all start playing at precisely the same time. In my SpriteKit game scene, I would like to detect when the file in any of the AVAudioPlayerNodes has finished playing so that I can run subsequent code. How do I do that? Unfortunately, AVAudioPlayerNode doesn't offer the same convenient callbacks as the simple AVAudioPlayer class. Here is the audioMultiTrack function:
import SpriteKit
import AVFoundation
var onesie = AVAudioPlayer()
var singleTrack = AVAudioPlayerNode()
var trackOne = AVAudioPlayerNode()
var trackTwo = AVAudioPlayerNode()
var trackThree = AVAudioPlayerNode()
var trackFour = AVAudioPlayerNode()
var trackFive = AVAudioPlayerNode()
var trackSix = AVAudioPlayerNode()
var trackSeven = AVAudioPlayerNode()
var trackEight = AVAudioPlayerNode()
var trackNine = AVAudioPlayerNode()
//variables to hold NSURLs as AVAudioFiles for use in AudioPlayer Nodes.
var single = AVAudioFile()
var one = AVAudioFile()
var two = AVAudioFile()
var three = AVAudioFile()
var four = AVAudioFile()
var five = AVAudioFile()
var six = AVAudioFile()
var seven = AVAudioFile()
var eight = AVAudioFile()
var nine = AVAudioFile()
//variables for the audio engine and player nodes. The "mixer" is part of the engine and already hooked up to the output
var engine = AVAudioEngine()
//reference the mixer
let mainMixer = engine.mainMixerNode
func audioMultiTrack(trackOneFN: String, trackTwoFN: String, trackThreeFN: String, trackFourFN: String, trackFiveFN: String, trackSixFN: String, trackSevenFN: String, trackEightFN: String, trackNineFN: String){
/*access audio files for audio players (tracks)*/
//1
guard let trackOneFile = NSBundle.mainBundle().URLForResource(trackOneFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//2
guard let trackTwoFile = NSBundle.mainBundle().URLForResource(trackTwoFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//3
guard let trackThreeFile = NSBundle.mainBundle().URLForResource(trackThreeFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//4
guard let trackFourFile = NSBundle.mainBundle().URLForResource(trackFourFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//5
guard let trackFiveFile = NSBundle.mainBundle().URLForResource(trackFiveFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//6
guard let trackSixFile = NSBundle.mainBundle().URLForResource(trackSixFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//7
guard let trackSevenFile = NSBundle.mainBundle().URLForResource(trackSevenFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//8
guard let trackEightFile = NSBundle.mainBundle().URLForResource(trackEightFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//9
guard let trackNineFile = NSBundle.mainBundle().URLForResource(trackNineFN, withExtension: "mp3") else {
fatalError("File not found.")
}
//place NSURLs in AVAudioFile variables
//1
do {
try one = AVAudioFile(forReading: trackOneFile)
} catch {
fatalError("error loading track one file.")
}
//2
do {
try two = AVAudioFile(forReading: trackTwoFile)
} catch {
fatalError("error loading track two file.")
}
//3
do {
try three = AVAudioFile(forReading: trackThreeFile)
} catch {
fatalError("error loading track three file.")
}
//4
do {
try four = AVAudioFile(forReading: trackFourFile)
} catch {
fatalError("error loading track four file.")
}
//5
do {
try five = AVAudioFile(forReading: trackFiveFile)
} catch {
fatalError("error loading track five file.")
}
//6
do {
try six = AVAudioFile(forReading: trackSixFile)
} catch {
fatalError("error loading track six file.")
}
//7
do {
try seven = AVAudioFile(forReading: trackSevenFile)
} catch {
fatalError("error loading track six file.")
}
//8
do {
try eight = AVAudioFile(forReading: trackEightFile)
} catch {
fatalError("error loading track six file.")
}
//9
do {
try nine = AVAudioFile(forReading: trackNineFile)
} catch {
fatalError("error loading track six file.")
}
/*hook up audio units*/
//attach audio players (tracks) to audio engine
engine.attachNode(trackOne)
engine.attachNode(trackTwo)
engine.attachNode(trackThree)
engine.attachNode(trackFour)
engine.attachNode(trackFive)
engine.attachNode(trackSix)
engine.attachNode(trackSeven)
engine.attachNode(trackEight)
engine.attachNode(trackNine)
//connect the tracks to the mixer
engine.connect(trackOne, to: mainMixer, format: nil)
engine.connect(trackTwo, to: mainMixer, format: nil)
engine.connect(trackThree, to: mainMixer, format: nil)
engine.connect(trackFour, to: mainMixer, format: nil)
engine.connect(trackFive, to: mainMixer, format: nil)
engine.connect(trackSix, to: mainMixer, format: nil)
engine.connect(trackSeven, to: mainMixer, format: nil)
engine.connect(trackEight, to: mainMixer, format: nil)
engine.connect(trackNine, to: mainMixer, format: nil)
//schedule the audio files on the audio players (tracks)
trackOne.scheduleFile(one, atTime: nil, completionHandler: nil)
trackTwo.scheduleFile(two, atTime: nil, completionHandler: nil)
trackThree.scheduleFile(three, atTime: nil, completionHandler: nil)
trackFour.scheduleFile(four, atTime: nil, completionHandler: nil)
trackFive.scheduleFile(five, atTime: nil, completionHandler: nil)
trackSix.scheduleFile(six, atTime: nil, completionHandler: nil)
trackSeven.scheduleFile(seven, atTime: nil, completionHandler: nil)
trackEight.scheduleFile(eight, atTime: nil, completionHandler: nil)
trackNine.scheduleFile(nine, atTime: nil, completionHandler: nil)
//try to start the audio engine
do {
try engine.start()
} catch {
print("error starting engine")
}
//function to create a precise time to start all audio players (tracks)
func startTime () ->AVAudioTime{
let samplerate = one.processingFormat.sampleRate
let sampleTime = AVAudioFramePosition(samplerate)
let time = AVAudioTime(sampleTime: sampleTime, atRate: samplerate)
return time
}
//start audio players (tracks) at precise time
trackOne.playAtTime(startTime())
trackTwo.playAtTime(startTime())
trackThree.playAtTime(startTime())
trackFour.playAtTime(startTime())
trackFive.playAtTime(startTime())
trackSix.playAtTime(startTime())
trackSeven.playAtTime(startTime())
trackEight.playAtTime(startTime())
trackNine.playAtTime(startTime())
}
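Since the question is how to detect the end of a file, here is a minimal sketch (my own, in the same Swift 2 syntax as the code above): pass a completionHandler when scheduling a file instead of nil. The handler runs on a background thread and fires when the player has consumed the file's data (which can be slightly before the last audio is actually heard), so dispatch back to the main queue before touching the SpriteKit scene:
//sketch: schedule one of the tracks with a completion handler to get a callback when that file finishes
trackOne.scheduleFile(one, atTime: nil, completionHandler: {
    //called on a background thread once the node has consumed the file's data
    dispatch_async(dispatch_get_main_queue()) {
        //safe to run SpriteKit / game-scene code here,
        //e.g. self.trackDidFinish() (hypothetical method)
    }
})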
I have this button to play and loop a wav file. But what if I have a second button with another loop? I want the second loop to start playing when its button is pressed, but only once the first loop has finished its current 'round'. And vice versa (while the second loop is playing and looping, pressing the first button should let the first loop take over).
@IBAction func playButtonTapped(_ sender: Any) {
guard let filePath: String = Bundle.main.path(forResource: "25loop110", ofType: "wav") else{ return }
print("\(filePath)")
let fileURL: URL = URL(fileURLWithPath: filePath)
guard
let audioFile = try? AVAudioFile(forReading: fileURL) else{ return }
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else{ return }
do{
try audioFile.read(into: audioFileBuffer)
timeShift.rate = adjustedBpm/bpm
playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
} catch{
print("over")
}
try? audioEngine.start()
playerNode.play()
playerNode.scheduleBuffer(audioFileBuffer, at: nil, options:.loops,completionHandler: nil)
}
You can handle this behavior by using the completionHandler parameter of .scheduleBuffer.
For example, you could do something like this:
var nextAudioFilePath: String
var isPlaying: Bool = false
@IBAction func playLoopA() {
guard let path = Bundle.main.path(forResource: "audioFileA", ofType: "wav") else { return }
nextAudioFilePath = path
guard !isPlaying else { return }
play()
}
@IBAction func playLoopB() {
guard let path = Bundle.main.path(forResource: "audioFileB", ofType: "wav") else { return }
nextAudioFilePath = path
guard !isPlaying else { return }
play()
}
private func play() {
let fileURL = URL(fileURLWithPath: nextAudioFilePath)
...
playerNode.scheduleBuffer(audioFileBuffer, at: nil, options: [], completionHandler: { [weak self] in
self?.play()
})
}
I also found this solution:
playerNode.scheduleBuffer(audioFileBuffer, at: nil, options:[.interruptsAtLoop, .loops],completionHandler: nil)
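Here each buffer is scheduled with .loops so it repeats on its own, and with .interruptsAtLoop so that, when a new buffer is scheduled, it takes over from the currently looping buffer at its next loop boundary rather than immediately. That matches the "finish its round" requirement without any completion-handler bookkeeping.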
I have an array of AVAssets ([AVAsset]()). Whenever I record several videos of different durations, the code below merges them into one video of the combined duration, but it only plays the last video, repeated.
For example: video1 is 1 minute and shows a dog walking, video2 is 1 minute and shows a bird flying, and video3 is 1 minute and shows a horse running. The merged video plays for 3 minutes but only shows the horse running, 1 minute each, three times in a row.
Where am I going wrong?
var movieFileOutput = AVCaptureMovieFileOutput()
var arrayVideos = [AVAsset]()
var videoFileUrl: URL?
// button to record video
@objc func recordButtonTapped() {
// Stop recording
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
print("Stop Recording")
} else {
// Start recording
movieFileOutput.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation()
movieFileOutput.maxRecordedDuration = maxRecordDuration()
videoFileUrl = URL(fileURLWithPath: videoFileLocation())
if let videoFileUrlFromCamera = videoFileUrl {
movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
}
}
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
// button to save the merged video
@objc func saveButtonTapped() {
mergeVids()
}
// function to merge and save videos
func mergeVids() {
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mixComposition.addMutableTrack(withMediaType: .audio,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
var insertTime = CMTime.zero
for videoAsset in arrayVideos {
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .video)[0],
at: insertTime)
try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .audio)[0],
at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
} catch let error as NSError {
print("\(error.localizedDescription)")
}
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
try! FileManager.default.removeItem(atPath: path)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter!.outputURL = outputFileURL
exporter!.outputFileType = AVFileType.mp4
exporter!.shouldOptimizeForNetworkUse = true
exporter!.exportAsynchronously { [weak self] in
let cameraVideoURL = exporter!.outputURL!
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: cameraVideoURL)
}) { (saved, error) in
if let error = error { return }
if !saved { return }
// url is saved
self?.videoFileUrl = nil
self?.arrayVideos.removeAll()
}
}
}
// AVCaptureFileOutputRecording Delegates
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print("+++++++++++++++Started")
print("*****Started recording: \(fileURL)\n")
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
arrayVideos.append(asset)
print(arrayVideos.count)
} else {
print("Error recording movie: \(error!.localizedDescription)")
}
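// Note: cleanUp() below is declared but never called in this snippet, so the per-recording temporary files are not removed here.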
func cleanUp() {
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
do {
try FileManager.default.removeItem(atPath: path)
} catch {
print("Could not remove file at url: \(outputFileURL)")
}
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("++++++Frame Drop: \(connection.description)")
}
Thanks to @alxlives for testing the merge function and pointing out that, since it worked fine on his machine, the problem must have been somewhere else.
The problem was here:
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
When recordButtonTapped used the code above, it kept reusing the same "videoFile.mov" path:
videoFileUrl = URL(fileURLWithPath: videoFileLocation()) // <<< it gets called here every time a new video runs
if let videoFileUrlFromCamera = videoFileUrl {
movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
To fix it I needed to make each file name unique, so every recording gets its own file instead of overwriting the previous one (which is why only the last clip showed up in the merge):
func videoFileLocation() -> String {
let uuid = UUID().uuidString
return NSTemporaryDirectory().appending("videoFile_\(uuid).mov")
}
I want to change the rate and pitch of an audio file. When I change the rate, the pitch changes as well and the audio doesn't sound clear.
I'm using this code:
var engine: AVAudioEngine!
var player: AVAudioPlayerNode!
var pitch : AVAudioUnitTimePitch!
var file = AVAudioFile()
var totalDuration : TimeInterval!
func configurePlayer() {
engine = AVAudioEngine()
player = AVAudioPlayerNode()
player.volume = 1.0
let path = Bundle.main.path(forResource: "sample", ofType: "wav")
let url = URL.init(fileURLWithPath: path!)
file = try! AVAudioFile(forReading: url)
let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))
do {
try file.read(into: buffer!)
} catch _ {
}
pitch = AVAudioUnitTimePitch()
pitch.rate = 1
engine.attach(player)
engine.attach(pitch)
engine.attach(speedEffect)
engine.connect(player, to: pitch, format: buffer?.format)
engine.connect(pitch, to: engine.mainMixerNode, format: buffer?.format)
player.scheduleBuffer(buffer!, at: nil, options: AVAudioPlayerNodeBufferOptions.loops, completionHandler: nil)
engine.prepare()
do {
try engine.start()
} catch _ {
}
}
@IBAction func slideValueChanged(_ sender: UISlider) {
let newRate = sender.value/120;
pitch.rate = newRate
}
When I change the rate with the slider, the sound quality is bad.
The slider's minValue is 60 and its maxValue is 240.
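For reference, AVAudioUnitTimePitch.rate is meant to change tempo while keeping pitch constant, whereas AVAudioUnitVarispeed changes rate and pitch together; independent pitch shifts come from the unit's pitch property (in cents). A minimal sketch of the slider handler under that assumption (my own sketch, not a confirmed fix), mapping the 60–240 slider onto 0.5x–2x of the original 120 BPM:
@IBAction func slideValueChanged(_ sender: UISlider) {
    // slider runs 60...240; 120 is the original tempo, so this maps to 0.5...2.0
    let newRate = sender.value / 120
    // AVAudioUnitTimePitch accepts rates from 1/32 to 32; clamp to be safe
    pitch.rate = max(1.0 / 32.0, min(newRate, 32.0))
    // to shift pitch independently of tempo, use pitch.pitch (in cents),
    // e.g. pitch.pitch = 1200 for one octave up
}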
Once I have connected a Bluetooth LE peripheral (headphones) to my device, how can I use it to play sound?
EDIT: I want to force the sound to play on the peripheral.
Currently I'm using this code to play sound on the device speaker:
var engine = AVAudioEngine()
var player = AVAudioPlayerNode()
var pitch = AVAudioUnitTimePitch()
override func viewDidLoad() {
super.viewDidLoad()
player.volume = 1.0
let path = NSBundle.mainBundle().pathForResource("Test", ofType: "m4a")!
let url = NSURL.fileURLWithPath(path)
let file = try? AVAudioFile(forReading: url)
let buffer = AVAudioPCMBuffer(PCMFormat: file!.processingFormat, frameCapacity: AVAudioFrameCount(file!.length))
do {
try file!.readIntoBuffer(buffer)
} catch _ {
}
engine.attachNode(player)
engine.attachNode(pitch)
engine.connect(player, to: pitch, format: buffer.format)
engine.connect(pitch, to: engine.mainMixerNode, format: buffer.format)
player.scheduleBuffer(buffer, atTime: nil, options: AVAudioPlayerNodeBufferOptions.Loops, completionHandler: nil)
engine.prepare()
do {
try engine.start()
} catch _ {
}
}
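For what it's worth, an assumption on my part rather than a confirmed answer: output routing on iOS is handled by AVAudioSession, not by CoreBluetooth, and Bluetooth audio headphones connect through the system's classic Bluetooth audio profiles rather than as an LE peripheral you drive yourself. Once they are connected at the system level they become the active output route, and a playback-category session will normally follow it. A minimal sketch in the same Swift 2 syntax as the code above, run before starting the engine:
// configure the shared audio session so output follows the system route
// (the connected Bluetooth headphones when they are active)
let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(AVAudioSessionCategoryPlayback)
    try session.setActive(true)
} catch {
    print("Could not configure the audio session: \(error)")
}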
I'm working on an iOS app with Swift and Xcode 6. What I would like to do is play an audio file using an AVAudioEngine, and up to this point everything works OK. But how can I play it without stopping, i.e. make it start again when it reaches the end?
This is my code:
/*==================== CONFIGURATES THE AVAUDIOENGINE ===========*/
audioEngine.reset() //Resets any previous configuration on AudioEngine
let audioPlayerNode = AVAudioPlayerNode() //The node that will play the actual sound
audioEngine.attachNode(audioPlayerNode) //Attachs the node to the audioengine
audioEngine.connect(audioPlayerNode, to: audioEngine.outputNode, format: nil) //Connects the applause playback node to the sound output
audioPlayerNode.scheduleFile(applause.applauseFile, atTime: nil, completionHandler: nil)
audioEngine.startAndReturnError(nil)
audioPlayerNode.play() //Plays the sound
Before telling me that I should use AVAudioPlayer for this: I can't, because later I will have to apply some effects and play three audio files at the same time, also repeatedly.
I found the solution in another question, asked and also self-answered by @CarveDrone, so I've just copied the code he used:
class aboutViewController: UIViewController {
var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()
override func viewDidLoad() {
super.viewDidLoad()
let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
println("\(filePath)")
let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
let audioFile = AVAudioFile(forReading: fileURL, error: nil)
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
audioFile.readIntoBuffer(audioFileBuffer, error: nil)
var mainMixer = audioEngine.mainMixerNode
audioEngine.attachNode(audioFilePlayer)
audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)
audioEngine.startAndReturnError(nil)
audioFilePlayer.play()
audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options:.Loops, completionHandler: nil)
}
The only thing you have to change is the filePath constant. Here is the link to the original answer: Having AVAudioEngine repeat a sound
Swift 5 version, thanks to @Guillermo Barreiro:
var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()
override func viewDidLoad() {
super.viewDidLoad()
guard let filePath: String = Bundle.main.path(forResource: "chimes", ofType: "wav") else{ return }
print("\(filePath)")
let fileURL: URL = URL(fileURLWithPath: filePath)
guard let audioFile = try? AVAudioFile(forReading: fileURL) else{ return }
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else{ return }
do{
try audioFile.read(into: audioFileBuffer)
} catch{
print("over")
}
let mainMixer = audioEngine.mainMixerNode
audioEngine.attach(audioFilePlayer)
audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)
try? audioEngine.start()
audioFilePlayer.play()
audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options:AVAudioPlayerNodeBufferOptions.loops)
}
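With .loops the scheduled buffer repeats indefinitely, so there is no completion to wait for; playback continues until you call audioFilePlayer.stop() (or pause()), or until another buffer is scheduled with .interruptsAtLoop as in the earlier answer. Calling play() before scheduleBuffer is fine here: the buffer starts rendering as soon as it is scheduled on the already-playing node.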