Having AVAudioEngine repeat a sound - iOS

I've been having trouble making the code below repeat the sound at audioUrl over and over again. Right now it just plays the sound once when the view is opened, then stops.
import UIKit
import AVFoundation

class aboutViewController: UIViewController {

    var audioUrl = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!)
    var audioEngine = AVAudioEngine()
    var myPlayer = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        audioEngine.attachNode(myPlayer)
        var audioFile = AVAudioFile(forReading: audioUrl, error: nil)
        var audioError: NSError?
        audioEngine.connect(myPlayer, to: audioEngine.mainMixerNode, format: audioFile.processingFormat)
        myPlayer.scheduleFile(audioFile, atTime: nil, completionHandler: nil)
        audioEngine.startAndReturnError(&audioError)
        myPlayer.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Thanks!

After hours and hours of searching, this did it:
class aboutViewController: UIViewController {

    var audioEngine: AVAudioEngine = AVAudioEngine()
    var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
        println("\(filePath)")
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.processingFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
        audioFile.readIntoBuffer(audioFileBuffer, error: nil)
        let mainMixer = audioEngine.mainMixerNode
        audioEngine.attachNode(audioFilePlayer)
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
        audioEngine.startAndReturnError(nil)
        audioFilePlayer.play()
        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }
    ...
}
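
Why this works: scheduleBuffer with the .Loops option keeps replaying the same PCM buffer gaplessly until the player is stopped. If you would rather keep file-based playback, a common alternative is to re-schedule the file from its completion handler. A minimal sketch in current Swift syntax (assuming an engine and player already attached and connected as above); note the handler fires when the node has consumed the file, slightly before the audio finishes sounding, so the buffer approach is the truly gapless one:

import AVFoundation

// Loop an AVAudioFile by re-scheduling it once it has been consumed.
// Sketch only: `player` must already be attached to a running engine.
func scheduleLooped(_ file: AVAudioFile, on player: AVAudioPlayerNode) {
    player.scheduleFile(file, at: nil) {
        scheduleLooped(file, on: player)  // queue the next pass
    }
}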

Related

Getting distorted sound if changing rate of audio using AVAudioPlayerNode and AVAudioEngine

I want to change the rate and pitch of an audio file. When I change the rate, the pitch changes along with it and the sound becomes distorted and unclear.
I'm using this code:
var engine: AVAudioEngine!
var player: AVAudioPlayerNode!
var pitch: AVAudioUnitTimePitch!
var file = AVAudioFile()
var totalDuration: TimeInterval!

func configurePlayer() {
    engine = AVAudioEngine()
    player = AVAudioPlayerNode()
    player.volume = 1.0
    let path = Bundle.main.path(forResource: "sample", ofType: "wav")
    let url = URL(fileURLWithPath: path!)
    file = try! AVAudioFile(forReading: url)
    let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))
    do {
        try file.read(into: buffer!)
    } catch _ {
    }
    pitch = AVAudioUnitTimePitch()
    pitch.rate = 1
    engine.attach(player)
    engine.attach(pitch)
    engine.connect(player, to: pitch, format: buffer?.format)
    engine.connect(pitch, to: engine.mainMixerNode, format: buffer?.format)
    player.scheduleBuffer(buffer!, at: nil, options: AVAudioPlayerNodeBufferOptions.loops, completionHandler: nil)
    engine.prepare()
    do {
        try engine.start()
    } catch _ {
    }
}

@IBAction func slideValueChanged(_ sender: UISlider) {
    let newRate = sender.value / 120
    pitch.rate = newRate
}
When I change the rate with the slider I get distorted sound.
The slider's minimum value is 60 and its maximum value is 240.
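
A note on the two time units involved: AVAudioUnitTimePitch changes rate independently of pitch (it exposes a separate pitch property in cents), while AVAudioUnitVarispeed couples the two, like speeding up a tape. If the goal is rate and pitch moving together, a sketch of the varispeed route (hypothetical wiring; assumes `engine` and `player` are configured as above):

import AVFoundation
import UIKit

// Replace the time-pitch unit with a varispeed unit so rate and
// pitch shift together.
let varispeed = AVAudioUnitVarispeed()
engine.attach(varispeed)
engine.connect(player, to: varispeed, format: nil)
engine.connect(varispeed, to: engine.mainMixerNode, format: nil)

// Map the slider range 60...240 to a playback rate of 0.5...2.0.
func slideValueChanged(_ sender: UISlider) {
    varispeed.rate = sender.value / 120   // 1.0 = original speed
}

If you stay with AVAudioUnitTimePitch, raising its overlap property (range 3.0 to 32.0, default 8.0) can reduce the smearing artifacts of the time-stretch algorithm at some CPU cost.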

Custom camera , video is not playing with Audio in swift

I am new to Swift and also to Stack Overflow; thanks in advance for your attention.
Basically, I am trying to build a custom camera that records video with audio, so that the video plays back with sound. For the last few days I have been trying to build this custom camera. I have already followed a tutorial, but something is still missing: as far as I can tell it records video only, not audio, and I don't understand why. I have searched but haven't found an appropriate answer for this.
Here is what I did:
import UIKit
import AVFoundation
import SVProgressHUD
import MediaPlayer
import MobileCoreServices
import AVKit

var videoUrl = [AnyObject]()

class TestViewController: UIViewController {

    @IBOutlet var viewVidioPlayer: UIView!
    @IBOutlet weak var myView: UIView!

    var session: AVCaptureSession?
    var userreponsevideoData = NSData()
    var userreponsethumbimageData = NSData()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
    }

    // here i create session
    func createSession() {
        var input: AVCaptureDeviceInput?
        let movieFileOutput = AVCaptureMovieFileOutput()
        var prevLayer: AVCaptureVideoPreviewLayer?
        session = AVCaptureSession()
        let error: NSError? = nil
        do {
            input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!)
        } catch {
            return
        }
        if error == nil {
            session?.addInput(input)
        } else {
            print("camera input error: \(String(describing: error))")
        }
        prevLayer = AVCaptureVideoPreviewLayer(session: session)
        prevLayer?.frame.size = myView.frame.size
        prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        prevLayer?.connection.videoOrientation = .portrait
        myView.layer.addSublayer(prevLayer!)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4"))
        let maxDuration: CMTime = CMTimeMake(600, 10)
        movieFileOutput.maxRecordedDuration = maxDuration
        movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
        if self.session!.canAddOutput(movieFileOutput) {
            self.session!.addOutput(movieFileOutput)
        }
        session?.startRunning()
        movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self)
    }

    func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == position {
                return device as? AVCaptureDevice
            }
        }
        return nil
    }

    @IBAction func pressbackbutton(sender: AnyObject) {
        session?.stopRunning()
    }

    @IBAction func Record(_ sender: Any) {
        createSession()
    }

    @IBAction func play(_ sender: Any) {
        self.videoPlay()
    }

    func videoPlay() {
        let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        do {
            // Get the directory contents urls (including subfolders urls)
            let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
            print(directoryContents)
            // if you want to filter the directory contents you can do like this:
            videoUrl = directoryContents.filter { $0.pathExtension == "mp4" } as [AnyObject]
            print("mp4 urls:", videoUrl)
            let playerController = AVPlayerViewController()
            playerController.delegate = self as? AVPlayerViewControllerDelegate
            let movieURL = videoUrl[0]
            print(movieURL)
            let player = AVPlayer(url: movieURL as! URL)
            playerController.player = player
            self.addChildViewController(playerController)
            self.view.addSubview(playerController.view)
            playerController.view.frame = self.view.frame
            player.play()
            player.volume = 1.0
            player.rate = 1.0
        } catch let error as NSError {
            print(error.localizedDescription)
        }
    }
}
extension TestViewController: AVCaptureFileOutputRecordingDelegate {

    @available(iOS 4.0, *)
    private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) {
        print(fileURL)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        let filemainurl = outputFileURL
        do {
            let asset = AVURLAsset(url: filemainurl! as URL, options: nil)
            print(asset)
            let imgGenerator = AVAssetImageGenerator(asset: asset)
            imgGenerator.appliesPreferredTrackTransform = true
            let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
            let uiImage = UIImage(cgImage: cgImage)
            userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL)
            print(userreponsethumbimageData.length)
            print(uiImage)
            // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
        } catch let error as NSError {
            print(error)
            return
        }
        SVProgressHUD.show(with: SVProgressHUDMaskType.clear)
        let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random() % 1000)d")!.appendingPathExtension("mp4").absoluteString
        if FileManager.default.fileExists(atPath: VideoFilePath) {
            do {
                try FileManager.default.removeItem(atPath: VideoFilePath)
            } catch { }
        }
        let tempfilemainurl = NSURL(string: VideoFilePath)!
        let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil)
        let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
        assetExport.outputFileType = AVFileTypeQuickTimeMovie
        assetExport.outputURL = tempfilemainurl as URL
        assetExport.exportAsynchronously { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.completed:
                DispatchQueue.main.async(execute: {
                    do {
                        SVProgressHUD.dismiss()
                        self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                        print("MB - \(self.userreponsevideoData.length) byte")
                    } catch {
                        SVProgressHUD.dismiss()
                        print(error)
                    }
                })
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport.error)")
            default:
                print("complete")
                SVProgressHUD.dismiss()
            }
        }
    }
}
That's all I have done, and I don't understand what is missing from this code: why is audio not recorded along with the video, and why does the video play back without sound?
Use this CocoaPod for your project; it makes the job quite easy.
It comes with full instructions and also contains a demo project so you can check that it works as you intended.
SwiftyCam
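
If you would rather keep the hand-rolled session, the most likely cause of silent recordings is that the session above only ever gets a camera input: AVCaptureMovieFileOutput records an audio track only when an audio device input is attached as well. A minimal sketch of the missing piece (using modern API names, which differ slightly from the question's; you also need the NSMicrophoneUsageDescription key in Info.plist):

import AVFoundation

// Add the microphone alongside the camera input; without this,
// AVCaptureMovieFileOutput writes a video-only file.
func addAudioInput(to session: AVCaptureSession) {
    guard let mic = AVCaptureDevice.default(for: .audio),
          let micInput = try? AVCaptureDeviceInput(device: mic),
          session.canAddInput(micInput) else { return }
    session.addInput(micInput)
}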

Use peripheral as AVAudio output

Once I have connected a Bluetooth LE peripheral (headphones) to my device, how can I use it to play sound?
EDIT: I want to force the sound to play on the peripheral.
At the moment I'm using this code to play sound on the device speaker:
var engine = AVAudioEngine()
var player = AVAudioPlayerNode()
var pitch = AVAudioUnitTimePitch()

override func viewDidLoad() {
    super.viewDidLoad()
    player.volume = 1.0
    let path = NSBundle.mainBundle().pathForResource("Test", ofType: "m4a")!
    let url = NSURL.fileURLWithPath(path)
    let file = try? AVAudioFile(forReading: url)
    let buffer = AVAudioPCMBuffer(PCMFormat: file!.processingFormat, frameCapacity: AVAudioFrameCount(file!.length))
    do {
        try file!.readIntoBuffer(buffer)
    } catch _ {
    }
    engine.attachNode(player)
    engine.attachNode(pitch)
    engine.connect(player, to: pitch, format: buffer.format)
    engine.connect(pitch, to: engine.mainMixerNode, format: buffer.format)
    player.scheduleBuffer(buffer, atTime: nil, options: AVAudioPlayerNodeBufferOptions.Loops, completionHandler: nil)
    engine.prepare()
    do {
        try engine.start()
    } catch _ {
    }
}
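
One caveat before any code: Core Bluetooth (BLE) is not an audio transport. iOS routes audio to Bluetooth headphones over the Classic Bluetooth profiles (A2DP for playback, HFP for calls), and that routing is controlled through AVAudioSession, not through your CBPeripheral connection. A sketch in current Swift syntax of enabling Bluetooth output routes (assuming the headphones support A2DP):

import AVFoundation

// Let the session route playback to paired Bluetooth headphones.
do {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.playback, mode: .default, options: [.allowBluetoothA2DP])
    try session.setActive(true)
} catch {
    print("audio session error: \(error)")
}

If the peripheral is BLE-only, there is no supported way to push the AVAudioEngine output to it; you would have to send encoded audio over GATT yourself, and the accessory would need custom firmware to play it.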

detect the end of a file in AVAudioPlayerNode

I have set up an audio multitrack player using Apple's AVFoundation. I use nine AVAudioPlayerNodes attached to an AVAudioEngine, and they are played at precisely the same time. In my SpriteKit game scene, I would like to detect the end of the file in any of the AVAudioPlayerNodes so that I can run subsequent code. How do I do that? Unfortunately AVAudioPlayerNode doesn't have the same convenient callbacks as the simple AVAudioPlayer class. Here is the multitrack function:
import SpriteKit
import AVFoundation
var onesie = AVAudioPlayer()
var singleTrack = AVAudioPlayerNode()
var trackOne = AVAudioPlayerNode()
var trackTwo = AVAudioPlayerNode()
var trackThree = AVAudioPlayerNode()
var trackFour = AVAudioPlayerNode()
var trackFive = AVAudioPlayerNode()
var trackSix = AVAudioPlayerNode()
var trackSeven = AVAudioPlayerNode()
var trackEight = AVAudioPlayerNode()
var trackNine = AVAudioPlayerNode()
//variables to hold NSURLs as AVAudioFiles for use in AudioPlayer Nodes.
var single = AVAudioFile()
var one = AVAudioFile()
var two = AVAudioFile()
var three = AVAudioFile()
var four = AVAudioFile()
var five = AVAudioFile()
var six = AVAudioFile()
var seven = AVAudioFile()
var eight = AVAudioFile()
var nine = AVAudioFile()
//variables for the audio engine and player nodes. The "mixer" is part of the engine and already hooked up to the output
var engine = AVAudioEngine()
//reference the mixer
let mainMixer = engine.mainMixerNode
func audioMultiTrack(trackOneFN: String, trackTwoFN: String, trackThreeFN: String, trackFourFN: String, trackFiveFN: String, trackSixFN: String, trackSevenFN: String, trackEightFN: String, trackNineFN: String){
    /* access audio files for audio players (tracks) */
    //1
    guard let trackOneFile = NSBundle.mainBundle().URLForResource(trackOneFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //2
    guard let trackTwoFile = NSBundle.mainBundle().URLForResource(trackTwoFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //3
    guard let trackThreeFile = NSBundle.mainBundle().URLForResource(trackThreeFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //4
    guard let trackFourFile = NSBundle.mainBundle().URLForResource(trackFourFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //5
    guard let trackFiveFile = NSBundle.mainBundle().URLForResource(trackFiveFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //6
    guard let trackSixFile = NSBundle.mainBundle().URLForResource(trackSixFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //7
    guard let trackSevenFile = NSBundle.mainBundle().URLForResource(trackSevenFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //8
    guard let trackEightFile = NSBundle.mainBundle().URLForResource(trackEightFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //9
    guard let trackNineFile = NSBundle.mainBundle().URLForResource(trackNineFN, withExtension: "mp3") else {
        fatalError("File not found.")
    }
    //place NSURLs in AVAudioFile variables
    //1
    do {
        try one = AVAudioFile(forReading: trackOneFile)
    } catch {
        fatalError("error loading track one file.")
    }
    //2
    do {
        try two = AVAudioFile(forReading: trackTwoFile)
    } catch {
        fatalError("error loading track two file.")
    }
    //3
    do {
        try three = AVAudioFile(forReading: trackThreeFile)
    } catch {
        fatalError("error loading track three file.")
    }
    //4
    do {
        try four = AVAudioFile(forReading: trackFourFile)
    } catch {
        fatalError("error loading track four file.")
    }
    //5
    do {
        try five = AVAudioFile(forReading: trackFiveFile)
    } catch {
        fatalError("error loading track five file.")
    }
    //6
    do {
        try six = AVAudioFile(forReading: trackSixFile)
    } catch {
        fatalError("error loading track six file.")
    }
    //7
    do {
        try seven = AVAudioFile(forReading: trackSevenFile)
    } catch {
        fatalError("error loading track seven file.")
    }
    //8
    do {
        try eight = AVAudioFile(forReading: trackEightFile)
    } catch {
        fatalError("error loading track eight file.")
    }
    //9
    do {
        try nine = AVAudioFile(forReading: trackNineFile)
    } catch {
        fatalError("error loading track nine file.")
    }
    /* hook up audio units */
    //attach audio players (tracks) to audio engine
    engine.attachNode(trackOne)
    engine.attachNode(trackTwo)
    engine.attachNode(trackThree)
    engine.attachNode(trackFour)
    engine.attachNode(trackFive)
    engine.attachNode(trackSix)
    engine.attachNode(trackSeven)
    engine.attachNode(trackEight)
    engine.attachNode(trackNine)
    //connect the tracks to the mixer
    engine.connect(trackOne, to: mainMixer, format: nil)
    engine.connect(trackTwo, to: mainMixer, format: nil)
    engine.connect(trackThree, to: mainMixer, format: nil)
    engine.connect(trackFour, to: mainMixer, format: nil)
    engine.connect(trackFive, to: mainMixer, format: nil)
    engine.connect(trackSix, to: mainMixer, format: nil)
    engine.connect(trackSeven, to: mainMixer, format: nil)
    engine.connect(trackEight, to: mainMixer, format: nil)
    engine.connect(trackNine, to: mainMixer, format: nil)
    //schedule the audio files on the audio players (tracks)
    trackOne.scheduleFile(one, atTime: nil, completionHandler: nil)
    trackTwo.scheduleFile(two, atTime: nil, completionHandler: nil)
    trackThree.scheduleFile(three, atTime: nil, completionHandler: nil)
    trackFour.scheduleFile(four, atTime: nil, completionHandler: nil)
    trackFive.scheduleFile(five, atTime: nil, completionHandler: nil)
    trackSix.scheduleFile(six, atTime: nil, completionHandler: nil)
    trackSeven.scheduleFile(seven, atTime: nil, completionHandler: nil)
    trackEight.scheduleFile(eight, atTime: nil, completionHandler: nil)
    trackNine.scheduleFile(nine, atTime: nil, completionHandler: nil)
    //try to start the audio engine
    do {
        try engine.start()
    } catch {
        print("error starting engine")
    }
    //function to create a precise time to start all audio players (tracks)
    func startTime() -> AVAudioTime {
        let samplerate = one.processingFormat.sampleRate
        let sampleTime = AVAudioFramePosition(samplerate)
        let time = AVAudioTime(sampleTime: sampleTime, atRate: samplerate)
        return time
    }
    //start audio players (tracks) at precise time
    trackOne.playAtTime(startTime())
    trackTwo.playAtTime(startTime())
    trackThree.playAtTime(startTime())
    trackFour.playAtTime(startTime())
    trackFive.playAtTime(startTime())
    trackSix.playAtTime(startTime())
    trackSeven.playAtTime(startTime())
    trackEight.playAtTime(startTime())
    trackNine.playAtTime(startTime())
}
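
AVAudioPlayerNode has no delegate, but scheduleFile accepts a completion handler, which is the usual way to run follow-up code. Two caveats: the plain handler fires when the node has consumed the file, slightly before the last samples reach the output, and it is called on a background thread, so hop to the main queue before touching the SpriteKit scene. A sketch in current Swift syntax, reusing the question's names (the question's code is older Swift, so labels differ slightly):

// Plain completion: fires when the file has been consumed.
trackOne.scheduleFile(one, at: nil) {
    DispatchQueue.main.async {
        // run the follow-up game-scene code here
    }
}

// iOS 11+: ask for the callback when the data has actually played out.
trackNine.scheduleFile(nine, at: nil, completionCallbackType: .dataPlayedBack) { _ in
    DispatchQueue.main.async {
        // follow-up code here
    }
}

Since all nine tracks start at the same time and (presumably) have the same length, attaching the handler to a single node should be enough.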

Playing an audio file repeatedly with AVAudioEngine

I'm working on an iOS app with Swift and Xcode 6. What I would like to do is play an audio file using an AVAudioEngine, and up to this point everything is OK. But how can I play it without stopping, i.e. so that when it ends playing it starts again?
This is my code:
/*==================== CONFIGURES THE AVAUDIOENGINE ===========*/
audioEngine.reset() //Resets any previous configuration on AudioEngine
let audioPlayerNode = AVAudioPlayerNode() //The node that will play the actual sound
audioEngine.attachNode(audioPlayerNode) //Attachs the node to the audioengine
audioEngine.connect(audioPlayerNode, to: audioEngine.outputNode, format: nil) //Connects the applause playback node to the sound output
audioPlayerNode.scheduleFile(applause.applauseFile, atTime: nil, completionHandler: nil)
audioEngine.startAndReturnError(nil)
audioPlayerNode.play() //Plays the sound
Before telling me that I should use AVAudioPlayer for this: I can't, because later I will have to use some effects and play three audio files at the same time, also repeatedly.
I found the solution in another question, asked and also self-answered by @CarveDrone, so I've just copied the code he used:
class aboutViewController: UIViewController {

    var audioEngine: AVAudioEngine = AVAudioEngine()
    var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        let filePath: String = NSBundle.mainBundle().pathForResource("chimes", ofType: "wav")!
        println("\(filePath)")
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.processingFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
        audioFile.readIntoBuffer(audioFileBuffer, error: nil)
        let mainMixer = audioEngine.mainMixerNode
        audioEngine.attachNode(audioFilePlayer)
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
        audioEngine.startAndReturnError(nil)
        audioFilePlayer.play()
        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }
The only thing you have to change is the filePath constant. Here is the link to the original answer: Having AVAudioEngine repeat a sound
Swift 5, thanks to @Guillermo Barreiro:
var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

override func viewDidLoad() {
    super.viewDidLoad()
    guard let filePath: String = Bundle.main.path(forResource: "chimes", ofType: "wav") else { return }
    print("\(filePath)")
    let fileURL: URL = URL(fileURLWithPath: filePath)
    guard let audioFile = try? AVAudioFile(forReading: fileURL) else { return }
    let audioFormat = audioFile.processingFormat
    let audioFrameCount = UInt32(audioFile.length)
    guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else { return }
    do {
        try audioFile.read(into: audioFileBuffer)
    } catch {
        print("over")
    }
    let mainMixer = audioEngine.mainMixerNode
    audioEngine.attach(audioFilePlayer)
    audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    try? audioEngine.start()
    audioFilePlayer.play()
    audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options: AVAudioPlayerNodeBufferOptions.loops)
}
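
One behavioural note on the .loops option: the buffer repeats until the player is stopped or reset, or until another buffer is scheduled with the .interrupts option; there is no per-iteration callback. If you need to react to each pass, schedule the buffer repeatedly from its completion handler instead of using .loops.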
