I set up MPNowPlayingInfoCenter, but it isn't working. Background playback works, but the Now Playing controls never appear. This is how I set it up:
import UIKit
import AVFoundation
import MediaPlayer

class AudioPlayViewController: UIViewController, AVAudioPlayerDelegate {
    var player: AVAudioPlayer!
    var updater: CADisplayLink! = nil
    @IBOutlet weak var playButton: UIButton!
    var url_2 = URL(string: "")
    var selectedFileName: String = ""

    override func viewDidLoad() {
        super.viewDidLoad()
        let path = selectedFileName
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let url = documentsURL.appendingPathComponent(path)
        url_2 = url
        do {
            player = try AVAudioPlayer(contentsOf: url)
            updater = CADisplayLink(target: self, selector: #selector(self.trackAudio))
            updater.preferredFramesPerSecond = 1
            updater.add(to: RunLoop.current, forMode: RunLoop.Mode.common)
            player.prepareToPlay()
            player.delegate = self
        } catch {
            print(error)
        }
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(AVAudioSession.Category.playback)
        } catch {
        }
    }

    func play() {
        player.play()
        MPNowPlayingInfoCenter.default().nowPlayingInfo = [
            MPMediaItemPropertyTitle: selectedFileName,
            MPMediaItemPropertyArtist: "Ariana",
            MPMediaItemPropertyLyrics: "test"
        ]
    }
}
I set Background Modes > Audio, AirPlay, and Picture in Picture under Targets > Signing & Capabilities.
I solved it by adding:
func setupNowPlayingInfoCenter() {
    UIApplication.shared.beginReceivingRemoteControlEvents()
    MPRemoteCommandCenter.shared().playCommand.addTarget { event in
        self.play()
        return .success
    }
    MPRemoteCommandCenter.shared().pauseCommand.addTarget { event in
        self.pause()
        return .success
    }
    MPRemoteCommandCenter.shared().nextTrackCommand.addTarget { event in
        self.goForward()
        return .success
    }
    MPRemoteCommandCenter.shared().previousTrackCommand.addTarget { event in
        self.goBackward()
        return .success
    }
}
I found that you need to call UIApplication.shared.beginReceivingRemoteControlEvents() after you've set up the audio session, so the following worked for me:
try? AVAudioSession.sharedInstance().setCategory(.playback)
try? AVAudioSession.sharedInstance().setActive(true)
UIApplication.shared.beginReceivingRemoteControlEvents()
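For completeness, here is how those pieces fit together, in order, once the player is loaded — a minimal sketch (the function name and the title parameter are mine, and the duration/elapsed-time keys are optional extras that let the lock screen show a progress bar):
import AVFoundation
import MediaPlayer
import UIKit

// Sketch: configure and activate the session first, then start receiving
// remote events, register at least one command target, and finally
// publish the now-playing metadata.
func activateNowPlaying(for player: AVAudioPlayer, title: String) {
    try? AVAudioSession.sharedInstance().setCategory(.playback)
    try? AVAudioSession.sharedInstance().setActive(true)
    UIApplication.shared.beginReceivingRemoteControlEvents()

    MPRemoteCommandCenter.shared().playCommand.addTarget { _ in
        player.play()
        return .success
    }
    MPRemoteCommandCenter.shared().pauseCommand.addTarget { _ in
        player.pause()
        return .success
    }

    MPNowPlayingInfoCenter.default().nowPlayingInfo = [
        MPMediaItemPropertyTitle: title,
        MPMediaItemPropertyPlaybackDuration: player.duration,
        MPNowPlayingInfoPropertyElapsedPlaybackTime: player.currentTime,
        MPNowPlayingInfoPropertyPlaybackRate: player.isPlaying ? 1.0 : 0.0
    ]
}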
I am new to Swift, and I'm working on a project that records the user's voice and converts the sound file into a text file every two minutes. I use a timer to repeat the step every 2 minutes.
The problem is that the recorder is disabled on the second call. Also, the text file does not clear its content in preparation for the next call.
Here is the full code.
import UIKit
import Speech
import AVFoundation

class ViewController: UIViewController {
    var audioRecorder: AVAudioRecorder!
    var inString = ""
    let fileName = "Test"
    var str = ""
    let recordSettings = [AVSampleRateKey: NSNumber(value: Float(44100.0)),
                          AVFormatIDKey: NSNumber(value: Int32(kAudioFormatMPEG4AAC)),
                          AVNumberOfChannelsKey: NSNumber(value: Int32(1)),
                          AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.high.rawValue))]
    var timer = Timer()
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        var audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioRecorder = AVAudioRecorder(url: directoryURL()!, settings: recordSettings)
            audioRecorder.prepareToRecord()
        } catch {
            print("error")
        }
        audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            requestSpeechAuth()
        } catch {}
        timer = Timer.scheduledTimer(timeInterval: 120, target: self, selector: #selector(ViewController.stopAudio), userInfo: nil, repeats: true)
    }
    @objc func stopAudio() {
        audioRecorder.stop()
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setActive(false)
            let recognizer = SFSpeechRecognizer(locale: Locale(identifier: "ar_SA"))
            let request = SFSpeechURLRecognitionRequest(url: audioRecorder.url)
            recognizer?.recognitionTask(with: request) { (result, error) in
                if let error = error {
                    print("There was an error: \(error)")
                } else {
                    let dir = try? FileManager.default.url(for: .documentDirectory,
                                                           in: .userDomainMask, appropriateFor: nil, create: true)
                    if let fileURL = dir?.appendingPathComponent(self.fileName).appendingPathExtension("txt") {
                        do {
                            self.str = ""
                            self.str = (result?.bestTranscription.formattedString)!
                            try self.str.write(to: fileURL, atomically: true, encoding: .utf8)
                        } catch {
                            print("Failed writing to URL: \(fileURL), Error: " + error.localizedDescription)
                        }
                        do {
                            self.inString = try String(contentsOf: fileURL)
                        } catch {
                            print("Failed reading from URL: \(fileURL), Error: " + error.localizedDescription)
                        }
                        self.getIqama(fileN: self.inString, status: self.str)
                    }
                } // end else
            } // end recognitionTask
        } catch {}
        // requestSpeechAuth()
    }
    func directoryURL() -> URL? {
        let fileManager = FileManager.default
        let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
        let documentDirectory = urls[0] as URL
        let soundURL = documentDirectory.appendingPathComponent("AqimAlsalat.m4a")
        return soundURL
    }

    func getIqama(fileN: String, status: String) {
        var st: String!
        st = "السلام عليكم ورحمة الله السلام عليكم ورحمة الله"
        let st1: String!
        st1 = String(fileN)
        print(st1)
        if st1 == st {
            // audioEngine.stop()
            // speechRecognitionRequest?.endAudio()
            print(st1)
            print("JJalal")
        } else {
            print("Dalal")
            print(fileN)
        }
    }

    func requestSpeechAuth() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            if authStatus == SFSpeechRecognizerAuthorizationStatus.authorized {
                let audioSession = AVAudioSession.sharedInstance()
                do {
                    try audioSession.setActive(true)
                    self.audioRecorder.record()
                } catch {}
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Any suggestions or ideas?
Thanks.
It seems like you need to call self.audioRecorder.record() again after you stop the recording to convert the sound file into text. The Apple docs say that calling record() will create or erase an audio file, so that should solve your problem.
However, you may encounter another problem, where you miss a period of recording while you are transcribing the text. You could consider fixing that by switching back and forth between two recorders, as shown below, or by changing the audio recorder's file location (or moving the previous file) before starting to record again.
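A minimal sketch of the two-recorder idea — the names (recorders, rotateRecorders, transcribe) are hypothetical and error handling is elided; the point is that one recorder can start immediately while the other's finished file is handed to the recognizer:
import AVFoundation

// Hypothetical sketch: ping-pong between two recorders so recording
// continues while the previous file is being transcribed.
var recorders: [AVAudioRecorder] = []   // two recorders, prepared up front
var activeIndex = 0

func makeRecorder(named name: String, settings: [String: Any]) throws -> AVAudioRecorder {
    let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let recorder = try AVAudioRecorder(url: docs.appendingPathComponent(name), settings: settings)
    recorder.prepareToRecord()
    return recorder
}

// Called by the 2-minute timer.
func rotateRecorders() {
    let finished = recorders[activeIndex]
    finished.stop()                        // the finished file is now complete on disk
    activeIndex = (activeIndex + 1) % 2
    recorders[activeIndex].record()        // record() erases this recorder's old file and starts fresh
    transcribe(fileURL: finished.url)      // hand the finished file to SFSpeechRecognizer (your existing code)
}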
How would I stream audio from a URL in Swift without downloading the mp3 file to the device? What do I need to import? Do I need certain libraries? Do I need to add anything to Info.plist? Please comment your code.
You can use iOS AVPlayer for streaming audio from a URL.
var player: AVPlayer!
let url = URL(string: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
let playerItem: AVPlayerItem = AVPlayerItem(url: url!)
player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = CGRect(x: 0, y: 0, width: 10, height: 50)
self.view.layer.addSublayer(playerLayer)
player.play()
import AVFoundation

class MusicPlayer {
    public static var instance = MusicPlayer()
    var player = AVPlayer()

    func initPlayer(url: String) {
        guard let url = URL(string: url) else { return }
        let playerItem = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: playerItem)
        playAudioBackground()
    }

    func playAudioBackground() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers, .allowAirPlay])
            print("Playback OK")
            try AVAudioSession.sharedInstance().setActive(true)
            print("Session is Active")
        } catch {
            print(error)
        }
    }

    func pause() {
        player.pause()
    }

    func play() {
        player.play()
    }
}
This class will play music in the background and play any audio/video URL.
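For example, using the singleton above:
// Hand the class a stream URL, then control playback:
MusicPlayer.instance.initPlayer(url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
MusicPlayer.instance.play()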
For online streaming you have to use the AVFoundation framework.
var player: AVPlayer!
let url = URL.init(string: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
player = AVPlayer.init(url: url!)
To play:
player.play()
To pause:
player.pause()
I tested it with your URL and it works:
var player: AVPlayer?
let url = "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3"
let playerItem = AVPlayerItem(url: URL(string: url)!)
player = AVPlayer(playerItem: playerItem)
player!.rate = 1.0
player!.play()
Here you go:
import AVFoundation

var progressTimer: Timer? {
    willSet {
        progressTimer?.invalidate()
    }
}
var playerStream: AVPlayer?
var playerItem: AVPlayerItem?

func playerStream(urlStream: String) {
    if let playerStream = playerStream {
        // AVPlayer has no isPlaying property; a non-zero rate means it is playing.
        if playerStream.rate != 0 {
            stopProgressTimer()
            playerStream.pause()
        } else {
            startProgressTimer()
            playerStream.play()
        }
    } else {
        if let urlStr = urlStream.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) {
            if let tempURL = URL(string: urlStr) {
                playerItem = AVPlayerItem(url: tempURL)
                playerStream = AVPlayer(playerItem: playerItem)
                NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidPlayToEndTime), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem)
            }
        }
    }
}

@objc func playerItemDidPlayToEndTime() {
    stopProgressTimer()
    self.playProgressView.progress = 0.0
    if let playerStream = self.playerStream {
        playerStream.replaceCurrentItem(with: playerItem)
        playerStream.seek(to: kCMTimeZero)
        // playerStream.seek(to: .zero) in Swift 4.0
    }
}

func stopProgressTimer() {
    progressTimer?.invalidate()
    progressTimer = nil
}

func startProgressTimer() {
    if #available(iOS 10.0, *) {
        progressTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            self?.updateProgressTimer()
        }
    } else {
        progressTimer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.updateProgressTimer), userInfo: nil, repeats: true)
    }
}

@objc func updateProgressTimer() {
    if let playerItem = playerItem {
        if let pa = playerStream {
            let floatTime = Float(CMTimeGetSeconds(pa.currentTime()))
            let floatTimeDu = Float(CMTimeGetSeconds(playerItem.duration))
            // UIProgressView.progress is a Float
            playProgressView.progress = floatTime / floatTimeDu
        }
    }
}
func playAudioBackground() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers, .allowAirPlay])
        print("Playback OK")
        try AVAudioSession.sharedInstance().setActive(true)
        print("Session is Active")
    } catch {
        print(error)
    }
}
should be
func playAudioBackground() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers])
        print("Playback OK")
        try AVAudioSession.sharedInstance().setActive(true)
        print("Session is Active")
    } catch {
        print(error)
    }
}
because .allowAirPlay is not allowed with AVAudioSession.Category.playback and will cause a real device to throw an exception. It works fine in the simulator but not on a device, and as a result your audio session will not be configured properly.
I would have replied to it, but my reputation wasn't high enough to allow me to...
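If you want to be defensive about category/option combinations that a particular device may reject, one pattern (a sketch of mine, not from the answer above) is to attempt the full configuration and fall back to the bare category on failure:
import AVFoundation

// Sketch: try the desired option set first; if the device rejects the
// combination, fall back to the plain playback category.
func configurePlaybackSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playback, mode: .default, options: [.mixWithOthers])
    } catch {
        try? session.setCategory(.playback)   // bare category as a fallback
    }
    try? session.setActive(true)
}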
I'm new to Swift, but I want to change my view controller to play a remote mp3 file in my iOS app. I started with this code to play a song locally, and it works (with functions for the player afterwards):
import AVFoundation

class Music1ViewController: UIViewController {
    //5 -
    var songPlayer = AVAudioPlayer()
    //15 -
    var hasBeenPaused = false
    //6 -
    func prepareSongAndSession() {
        do {
            //7 - Insert the song from our Bundle into our AVAudioPlayer
            songPlayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: Bundle.main.path(forResource: "localsong", ofType: "mp3")!))
            //8 - Prepare the song to be played
            songPlayer.prepareToPlay()
After looking at the AVAudioPlayer documentation, .prepareToPlay() preloads the buffer, which makes me think all I need to do is change the initializer to target a URL.
Then I change the initializer:
songPlayer = try AVAudioPlayer(contentsOf: URL(string: "https://s3.amazonaws.com/kargopolov/kukushka.mp3")!)
I don't get any errors in Xcode, but when I run it, I see an error in the console for Thread 1: EXC_BAD_ACCESS (code=1, address=0x48), which makes me think I am approaching this wrong.
Is there a better way to access the remote mp3 file?
Try this code:
You need to add AVKit & AVFoundation to your project's frameworks and import them:
import UIKit
import AVKit
import AVFoundation

class ViewController: UIViewController {
    var player = AVPlayer()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    // Action for a button I created that plays a local file
    @IBAction func localPress(_ sender: Any) {
        let path = Bundle.main.resourcePath! + "/sound.mp3"
        print(path)
        let url = URL(fileURLWithPath: path)
        let playerItem = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: playerItem)
        player.play()
    }

    // Action for another button I created that plays a URL file
    @IBAction func urlPressed(_ sender: Any) {
        let playerItem = AVPlayerItem(url: URL(string: "https://yourURL.mp3")!)
        player = AVPlayer(playerItem: playerItem)
        player.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
My approach
func preparePlayer() {
    guard let url = URL(string: "https://yourURL.mp3") else {
        print("Invalid URL")
        return
    }
    do {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(AVAudioSessionCategoryPlayback)
        // Note: Data(contentsOf:) fetches the whole file synchronously,
        // blocking the current thread until the download finishes.
        let soundData = try Data(contentsOf: url)
        audioPlayer = try AVAudioPlayer(data: soundData)
        audioPlayer.volume = 1
        let minuteString = String(format: "%02d", (Int(audioPlayer.duration) / 60))
        let secondString = String(format: "%02d", (Int(audioPlayer.duration) % 60))
        print("TOTAL TIMER: \(minuteString):\(secondString)")
    } catch {
        print(error)
    }
}
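Note that this approach downloads the entire file into memory before playback starts, so it is not streaming in the strict sense. If blocking the calling thread is a concern, a hedged alternative is to fetch the data asynchronously first — a sketch, assuming a view controller with an audioPlayer: AVAudioPlayer? property (for true streaming without a full download, use AVPlayer as in the other answers):
// Sketch: download off the main thread, then build the player on completion.
func preparePlayerAsync(from url: URL) {
    URLSession.shared.dataTask(with: url) { [weak self] data, _, error in
        guard let data = data, error == nil else {
            print("Download failed: \(String(describing: error))")
            return
        }
        DispatchQueue.main.async {
            self?.audioPlayer = try? AVAudioPlayer(data: data)
            self?.audioPlayer?.play()
        }
    }.resume()
}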
I am new to Swift and also to Stack Overflow. Thanks in advance for your attention.
Basically, I'm trying to build a custom camera that records video with audio, so that the video plays back with sound. For the last few days I've been trying to build it; I followed a tutorial, but something is still missing: my custom camera only records video, apparently without audio, and I don't understand why. I've searched for an answer but haven't found an appropriate one.
Here is what I did:
import UIKit
import AVFoundation
import SVProgressHUD
import MediaPlayer
import MobileCoreServices
import AVKit

var videoUrl = [AnyObject]()

class TestViewController: UIViewController {
    @IBOutlet var viewVidioPlayer: UIView!
    @IBOutlet weak var myView: UIView!
    var session: AVCaptureSession?
    var userreponsevideoData = NSData()
    var userreponsethumbimageData = NSData()

    override func viewDidLoad() {
        super.viewDidLoad()
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
    }

    // here I create the session
    func createSession() {
        var input: AVCaptureDeviceInput?
        let movieFileOutput = AVCaptureMovieFileOutput()
        var prevLayer: AVCaptureVideoPreviewLayer?
        prevLayer?.frame.size = myView.frame.size
        session = AVCaptureSession()
        let error: NSError? = nil
        do {
            input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!)
        } catch {
            return
        }
        if error == nil {
            session?.addInput(input)
        } else {
            print("camera input error: \(String(describing: error))")
        }
        prevLayer = AVCaptureVideoPreviewLayer(session: session)
        prevLayer?.frame.size = myView.frame.size
        prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        prevLayer?.connection.videoOrientation = .portrait
        myView.layer.addSublayer(prevLayer!)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4"))
        let maxDuration: CMTime = CMTimeMake(600, 10)
        movieFileOutput.maxRecordedDuration = maxDuration
        movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
        if self.session!.canAddOutput(movieFileOutput) {
            self.session!.addOutput(movieFileOutput)
        }
        session?.startRunning()
        movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self)
    }
    func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == position {
                return device as? AVCaptureDevice
            }
        }
        return nil
    }

    @IBAction func pressbackbutton(sender: AnyObject) {
        session?.stopRunning()
    }

    @IBAction func Record(_ sender: Any) {
        createSession()
    }

    @IBAction func play(_ sender: Any) {
        self.videoPlay()
    }
    func videoPlay() {
        let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        do {
            // Get the directory contents urls (including subfolders urls)
            let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
            print(directoryContents)
            // if you want to filter the directory contents you can do it like this:
            videoUrl = directoryContents.filter { $0.pathExtension == "mp4" } as [AnyObject]
            print("mp4 urls:", videoUrl)
            let playerController = AVPlayerViewController()
            playerController.delegate = self as? AVPlayerViewControllerDelegate
            let movieURL = videoUrl[0]
            print(movieURL)
            let player = AVPlayer(url: movieURL as! URL)
            playerController.player = player
            self.addChildViewController(playerController)
            self.view.addSubview(playerController.view)
            playerController.view.frame = self.view.frame
            player.play()
            player.volume = 1.0
            player.rate = 1.0
        } catch let error as NSError {
            print(error.localizedDescription)
        }
    }
}
extension TestViewController: AVCaptureFileOutputRecordingDelegate {
    @available(iOS 4.0, *)
    private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) {
        print(fileURL)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        let filemainurl = outputFileURL
        do {
            let asset = AVURLAsset(url: filemainurl! as URL, options: nil)
            //AVURLAsset(URL: filemainurl as! URL, options: nil)
            print(asset)
            let imgGenerator = AVAssetImageGenerator(asset: asset)
            imgGenerator.appliesPreferredTrackTransform = true
            let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
            let uiImage = UIImage(cgImage: cgImage)
            userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL)
            print(userreponsethumbimageData.length)
            print(uiImage)
            // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
        } catch let error as NSError {
            print(error)
            return
        }
        SVProgressHUD.show(with: SVProgressHUDMaskType.clear)
        let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString
        if FileManager.default.fileExists(atPath: VideoFilePath) {
            do {
                try FileManager.default.removeItem(atPath: VideoFilePath)
            } catch { }
        }
        let tempfilemainurl = NSURL(string: VideoFilePath)!
        let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil)
        let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
        assetExport.outputFileType = AVFileTypeQuickTimeMovie
        assetExport.outputURL = tempfilemainurl as URL
        assetExport.exportAsynchronously { () -> Void in
            switch assetExport.status {
            case AVAssetExportSessionStatus.completed:
                DispatchQueue.main.async(execute: {
                    do {
                        SVProgressHUD.dismiss()
                        self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                        print("MB - \(self.userreponsevideoData.length) byte")
                    } catch {
                        SVProgressHUD.dismiss()
                        print(error)
                    }
                })
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport.error)")
            default:
                print("complete")
                SVProgressHUD.dismiss()
            }
        }
    }
}
That's all I have done, so I don't understand what is missing from this code: why isn't audio playing with the video, or why isn't audio being recorded with the video?
Use this CocoaPod for your project; it makes your job quite easy.
It has all the instructions on what to do, and it also contains a demo project so you can test that it works as you intended.
SwiftyCam
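For what it's worth, the session in the question only ever adds a camera input, and AVCaptureMovieFileOutput records an audio track only when the session also has a microphone input. A minimal sketch of the missing piece, written against the current AVFoundation API rather than the Swift 3-era names used in the question:
import AVFoundation

// Sketch: attach both camera and microphone inputs before recording,
// so AVCaptureMovieFileOutput writes an audio track alongside the video.
func configureSession(_ session: AVCaptureSession) throws {
    if let camera = AVCaptureDevice.default(for: .video) {
        let videoInput = try AVCaptureDeviceInput(device: camera)
        if session.canAddInput(videoInput) { session.addInput(videoInput) }
    }
    if let mic = AVCaptureDevice.default(for: .audio) {
        let audioInput = try AVCaptureDeviceInput(device: mic)
        if session.canAddInput(audioInput) { session.addInput(audioInput) }  // the missing piece
    }
}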
I have this code in a very simple, single-view Swift application in my ViewController:
var audioPlayer = AVAudioPlayer()

@IBAction func playMyFile(sender: AnyObject) {
    let fileString = NSBundle.mainBundle().pathForResource("audioFile", ofType: "m4a")
    let url = NSURL(fileURLWithPath: fileString)
    var error: NSError?
    audioPlayer = AVAudioPlayer(contentsOfURL: url, error: &error)
    audioPlayer.delegate = self
    audioPlayer.prepareToPlay()
    if (audioPlayer.isEqual(nil)) {
        println("There was an error: \(error)")
    } else {
        audioPlayer.play()
        NSLog("working")
    }
}
I have added import AVFoundation, and audioPlayer is a global variable. When I execute the code, it does print "working", so it makes it through without errors, but no sound is played. The device is not in silent mode.
There's so much wrong with your code that Socratic method breaks down; it will probably be easiest just to throw it out and show you:
var player: AVAudioPlayer! = nil // will be Optional, must supply initializer

@IBAction func playMyFile(sender: AnyObject?) {
    let path = NSBundle.mainBundle().pathForResource("audioFile", ofType: "m4a")
    let fileURL = NSURL(fileURLWithPath: path)
    player = AVAudioPlayer(contentsOfURL: fileURL, error: nil)
    player.prepareToPlay()
    player.delegate = self
    player.play()
}
I have not bothered to do any error checking, but the upside is you'll crash if there's a problem.
One final point, which may or may not be relevant: not every m4a file is playable. A highly compressed file, for example, can fail silently (pun intended).
It's important that the AVAudioPlayer is a class member and not local to the given function; otherwise it goes out of scope... :)
I had to declare a global player variable
var player: AVAudioPlayer!
and set it in viewDidLoad
override func viewDidLoad() {
    super.viewDidLoad()
    player = AVAudioPlayer()
}
Then I could play the audio file wherever I liked, like this:
func playAudioFile() {
    do {
        if audioFileUrl == nil {
            return
        }
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
        try AVAudioSession.sharedInstance().setActive(true)
        /* The following line is required for the player to work on iOS 11. Change the file type accordingly. */
        player = try AVAudioPlayer(contentsOf: audioFileUrl, fileTypeHint: AVFileType.m4a.rawValue)
        /* iOS 10 and earlier require the following line:
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileTypeMPEGLayer3) */
        guard let player = player else { return }
        player.play()
        print("PLAYING::::: \(audioFileUrl)")
    } catch let error {
        print(error.localizedDescription)
    }
}
Here is a working snippet from my Swift project. Replace "audiofile" with your file name.
var audioPlayer = AVAudioPlayer()
let audioPath = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("audiofile", ofType: "mp3"))
audioPlayer = AVAudioPlayer(contentsOfURL: audioPath, error: nil)
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
audioPlayer.play()
You can download a fully functional Swift audio player application's source code from here: https://github.com/bpolat/Music-Player
For some reason (probably a bug), Xcode can't play certain music files in the .m4a and .mp3 formats. I would recommend changing them all to .wav files to get them to play.
//top of your class
var audioPlayer = AVAudioPlayer()

//where you want to play your sound
let sound = NSURL(fileURLWithPath: Bundle.main.path(forResource: "sound", ofType: "wav")!)
do {
    audioPlayer = try AVAudioPlayer(contentsOf: sound as URL)
    audioPlayer.prepareToPlay()
} catch {
    print("Problem in getting File")
}
audioPlayer.play()
var audioPlayer = AVAudioPlayer()
var alertSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("KiepRongBuon", ofType: "mp3")!)
AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: nil)
AVAudioSession.sharedInstance().setActive(true, error: nil)
var error:NSError?
audioPlayer = AVAudioPlayer(contentsOfURL: alertSound, error: &error)
audioPlayer.prepareToPlay()
audioPlayer.play()
I used the code below in my app and it works. Hope it helps.
var audioPlayer: AVAudioPlayer!

if var filePath = NSBundle.mainBundle().pathForResource("audioFile", ofType: "mp3") {
    var filePathUrl = NSURL.fileURLWithPath(filePath)
    audioPlayer = AVAudioPlayer(contentsOfURL: filePathUrl, error: nil)
    audioPlayer.play()
} else {
    println("Path for audio file not found")
}
In Swift, using do/try/catch solved this issue and played audio for me; my code is below:
var playerVal = AVAudioPlayer()

@IBAction func btnPlayAction(sender: AnyObject) {
    let fileURL: NSURL = NSURL(string: url)!
    let soundData = NSData(contentsOfURL: fileURL)
    do {
        playerVal = try AVAudioPlayer(data: soundData! as Data)
    } catch {
        print("Something bad happened. Try catching specific errors to narrow things down:", error)
    }
    playerVal.delegate = self
    playerVal.prepareToPlay()
    playerVal.play()
}
Based on #matt answer but little bit detailed 'cause original answer did not completely satisfied me.
import AVFoundation

class YourController: UIViewController {

    private var player: AVAudioPlayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        prepareAudioPlayer()
    }

    @IBAction func playAudio() {
        player?.play()
    }
}

extension YourController: AVAudioPlayerDelegate {}

private extension YourController {
    func prepareAudioPlayer() {
        guard let path = Bundle.main.path(forResource: "you-audio", ofType: "mp3") else {
            return
        }
        let fileURL = URL(fileURLWithPath: path)
        do {
            player = try AVAudioPlayer(contentsOf: fileURL)
        } catch let ex {
            print(ex.localizedDescription)
        }
        player?.prepareToPlay()
        player?.delegate = self
    }
}
swift 3.0:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    var audioplayer = AVAudioPlayer()

    @IBAction func Play(_ sender: Any) {
        audioplayer.play()
    }

    @IBAction func Pause(_ sender: Any) {
        if audioplayer.isPlaying {
            audioplayer.pause()
        }
    }

    @IBAction func Restart(_ sender: Any) {
        if audioplayer.isPlaying {
            audioplayer.currentTime = 0
            audioplayer.play()
        } else {
            audioplayer.play()
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            audioplayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: Bundle.main.path(forResource: "bahubali", ofType: "mp3")!))
            audioplayer.prepareToPlay()
            let audioSession = AVAudioSession.sharedInstance()
            do {
                try audioSession.setCategory(AVAudioSessionCategoryPlayback)
            } catch {
            }
        } catch {
            print(error)
        }
    }
}
}