I have a TableViewController and an AudioPlayerViewController, and I have a problem using MPRemoteCommandCenter. For example: in TableViewController I tap a cell and go to AudioPlayerViewController, then I lock the device and control my music with MPRemoteCommandCenter, and everything works fine. But if I then unlock the device, return to TableViewController, go to AudioPlayerViewController again, lock the device, and press the play/pause button, my music plays twice at the same time. If I repeat the steps, it plays three times at the same time, and so on. How can I fix this?
code:
import UIKit
import AVFoundation
import MediaPlayer
class ViewController: UIViewController, AVAudioPlayerDelegate, UIAlertViewDelegate {
var audioPlayer: AVAudioPlayer!
let musicOperation = OperationQueue()
var timer: Timer?
@IBOutlet weak var playButton: UIButton!
@IBOutlet var timeElapsed: UILabel!
@IBOutlet var timeDuration: UILabel!
@IBOutlet weak var logo: UIImageView!
@IBOutlet weak var slider: UISlider!
@IBOutlet weak var volumeView: UIView!
var index = 0
var buttonIndex = 0
var endOfChapterSleepTimer = false
override func viewDidLoad() {
super.viewDidLoad()
// Table View Index
buttonIndex = masterIndex
musicOperation.maxConcurrentOperationCount = 1
try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
UserDefaults.standard.set(index, forKey: "index")
if index > 0 {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
if let documentDirectoryURL: NSURL = urls.first as NSURL? {
let soundURL = documentDirectoryURL.appendingPathComponent("/\(masterIndex)/\(index).mp3")
UserDefaults.standard.set(index, forKey: "\(masterIndex)")
do {
audioPlayer = try AVAudioPlayer(contentsOf: soundURL!)
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
play(sender:AnyObject.self as AnyObject)
restorePlayerCurrentTime()
setupMediaPlayerNotificationView()
lockScreen()
} catch {
}
}
} else {
let url = Bundle.main.url(forResource: "\(masterIndex)0", withExtension: "m4a")!
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
play(sender:AnyObject.self as AnyObject)
setupMediaPlayerNotificationView()
lockScreen()
} catch {
}
}
}
// MARK: - Audio player controller
@IBAction func play(sender: AnyObject) {
if !audioPlayer.isPlaying{
audioPlayer.play()
slider.maximumValue = Float(audioPlayer.duration)
timer = Timer(timeInterval: 0.1, target: self, selector: #selector(self.updateTime), userInfo: nil, repeats: true)
RunLoop.main.add(timer!, forMode: .commonModes)
restorePlayerCurrentTime()
playButton.setImage(UIImage(named: "pause.png"), for: UIControlState.normal)
} else {
audioPlayer.pause()
playButton.setImage(UIImage(named: "play.png"), for: UIControlState.normal)
timer?.invalidate()
}
}
@IBAction func fastForward(sender: AnyObject) {
var time: TimeInterval = audioPlayer.currentTime
time += 15.0 // Go Forward by 15 Seconds
if time > audioPlayer.duration {
audioPlayerDidFinishPlaying(audioPlayer, successfully: true)
} else {
audioPlayer.currentTime = time
updateTime()
}
}
@IBAction func fastBackward(sender: AnyObject) {
var time: TimeInterval = audioPlayer.currentTime
time -= 15.0 // Go Back by 15 Seconds
if time < 0 {
audioPlayer.currentTime = 0
updateTime()
} else {
audioPlayer.currentTime = time
updateTime()
}
}
// MARK: - Audio player time
private func restorePlayerCurrentTime() {
let currentTimeFromUserDefaults : Double? = UserDefaults.standard.value(forKey: "currentTime\(masterIndex)\(index)") as! Double?
if let currentTimeFromUserDefaultsValue = currentTimeFromUserDefaults {
audioPlayer.currentTime = currentTimeFromUserDefaultsValue
slider.value = Float.init(audioPlayer.currentTime)
}
}
@objc func updateTime() {
let currentTime = Int(audioPlayer.currentTime)
let minutes = currentTime/60
let seconds = currentTime - minutes * 60
let durationTime = Int(audioPlayer.duration) - Int(audioPlayer.currentTime)
let minutes1 = durationTime/60
let seconds1 = durationTime - minutes1 * 60
timeElapsed.text = NSString(format: "%02d:%02d", minutes,seconds) as String
timeDuration.text = NSString(format: "-%02d:%02d", minutes1,seconds1) as String
UserDefaults.standard.set(currentTime, forKey: "currentTime\(masterIndex)\(index)")
UserDefaults.standard.set(durationTime, forKey: "durationTime\(masterIndex)\(index)")
slider.value = Float.init(audioPlayer.currentTime)
}
func audioPlayerDidFinishPlaying(_ audioPlayer: AVAudioPlayer, successfully flag: Bool) {
playButton.setImage(UIImage(named: "play.png"), for: UIControlState.normal)
let currentTime = 0
let durationTime = 0.1
UserDefaults.standard.set(currentTime, forKey: "currentTime\(masterIndex)\(index)")
UserDefaults.standard.set(durationTime, forKey: "durationTime\(masterIndex)\(index)")
slider.value = Float.init(audioPlayer.currentTime)
timer?.invalidate()
let path = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.userDomainMask, true)
let documentDirectoryPath:String = path[0]
let fileManager = FileManager()
let destinationURLForFile = URL(fileURLWithPath: documentDirectoryPath.appendingFormat("/\(masterIndex)/\(index+1).mp3"))
if fileManager.fileExists(atPath: destinationURLForFile.path){
index = index + 1
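// Note: calling viewDidLoad() by hand re-runs all of this setup, including
// setupMediaPlayerNotificationView(), which registers the remote command targets again.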
viewDidLoad()
} else {
}
}
// MARK: - Audio player lock screen
func lockScreen() {
var albumArtwork : MPMediaItemArtwork!
let image:UIImage = UIImage(named: "infoImage")!
albumArtwork = MPMediaItemArtwork.init(boundsSize: image.size, requestHandler: { (size) -> UIImage in
return image
})
let infotitle = "title"
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyArtist : "",
MPMediaItemPropertyTitle : infotitle,
MPMediaItemPropertyArtwork : albumArtwork,
MPMediaItemPropertyAlbumTitle : "",
MPNowPlayingInfoPropertyElapsedPlaybackTime : Int(audioPlayer.currentTime),
MPMediaItemPropertyPlaybackDuration: Int(audioPlayer.duration)]
}
func setupMediaPlayerNotificationView() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.playCommand.addTarget { event in
self.audioPlayer.play()
self.lockScreen()
self.playButton.setImage(UIImage(named: "pause.png"), for: UIControlState.normal)
print("play")
return .success
}
commandCenter.pauseCommand.addTarget { event in
self.audioPlayer.pause()
self.lockScreen()
self.playButton.setImage(UIImage(named: "play.png"), for: UIControlState.normal)
print("pause")
return .success
}
commandCenter.skipBackwardCommand.preferredIntervals = [15]
commandCenter.skipBackwardCommand.addTarget { event in
self.fastBackward(sender: self)
self.lockScreen()
print("fastBackward")
return .success
}
commandCenter.skipForwardCommand.preferredIntervals = [15]
commandCenter.skipForwardCommand.addTarget { event in
self.fastForward(sender: self)
self.lockScreen()
print("fastForward")
return .success
}
commandCenter.changePlaybackPositionCommand.addTarget(self, action: #selector(self.changedThumbSlider(_:)))
}
@objc func changedThumbSlider(_ event: MPChangePlaybackPositionCommandEvent) -> MPRemoteCommandHandlerStatus {
let time = event.positionTime
audioPlayer.currentTime = TimeInterval(time)
self.lockScreen()
return .success
}
// MARK: - Audio player slider
@IBAction func slide(_ slider: UISlider) {
musicOperation.cancelAllOperations()
let operation = BlockOperation()
audioPlayer.currentTime = TimeInterval(slider.value)
self.lockScreen()
musicOperation.addOperation(operation)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
audioPlayer.pause()
timer?.invalidate()
musicOperation.cancelAllOperations()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
UIView.setAnimationsEnabled(true)
// Navigation Bar
self.navigationController?.navigationBar.prefersLargeTitles = true
self.navigationItem.largeTitleDisplayMode = .always
self.navigationController?.navigationBar.shadowImage = UIImage()
self.navigationController?.navigationBar.barTintColor = UIColor(red: 55/255, green: 60/255, blue: 65/255, alpha: 1.0)
self.navigationController?.navigationBar.isTranslucent = true
self.navigationController?.navigationBar.tintColor = .white
self.navigationItem.backBarButtonItem = UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
}
What's happening is that every time you go to AudioPlayerViewController you add targets to MPRemoteCommandCenter, but when you go back to TableViewController you never call removeTarget. This is your issue.
Call something like the function below in viewWillDisappear to remove the targets:
func setupMediaPlayerNotificationView(_ enable: Bool) {
let commandCenter = MPRemoteCommandCenter.shared()
if enable {
commandCenter.playCommand.addTarget(self, action: #selector(self.lockScreenPlay(_:)))
commandCenter.pauseCommand.addTarget(self, action: #selector(self.lockScreenPlay(_:)))
commandCenter.nextTrackCommand.addTarget(self, action: #selector(self.lockScreenNextTrack(_:)))
commandCenter.previousTrackCommand.addTarget(self, action: #selector(self.lockScreenPreviousTrack(_:)))
} else {
commandCenter.playCommand.removeTarget(self, action: #selector(self.lockScreenPlay(_:)))
commandCenter.pauseCommand.removeTarget(self, action: #selector(self.lockScreenPlay(_:)))
commandCenter.nextTrackCommand.removeTarget(self, action: #selector(self.lockScreenNextTrack(_:)))
commandCenter.previousTrackCommand.removeTarget(self, action: #selector(self.lockScreenPreviousTrack(_:)))
}
}
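For example, a minimal sketch of how the calls could be paired with the view lifecycle (using the question's view controller; the exact hooks are a judgment call):
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    setupMediaPlayerNotificationView(true)  // register the remote command targets
}
override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    setupMediaPlayerNotificationView(false) // remove them so handlers never stack up
}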
Example, in response to the follow-up question:
@objc func lockScreenPlay(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.playButton(AnyObject.self)
return .success
}
@objc func lockScreenNextTrack(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.playerDidFinishPlaying()
return .success
}
I have a tabbed app that starts recording on one tab, and plots the mic levels on another tab.
In the first VC, I'm gathering mic levels and storing them in an array in the model. I'm using another method in the model to update the data, and I'm calling it in the second VC in order to update the view.
What I want to do is update the chart in the second view controller from the first view controller (where the logic for storing data in the model is)
Model:
Chart.swift
import Charts
class Chart {
static let sharedInstance = Chart()
var lineChartView: LineChartView!
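// Note: lineChartView is never assigned inside this model; a view controller
// must inject a LineChartView before setChartValues() is called.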
func setChartValues() {
let entries = (0..<GraphData.sharedInstance.array.count).map { (i) -> ChartDataEntry in
let val = GraphData.sharedInstance.array[i]
print(ChartDataEntry(x: Double(i), y: val))
return ChartDataEntry(x: Double(i), y: val)
}
let set1 = LineChartDataSet(values: entries, label: "DataSet 1")
let data = LineChartData(dataSet: set1)
lineChartView.data = data
}
}
GraphData.swift
class GraphData {
static let sharedInstance = GraphData()
var array = [Double]()
}
View Controllers:
First VC: (complete code per comment)
import UIKit
import AVFoundation
class SoundController: UIViewController, AVAudioRecorderDelegate {
var recordingSession: AVAudioSession!
var audioRecorder: AVAudioRecorder!
var timer = Timer()
@IBOutlet weak var errorLbl: UILabel!
@IBOutlet weak var recordBtn: UIButton!
@IBAction func recordButton(_ sender: UIButton) {
if audioRecorder == nil {
startRecording()
} else {
finishRecording(success: true)
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
errorLbl.text = ""
}
override func viewDidLoad() {
super.viewDidLoad()
recordPermission()
}
func recordPermission() {
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession.setCategory(.playAndRecord, mode: .default)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission() { allowed in
DispatchQueue.main.async {
if allowed {
print("recording allowed")
} else {
self.errorLbl.text = "Recording Permission was Denied. Please open settings and allow Cry It Out to access the microphone."
}
}
}
} catch {
self.errorLbl.text = "Recording Permission was Denied. Please open settings and allow the app to access the microphone."
}
}
func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
func startRecording() {
if recordBtn.titleLabel?.text == "Tap to Re-record" {
//reset values array
GraphData.sharedInstance.array = []
}
let audioFilename = getDocumentsDirectory().appendingPathComponent("baby.m4a")
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.record()
runTimer()
recordBtn.setTitle("Tap to Stop", for: .normal)
} catch {
finishRecording(success: false)
}
}
func levelTimerCallback() -> Float {
if audioRecorder != nil {
audioRecorder.updateMeters()
//If we are beyond a threshold value (-15)
if audioRecorder.averagePower(forChannel: 0) > -15 {
return audioRecorder.averagePower(forChannel: 0)
}
}
return 0
}
func finishRecording(success: Bool) {
//stop recording and reset recorder to nil for other checks
audioRecorder.stop()
audioRecorder = nil
if success {
recordBtn.setTitle("Tap to Re-record", for: .normal)
if timer.isValid {
timer.invalidate()
}
} else {
//Recording Failed
recordBtn.setTitle("Tap to Record", for: .normal)
//disable timer if running (might be running or might not)
if timer.isValid {
timer.invalidate()
}
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
//MARK: Timers
@objc func updateTimer() {
if levelTimerCallback() != 0 {
let date = Date()
let calendar = Calendar.current
let month = calendar.component(.month, from: date)
let day = calendar.component(.day, from: date)
let hour = calendar.component(.hour, from: date)
let minutes = calendar.component(.minute, from: date)
let seconds = calendar.component(.second, from: date)
let prettyDate = "\(month)/\(day) \(hour):\(minutes) and \(seconds) seconds"
print(prettyDate)
GraphData.sharedInstance.array.append(Double(levelTimerCallback()))
//does this run the method? It should
GraphController.sharedInstance.lineChartView?.data = Chart.sharedInstance.setChartValues()
}
}
func runTimer() {
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: (#selector(SoundController.updateTimer)), userInfo: nil, repeats: true)
}
func stopTimer() {
timer.invalidate()
}
}
Second VC:
import UIKit
import Charts
class GraphController: UIViewController {
static let sharedInstance = GraphController()
#IBOutlet weak var lineChartView: LineChartView!
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(true)
self.lineChartView.data = Chart.sharedInstance.setChartValues()
}
}
Try this solution, without closures. You don't need to use static values.
1. Prepare your GraphController to have a function to receive data
class GraphController: UIViewController {
...
func dataReceived(gData: GraphData) {
DispatchQueue.main.async {
// Update your chart with gData
}
}
}
2. Get a reference to GraphController and use the function from step 1 to make your updates.
Please get the reference to your GraphController from the tab bar, and use this reference to call the function that updates your chart. I don't know your exact situation, but if you have trouble getting the reference, please look at this: https://stackoverflow.com/a/39499751/5140756
class SoundController: UIViewController, AVAudioRecorderDelegate {
var graphController : GraphController?
...
override func viewDidLoad() {
super.viewDidLoad()
...
// get graph controller reference from tabbar.
self.graphController = self.tabBarController!.viewControllers![INDEX_OF_VIEW_CONTROLLER] as! GraphController
}
// finally on your function call the function's graph controller receive data
@objc func updateTimer() {
if levelTimerCallback() != 0 {
let date = Date()
let calendar = Calendar.current
let month = calendar.component(.month, from: date)
let day = calendar.component(.day, from: date)
let hour = calendar.component(.hour, from: date)
let minutes = calendar.component(.minute, from: date)
let seconds = calendar.component(.second, from: date)
let prettyDate = "\(month)/\(day) \(hour):\(minutes) and \(seconds) seconds"
print(prettyDate)
GraphData.sharedInstance.array.append(Double(levelTimerCallback()))
//does this run the method? It should
//GraphController.sharedInstance.lineChartView?.data = Chart.sharedInstance.setChartValues()
if graphController != nil {
self.graphController!.dataReceived(gData: GraphData.sharedInstance)
}
}
}
}
Please look over the code and make whatever changes you need; I tried to automate as much as I could.
I am working on a project (in Swift) that compares two audio signals and measures how closely they match. The AudioKit pod is used to convert the audio from the microphone (AKAmplitudeTracker) to float numbers. I am trying to apply the same tracker to the AKAudioPlayer: sample both the source signal and the reference signal as amplitude data only, and then run the DTW (dynamic time warping) algorithm.
Is there any way to get the music played by AKAudioPlayer converted to amplitude data? Is it possible to add a tracker to the music the AKAudioPlayer is currently playing? The code is given below. I need some expert advice. Thanks in advance, and happy coding.
//
// Conductor.swift
// AmplitudeTracker
//
// Created by Mark Jeschke on 10/3/17.
// Copyright © 2017 Mark Jeschke. All rights reserved.
//
import AudioKit
import AudioKitUI
// Treat the conductor like a manager for the audio engine.
class Conductor {
// Singleton of the Conductor class to avoid multiple instances of the audio engine
var url:URL?
var fileName:String?
var type:String?
static let sharedInstance = Conductor()
var isPlayingKit:Bool?
var micTracker: AKAmplitudeTracker!
var mp3Tracker: AKAmplitudeTracker!
var player:AKAudioPlayer!
var mic: AKMicrophone!
var delay: AKDelay!
var reverb: AKCostelloReverb!
// Balance between the delay and reverb mix.
var reverbAmountMixer = AKDryWetMixer()
func play(file: String, type: String) -> AKAudioPlayer? {
let url = Bundle.main.url(forResource: file, withExtension: type)
let file = try! AKAudioFile(forReading: url!)
player = try! AKAudioPlayer(file: file)
if self.isPlayingKit! {
player.play()
mp3Tracker = AKAmplitudeTracker(player)
delay = AKDelay(mp3Tracker)
delay.time = 0.0
delay.feedback = 0.0
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer
}
else {
self.isPlayingKit = true
AudioKit.output = nil
player.stop()
}
return player
}
init() {
AKSettings.playbackWhileMuted = true
mic = AKMicrophone()
print("INIT CONDUCTOR")
micTracker = AKAmplitudeTracker(mic)
delay = AKDelay(micTracker)
delay.time = 0.5
delay.feedback = 0.1
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer
isPlayingKit = true
startAudioEngine()
}
func startAudioEngine() {
AudioKit.start()
isPlayingKit = true
print("Audio engine started")
}
func stopAudioEngine() {
AudioKit.stop()
isPlayingKit = false
print("Audio engine stopped")
}
}
The method above captures the amplitude of the microphone.
Below is where I tried to use AKAmplitudeTracker on the AKAudioPlayer.
//
// ViewController.swift
// AudioBoom
//
// Created by Alex Babu on 20/06/18.
// Copyright © 2018 Naico. All rights reserved.
//
import AudioKit
class ViewController: UIViewController {
var instantanousAmplitudeData = [Double]()
var timer:Timer?
var timerCount:Int?
let conductor = Conductor.sharedInstance
var player:AKAudioPlayer?
@IBOutlet weak var holderView: UIView!
@IBOutlet weak var equalizer: UILabel!
@IBOutlet weak var percentageLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
timerCount = 0
playMusicOutlet.layer.cornerRadius = playMusicOutlet.frame.size.height/2
playMusicOutlet.layer.borderColor = UIColor.cyan.cgColor
playMusicOutlet.layer.borderWidth = 2.0
playMusicOutlet.clipsToBounds = true
musicDTW.layer.cornerRadius = musicDTW.frame.size.height/2
musicDTW.layer.borderColor = UIColor.cyan.cgColor
musicDTW.layer.borderWidth = 2.0
musicDTW.clipsToBounds = true
holderView.layer.cornerRadius = holderView.frame.size.width/2
holderView.clipsToBounds = true
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBOutlet weak var playMusicOutlet: UIButton!
@IBAction func playMusic(_ sender: Any) {
playMusicOutlet.setTitle("Talk now", for: .normal)
self.equalizer.isHidden = false
timerCount = 0
AVAudioSession.sharedInstance().requestRecordPermission({(_ granted: Bool) -> Void in
if granted {
print("Permission granted")
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mic detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mic counter:\(String(describing: count))")
print("Amplitude of mp3 detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.instantanousAmplitudeData.append(self.conductor.micTracker.amplitude)
self.equalizer.frame.size.height = CGFloat(self.conductor.micTracker.amplitude * 500)
self.percentageLabel.text = String(Int(((self.conductor.micTracker.amplitude * 500)/500) * 100)) + "%"
if count == 10000 {
timer.invalidate()
self.equalizer.isHidden = true
}
}
}
}
}
else {
print("Permission denied")
}
})
}
@IBOutlet weak var musicDTW: UIButton!
@IBAction func musicDTWAction(_ sender: Any) {
let anotherConductor = Conductor.sharedInstance
if let ccc = anotherConductor.play(file: "Timebomb", type: "mp3") {
musicDTW.setTitle("Music DTW on", for: .normal)
if let mp3Tracker = conductor.mp3Tracker {
self.equalizer.frame.size.height = CGFloat(mp3Tracker.amplitude * 500)
}
}
else {
musicDTW.setTitle("Music DTW off", for: .normal)
}
}
}
There's a lot going on with all this code, so it's hard to debug, but what you describe is definitely possible and you probably just have some small thing wrong. Perhaps you can share the repo with me privately and I can fix it for you.
Try these out!
Conductor Class
import AudioKit
import AudioKitUI
// Treat the conductor like a manager for the audio engine.
class Conductor {
// Singleton of the Conductor class to avoid multiple instances of the audio engine
var url:URL?
var fileName:String?
var type:String?
static let sharedInstance = Conductor()
var isPlayingKit:Bool?
var micTracker: AKAmplitudeTracker!
var mp3Tracker: AKAmplitudeTracker!
var player:AKAudioPlayer!
var mic: AKMicrophone!
var delay: AKDelay!
var reverb: AKCostelloReverb!
// Balance between the delay and reverb mix.
var reverbAmountMixer = AKDryWetMixer()
func play(file: String, type: String) -> AKAudioPlayer? {
let url = Bundle.main.url(forResource: file, withExtension: type)
let file = try! AKAudioFile(forReading: url!)
player = try! AKAudioPlayer(file: file)
if self.isPlayingKit! {
mp3Tracker = AKAmplitudeTracker(player)
delay = AKDelay(mp3Tracker)
delay.time = 0.0
delay.feedback = 0.0
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer //#1
player.play() //#2
}
else {
self.isPlayingKit = true
AudioKit.output = nil
// player.stop()
stopAudioEngine()
}
return player
}
func isPlayingAudioKit() -> Bool {
return isPlayingKit!
}
init() {
self.isPlayingKit = false
}
func initMicrophone() {
AKSettings.playbackWhileMuted = true
mic = AKMicrophone()
print("INIT CONDUCTOR")
micTracker = AKAmplitudeTracker(mic)
delay = AKDelay(micTracker)
delay.time = 1.5
delay.feedback = 0.1
delay.dryWetMix = 1.0
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.5)
AudioKit.output = reverbAmountMixer
isPlayingKit = true
startAudioEngine()
}
func startAudioEngine() {
AudioKit.start()
isPlayingKit = true
print("Audio engine started")
}
func stopAudioEngine() {
AudioKit.stop()
isPlayingKit = false
print("Audio engine stopped")
}
}
ViewController
import AudioKit
class ViewController: UIViewController {
var instantanousUserAudioData = [Float]()
var referenceAudioData = [Float]()
var timer:Timer?
var timerCount:Int?
let conductor = Conductor.sharedInstance
@IBOutlet weak var holderView: UIView!
@IBOutlet weak var equalizer: UILabel!
@IBOutlet weak var percentageLabel: UILabel!
@IBOutlet weak var timerOutlet: UIButton!
@IBOutlet weak var micOutlet: UIButton!
@IBOutlet weak var DTWOutlet: UIButton!
@IBOutlet weak var musicOutlet: UIButton!
@IBOutlet weak var resultLabel: UILabel!
@IBAction func timerAction(_ sender: Any) {
self.timer?.invalidate()
}
override func viewDidLoad() {
super.viewDidLoad()
timerCount = 0
micOutlet.layer.cornerRadius = micOutlet.frame.size.height/2
micOutlet.layer.borderColor = UIColor.cyan.cgColor
micOutlet.layer.borderWidth = 2.0
micOutlet.clipsToBounds = true
musicOutlet.layer.cornerRadius = musicOutlet.frame.size.height/2
musicOutlet.layer.borderColor = UIColor.cyan.cgColor
musicOutlet.layer.borderWidth = 2.0
musicOutlet.clipsToBounds = true
DTWOutlet.layer.cornerRadius = DTWOutlet.frame.size.height/2
DTWOutlet.layer.borderColor = UIColor.cyan.cgColor
DTWOutlet.layer.borderWidth = 2.0
DTWOutlet.clipsToBounds = true
timerOutlet.layer.cornerRadius = timerOutlet.frame.size.height/2
timerOutlet.layer.borderColor = UIColor.cyan.cgColor
timerOutlet.layer.borderWidth = 2.0
timerOutlet.clipsToBounds = true
holderView.layer.cornerRadius = holderView.frame.size.width/2
holderView.clipsToBounds = true
self.micOutlet.isEnabled = false
self.musicOutlet.isEnabled = false
AVAudioSession.sharedInstance().requestRecordPermission({(_ granted: Bool) -> Void in
self.micOutlet.isEnabled = granted
self.musicOutlet.isEnabled = granted
})
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func micAction(_ sender: Any) {
conductor.initMicrophone()
self.timerCount = 0
self.equalizer.isHidden = false
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mic detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.instantanousUserAudioData.append(Float(self.conductor.micTracker.amplitude))
self.equalizer.frame.size.height = CGFloat(self.conductor.micTracker.amplitude * 500)
self.percentageLabel.text = String(Int(((self.conductor.micTracker.amplitude * 500)/500) * 100)) + "%"
if count > 10 && self.conductor.micTracker.amplitude == 0.0 && self.instantanousUserAudioData.last == 0.0 {
self.micOutlet.backgroundColor = .green
self.micOutlet.setTitleColor(.black, for: .normal)
self.micOutlet.layer.borderColor = UIColor.red.cgColor
timer.invalidate()
}
if count == 0 {
self.micOutlet.backgroundColor = .clear
self.micOutlet.setTitleColor(.cyan, for: .normal)
self.micOutlet.layer.borderColor = UIColor.cyan.cgColor
}
}
}
}
}
@IBAction func musicAction(_ sender: Any) {
self.timerCount = 0
if self.conductor.play(file: voiceReference, type: type_mp3) != nil {
self.timer?.invalidate()
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mp3 detected:\(self.conductor.mp3Tracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.referenceAudioData.append(Float(self.conductor.mp3Tracker.amplitude))
self.equalizer.frame.size.height = CGFloat(self.conductor.mp3Tracker.amplitude * 500)
self.equalizer.isHidden = false
self.percentageLabel.text = String(Int(((self.conductor.mp3Tracker.amplitude * 500)/500) * 100)) + "%"
if count > 10 && self.conductor.mp3Tracker.amplitude == 0.0 && self.referenceAudioData.last == 0.0 {
self.musicOutlet.backgroundColor = .green
self.musicOutlet.setTitleColor(.black, for: .normal)
self.musicOutlet.layer.borderColor = UIColor.red.cgColor
timer.invalidate()
}
if count == 0 {
self.musicOutlet.backgroundColor = .clear
self.musicOutlet.setTitleColor(.cyan, for: .normal)
self.musicOutlet.layer.borderColor = UIColor.cyan.cgColor
}
}
}
}
}
else {
}
}
@IBAction func resultAction(_ sender: Any) {
print("mic array:\(instantanousUserAudioData)")
print("song array:\(referenceAudioData)")
self.timer?.invalidate()
if referenceAudioData.count > 0, instantanousUserAudioData.count > 0 {
let refData = knn_curve_label_pair(curve: referenceAudioData,label: "reference")
let userData = knn_curve_label_pair(curve: instantanousUserAudioData,label: "userData")
let attempt:KNNDTW = KNNDTW()
attempt.train(data_sets: [refData,userData])
let prediction: knn_certainty_label_pair = attempt.predict(curve_to_test: referenceAudioData)
print("predicted :" + prediction.label, "with ", prediction.probability * 100,"% certainty")
resultLabel.text = "DTW cost is " + String(attempt.dtw_cost(y: referenceAudioData, x: instantanousUserAudioData))
print("COST OF THE DTW ALGORITHM IS : \(String(attempt.dtw_cost(y: referenceAudioData, x: instantanousUserAudioData)))")
}
}
}
I am trying to combine the MicrophoneAnalysis and Recorder examples. It keeps crashing at the line try recorder.record().
2018-01-08 21:21:48.507019-0800 Music Practice[90266:18761122] [avae]
AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAEGraphNode.mm:804:CreateRecordingTap: (nullptr == Tap())]
2018-01-08 21:21:48.527443-0800 Music Practice[90266:18761122] * Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: nullptr == Tap()'
* First throw call stack:
// -----------
import AudioKit
import AudioKitUI
import UIKit
class SecondViewController: UIViewController {
@IBOutlet private var inputPlot: AKNodeOutputPlot!
@IBOutlet weak var tempViewForRecordingAndPlay: UIView!
@IBOutlet weak var outputWavePlot: AKOutputWaveformPlot!
// for microphone analysis
@IBOutlet weak var frequencyLabel: UILabel!
@IBOutlet weak var amplitudeLabel: UILabel!
@IBOutlet weak var noteNameWithSharpsLabel: UILabel!
@IBOutlet weak var noteNameWithFlatsLabel: UILabel!
@IBOutlet private var audioInputPlot: EZAudioPlot!
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var player: AKAudioPlayer!
var tape: AKAudioFile!
var micBooster: AKBooster!
var moogLadder: AKMoogLadder!
var delay: AKDelay!
var mainMixer: AKMixer!
let mic = AKMicrophone()
var state = State.readyToRecord
@IBOutlet private weak var infoLabel: UILabel!
@IBOutlet private weak var resetButton: UIButton!
@IBOutlet private weak var RecordOrPlay_Btn: UIButton!
@IBOutlet private weak var frequencySlider: AKSlider!
@IBOutlet private weak var resonanceSlider: AKSlider!
@IBOutlet private weak var loopButton: UIButton!
@IBOutlet private weak var moogLadderTitle: UILabel!
enum State {
case readyToRecord
case recording
case readyToPlay
case playing
}
var plot: AKNodeOutputPlot!
var micNew: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!
let noteFrequencies = [16.35, 17.32, 18.35, 19.45, 20.6, 21.83, 23.12, 24.5, 25.96, 27.5, 29.14, 30.87]
let noteNamesWithSharps = ["C", "C♯", "D", "D♯", "E", "F", "F♯", "G", "G♯", "A", "A♯", "B"]
let noteNamesWithFlats = ["C", "D♭", "D", "E♭", "E", "F", "G♭", "G", "A♭", "A", "B♭", "B"]
@objc func updateUI() {
if tracker.amplitude > 0.1 {
frequencyLabel.text = String(format: "%0.1f", tracker.frequency)
var frequency = Float(tracker.frequency)
while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
frequency /= 2.0
}
while frequency < Float(noteFrequencies[0]) {
frequency *= 2.0
}
var minDistance: Float = 10_000.0
var index = 0
for i in 0..<noteFrequencies.count {
let distance = fabsf(Float(noteFrequencies[i]) - frequency)
if distance < minDistance {
index = i
minDistance = distance
}
}
let octave = Int(log2f(Float(tracker.frequency) / frequency))
noteNameWithSharpsLabel.text = "\(noteNamesWithSharps[index])\(octave)"
noteNameWithFlatsLabel.text = "\(noteNamesWithFlats[index])\(octave)"
}
amplitudeLabel.text = String(format: "%0.2f", tracker.amplitude)
}
#if NOT_USED
func setupPlot() {
plot = AKNodeOutputPlot(micNew, frame: audioInputPlot.bounds)
plot.plotType = .rolling
plot.shouldFill = true
plot.shouldMirror = true
plot.color = UIColor.blue
audioInputPlot.addSubview(plot)
}
#endif
func setupPlot_forMic() {
plot = AKNodeOutputPlot(micMixer, frame: audioInputPlot.bounds)
plot.plotType = .rolling
plot.shouldFill = true
plot.shouldMirror = true
plot.color = UIColor.red
audioInputPlot.addSubview(plot)
}
func execute_viewDidAppear_micAnalysis() {
//AudioKit.output = silence
//AudioKit.start()
setupPlot_forMic()
Timer.scheduledTimer(timeInterval: 0.1,
target: self,
selector: #selector(SecondViewController.updateUI),
userInfo: nil,
repeats: true)
}
//view DID APPEAR
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
execute_viewDidAppear_micAnalysis()
}
// View DID DOWNLOAD
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
self.tempViewForRecordingAndPlay.addSubview(inputPlot);
self.tempViewForRecordingAndPlay.addSubview(outputWavePlot);
//parentView.bringSubview(toFront: childView)
tempViewForRecordingAndPlay.bringSubview(toFront: inputPlot);
tempViewForRecordingAndPlay.bringSubview(toFront: outputWavePlot);
recorderPlayerSettings()
#if TEMP
#else
micAnalysisSettings()
#endif
}
func recorderPlayerSettings() {
setupButtonNames()
// Clean tempFiles!
AKAudioFile.cleanTempDirectory()
// Session settings
AKSettings.bufferLength = .medium
do {
try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
} catch {
AKLog("Could not set session category.")
}
AKSettings.defaultToSpeaker = true
// Patching
inputPlot.node = mic
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
//play(from: innerTime, to: endTime, when: 0)
// passing 0 for endTime will use the duration.
// Will set the level of microphone monitoring
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
player = try? AKAudioPlayer(file: file)
}
player.looping = true
player.completionHandler = playingEnded
moogLadder = AKMoogLadder(player)
mainMixer = AKMixer(moogLadder, micBooster)
AudioKit.output = mainMixer
AudioKit.start()
setupUIForRecording()
}
func micAnalysisSettings() {
AKSettings.audioInputEnabled = true
//micNew = AKMicrophone()
tracker = AKFrequencyTracker(micMixer)
//tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// Callback triggered when playing has ended.
// Must be dispatched on the main queue, as the completionHandler
// will be triggered by a background thread.
func playingEnded() {
DispatchQueue.main.async {
self.setupUIForPlaying ()
}
}
@IBAction func RecordOrPlay_BtnTouched(_ sender: UIButton) {
switch state {
case .readyToRecord :
infoLabel.text = "Recording"
RecordOrPlay_Btn.setTitle("Stop", for: .normal)
state = .recording
// microphone will be monitored while recording
// only if headphones are plugged
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
do {
try recorder.record()
} catch { print("Errored recording.") }
case .recording :
// Microphone monitoring is muted
micBooster.gain = 0
do {
try player.reloadFile()
} catch { print("Errored reloading.") }
let recordedDuration = player != nil ? player.audioFile.duration : 0
if recordedDuration > 0.0 {
recorder.stop()
player.audioFile.exportAsynchronously(name: "TempTestFile.m4a",
baseDir: .documents,
exportFormat: .m4a) {_, exportError in
if let error = exportError {
print("Export Failed \(error)")
} else {
print("Export succeeded")
}
}
setupUIForPlaying ()
}
case .readyToPlay :
player.play()
infoLabel.text = "Playing..."
RecordOrPlay_Btn.setTitle("Stop", for: .normal)
state = .playing
case .playing :
player.stop()
setupUIForPlaying()
}
}
struct Constants {
static let empty = ""
}
func setupButtonNames() {
resetButton.setTitle(Constants.empty, for: UIControlState.disabled)
RecordOrPlay_Btn.setTitle(Constants.empty, for: UIControlState.disabled)
loopButton.setTitle(Constants.empty, for: UIControlState.disabled)
}
func setupUIForRecording () {
state = .readyToRecord
infoLabel.text = "Ready to record"
RecordOrPlay_Btn.setTitle("Record", for: .normal)
resetButton.isEnabled = false
resetButton.isHidden = true
micBooster.gain = 0
setSliders(active: false)
}
func setupUIForPlaying () {
let recordedDuration = player != nil ? player.audioFile.duration : 0
infoLabel.text = "Recorded: \(String(format: "%0.1f", recordedDuration)) seconds"
RecordOrPlay_Btn.setTitle("Play", for: .normal)
state = .readyToPlay
resetButton.isHidden = false
resetButton.isEnabled = true
setSliders(active: true)
frequencySlider.value = moogLadder.cutoffFrequency
resonanceSlider.value = moogLadder.resonance
}
func setSliders(active: Bool) {
loopButton.isEnabled = active
moogLadderTitle.isEnabled = active
frequencySlider.callback = updateFrequency
frequencySlider.isHidden = !active
resonanceSlider.callback = updateResonance
resonanceSlider.isHidden = !active
frequencySlider.range = 10 ... 2_000
moogLadderTitle.text = active ? "Moog Ladder Filter" : Constants.empty
}
@IBAction func loopButtonTouched(_ sender: UIButton) {
if player.looping {
player.looping = false
sender.setTitle("Loop is Off", for: .normal)
} else {
player.looping = true
sender.setTitle("Loop is On", for: .normal)
}
}
func updateFrequency(value: Double) {
moogLadder.cutoffFrequency = value
frequencySlider.property = "Frequency"
frequencySlider.format = "%0.0f"
}
func updateResonance(value: Double) {
moogLadder.resonance = value
resonanceSlider.property = "Resonance"
resonanceSlider.format = "%0.3f"
}
@IBAction func resetEverything(_ sender: Any) {
player.stop()
do {
try recorder.reset()
} catch { print("Errored resetting.") }
//try? player.replaceFile((recorder.audioFile)!)
setupUIForRecording()
}
// convert to a generic view animate function and dissociate from the button
@IBAction func animateButton(_ sender: UIButton) {
UIView.animate(withDuration: 0.5, delay: 0.0, options: UIViewAnimationOptions.curveEaseIn, animations: {
//Frame Option 1:
self.tempViewForRecordingAndPlay.frame = CGRect(x: self.tempViewForRecordingAndPlay.frame.origin.x, y: 20, width: self.tempViewForRecordingAndPlay.frame.width, height: self.tempViewForRecordingAndPlay.frame.height)
//Frame Option 2:
self.tempViewForRecordingAndPlay.center = CGPoint(x: self.view.frame.width / 2, y: self.view.frame.height / 4)
//self.tempViewForRecordingAndPlay.backgroundColor = .blue
},completion: { finish in
/*
UIView.animate(withDuration: 1, delay: 0.25,options: UIViewAnimationOptions.curveEaseOut,animations: {
self.tempViewForRecordingAndPlay.backgroundColor = .orange
self.tempViewForRecordingAndPlay.transform = CGAffineTransform(scaleX: 0.25, y: 0.25)
//self.animationButton.isEnabled = false // If you want to restrict the button not to repeat animation..You can enable by setting into true
},completion: nil)})
*/
UIView.animate(withDuration: 1.0, delay: 0.25, usingSpringWithDamping:
0.6, initialSpringVelocity: 0.3, options:
UIViewAnimationOptions.allowAnimatedContent, animations: { () -> Void in
//do actual move
self.tempViewForRecordingAndPlay.center = self.tempViewForRecordingAndPlay.center
}, completion: nil)})
}
}
You probably already have a tap on the bus, and you cannot install another tap on the same bus.
Try calling micMixer.outputNode.removeTap(onBus: 0) before you call recorder.record().
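A minimal sketch of where that call could go, using the names from the question (micMixer, recorder):
do {
    // Remove any tap already installed on the mixer's output bus;
    // AKNodeRecorder installs its own recording tap, and AVAudioEngine
    // asserts with "nullptr == Tap()" if one is already there.
    micMixer.outputNode.removeTap(onBus: 0)
    try recorder.record()
} catch { print("Errored recording.") }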
I have been trying for a few days to make a button that plays the next song, but I cannot get it to work.
Here is my question: how do I play the next track at the tap of a button with AVPlayer?
Here is my code:
class ViewControllerAudioDetail: UIViewController {
var avPlayer:AVQueuePlayer?
var status = false
fileprivate let seekDuration: Float64 = 10
fileprivate let seekDurationThirty: Float64 = 30
var mod = [Modal]()
@IBOutlet weak var ImageView: UIImageView!
@IBOutlet weak var startTime: UILabel!
@IBOutlet weak var endTime: UILabel!
@IBOutlet weak var sliderSong: UISlider!
@IBOutlet weak var name: UILabel!
@IBOutlet weak var Volume: UISlider!
@IBOutlet weak var iconChange: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
Volume.setThumbImage(UIImage(named:"Play.png"), for: .normal)
sliderSong.minimumValue = 0
sliderSong.maximumValue = 1
name.text = mod[thisSong].AudioName
ImageView.image = mod[0].ImageViewAudio
let url = URL(string: mod[thisSong].UrlName!)
let playerItem = AVPlayerItem(url: url!)
avPlayer = AVQueuePlayer(playerItem:playerItem)
let _ = avPlayer!.addPeriodicTimeObserver(forInterval: CMTime(seconds: 1, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), queue: DispatchQueue.main) { [weak self] (time) in
let duration = CMTimeGetSeconds((self?.avPlayer!.currentItem!.asset.duration)!)
self?.sliderSong.value = Float(CMTimeGetSeconds(time)) / Float(duration)
}
let duration = CMTimeGetSeconds(avPlayer!.currentItem!.asset.duration)
let minutesTextOut = Int(duration) / 60 % 60
let secondsTextOut = Int(duration) % 60
let strDuration = String(format:"%02d:%02d", minutesTextOut, secondsTextOut)
endTime.text = strDuration
}
@IBAction func sliderSong(_ sender: UISlider) {
// seeking within the audio
let duration = CMTimeGetSeconds(avPlayer!.currentItem!.asset.duration)
let value = sliderSong.value
let durationToSeek = Float(duration) * value
avPlayer?.seek(to: CMTimeMakeWithSeconds(Float64(durationToSeek), avPlayer!.currentItem!.duration.timescale)) { (state) in
if (self.iconChange.currentImage?.isEqual(UIImage(named: "Play.png")))! {
self.avPlayer?.pause()
} else if (self.iconChange.currentImage?.isEqual(UIImage(named: "Pause.png")))!{
self.avPlayer?.play()
}
}
}
@IBAction func volume(_ sender: UISlider) {
avPlayer?.volume = sender.value
}
@IBAction func minusThirtySec(_ sender: Any) {
let playerCurrentTime = CMTimeGetSeconds((avPlayer?.currentTime())!)
var newTime = playerCurrentTime - seekDurationThirty
if newTime < 0 {
newTime = 0
}
let time2: CMTime = CMTimeMake(Int64(newTime * 1000 as Float64), 1000)
avPlayer?.seek(to: time2, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
}
@IBAction func minusTenSec(_ sender: Any) {
let playerCurrentTime = CMTimeGetSeconds((avPlayer?.currentTime())!)
var newTime = playerCurrentTime - seekDuration
if newTime < 0 {
newTime = 0
}
let time2: CMTime = CMTimeMake(Int64(newTime * 1000 as Float64), 1000)
avPlayer?.seek(to: time2, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
}
@IBAction func plusTenSec(_ sender: Any) {
guard let duration = avPlayer?.currentItem?.duration else{
return
}
let playerCurrentTime = CMTimeGetSeconds((avPlayer?.currentTime())!)
let newTime = playerCurrentTime + seekDuration
if newTime < (CMTimeGetSeconds(duration) - seekDuration) {
let time2: CMTime = CMTimeMake(Int64(newTime * 1000 as Float64), 1000)
avPlayer?.seek(to: time2, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
}
}
@IBAction func plusThirtySec(_ sender: Any) {
guard let duration = avPlayer?.currentItem?.duration else{
return
}
let playerCurrentTime = CMTimeGetSeconds((avPlayer?.currentTime())!)
let newTime = playerCurrentTime + seekDurationThirty
if newTime < (CMTimeGetSeconds(duration) - seekDuration) {
let time2: CMTime = CMTimeMake(Int64(newTime * 1000 as Float64), 1000)
avPlayer?.seek(to: time2, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero)
}
}
@IBAction func Next(_ sender: Any) {
let url = URL(string: mod[thisSong].UrlName!)
let playerItem = AVPlayerItem(url: url!)
avPlayer = AVQueuePlayer(playerItem:playerItem)
avPlayer?.insert(playerItem, after: playerItem)
avPlayer?.advanceToNextItem()
// if thisSong + 1 > mod.count {
// thisSong = 0
// } else {
// thisSong += 1
// }
//
//
// if thisSong != mod.count{
//
// name.text = mod[thisSong].AudioName
// player(urlSong:mod[thisSong].UrlName!)
// avPlayer?.play()
//
//
//
// }
}
@IBAction func Back(_ sender: Any) {
// if thisSong != 0{
// thisSong -= 1
// name.text = mod[thisSong].AudioName
// player(urlSong:mod[thisSong].UrlName!)
// avPlayer?.play()
//
//
// }
}
@IBAction func Play(_ sender: Any) {
if avPlayer?.rate == 0 {
avPlayer?.play()
avPlayer?.rate = 1.0
iconChange.setImage(UIImage(named:"Pause.png"), for: .normal)
avPlayer?.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, 1), queue: nil, using: {
(CMTime) -> Void in
self.updateProgressBar()
})
} else {
avPlayer?.rate = 0.0
avPlayer?.pause()
iconChange.setImage(UIImage(named:"Play.png"), for: .normal)
}
}
func player(urlSong:String) {
}
func updateProgressBar(){
let timeNow = Int(avPlayer!.currentTime().value) / Int(avPlayer!.currentTime().timescale)
let minutesText = timeNow / 60
let secondsText = timeNow % 60
let duration = String(format:"%02d:%02d", minutesText, secondsText)
startTime.text = duration
}
}
And the table view from which I get the data:
var thisSong = 0
class TableViewControllerAudioList: UITableViewController {
override func viewDidLoad() {
super.viewDidLoad()
tableView.estimatedRowHeight = 50
tableView.rowHeight = UITableViewAutomaticDimension
}
override func numberOfSections(in tableView: UITableView) -> Int {
// #warning Incomplete implementation, return the number of sections
return 1
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
// #warning Incomplete implementation, return the number of rows
return modalsF.count
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as! TableViewCellAudioList
cell.name.text = modalsF[indexPath.row].AudioName
cell.number.text = "\(indexPath.row + 1)"
thisSong = indexPath.row
return cell
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
let vc = segue.destination as? ViewControllerAudioDetail
vc?.mod = [modalsF[(tableView.indexPathForSelectedRow?.row)!]]
}
}
Play the selected / current song:
func play(at index: Int) {
audioPlayer.removeAllItems()
playerItem = audioItems[index]
playerItem?.seek(to: kCMTimeZero)
audioPlayer.insert(playerItem!, after: nil)
audioPlayer.play()
isPlaying = true
self.playAllSongsTableView.reloadData()
}
To play the previous song:
@IBAction func backWordAction(_ sender: UIButton) {
var index = audioItems.index(of: audioPlayer.currentItem!) ?? 0
if index > 0 {
index = index - 1
}
play(at: index)
}
To play the next song:
@IBAction func forwordAction(_ sender: UIButton) {
var index = audioItems.index(of: audioPlayer.currentItem!) ?? 0
if index < (audioItems.count - 1) {
index = index + 1
}
play(at: index)
}
Using AVQueuePlayer instead of AVPlayer will let you add items directly:
AVQueuePlayer.insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?)
AVQueuePlayer.advanceToNextItem()
You can also initialize the player with an array of AVPlayerItems
AVQueuePlayer(items: [AVPlayerItem])
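For instance, a minimal sketch (the URL strings are placeholders; in the question they would come from mod[i].UrlName):
import AVFoundation

let urlStrings = ["https://example.com/song1.mp3", "https://example.com/song2.mp3"]
let items = urlStrings.compactMap { URL(string: $0) }.map { AVPlayerItem(url: $0) }
let queuePlayer = AVQueuePlayer(items: items)
queuePlayer.play()
// Later, e.g. from a "next" button:
queuePlayer.advanceToNextItem()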
In the table view I added didSelectRowAt, in which a global counter stores the selected row:
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
thisSong = indexPath.row
}
And I pass the data to the destination controller in prepare(for:sender:):
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "listToDetail"{
let vc = segue.destination as? ViewControllerAudioDetail
vc?.mod = mod
}
}
In my detail controller I created two buttons, one for the previous song and one for the next:
var avPlayer:AVPlayer?
var mod = [Modal]()
@IBAction func Next(_ sender: Any) {
if thisSong == mod.count - 1 {
thisSong = 0
} else {
thisSong += 1
}
if thisSong != mod.count{
avPlayer?.removeTimeObserver(sliderDuration)
name.text = mod[thisSong].AudioName
player(urlSong:mod[thisSong].UrlName!)
Status()
}
}
@IBAction func Back(_ sender: Any) {
if thisSong != 0 {
thisSong -= 1
} else {
thisSong = mod.count - 1
}
avPlayer?.removeTimeObserver(sliderDuration)
name.text = mod[thisSong].AudioName
player(urlSong:mod[thisSong].UrlName!)
Status()
}
And this function checks what the player's status was and restores the same state:
func Status(){
if status == true {
iconChange.setImage(UIImage(named:"Pause.png"), for: .normal)
avPlayer?.play()
} else {
iconChange.setImage(UIImage(named:"Play.png"), for: .normal)
avPlayer?.pause()
}
}
Here I create the player:
func player(urlSong:String) {
let url = URL(string: urlSong)
let playerItem = AVPlayerItem(url: url!)
avPlayer = AVPlayer(playerItem:playerItem)
sliderDuration = avPlayer!.addPeriodicTimeObserver(forInterval: CMTime(seconds: 1, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), queue: DispatchQueue.main) { [weak self] (time) in
let duration = CMTimeGetSeconds((self?.avPlayer!.currentItem!.asset.duration)!)
self?.sliderSong.value = Float(CMTimeGetSeconds(time)) / Float(duration)
}
let duration = CMTimeGetSeconds(avPlayer!.currentItem!.asset.duration)
let minutesTextOut = Int(duration) / 60 % 60
let secondsTextOut = Int(duration) % 60
let strDuration = String(format:"%02d:%02d", minutesTextOut, secondsTextOut)
endTime.text = strDuration
}
How can I get AVPlayer to work like AVAudioPlayer? What I mean is, I cannot seem to get the duration to work at all. Here is what I need to convert to AVPlayer:
import UIKit
import AVKit
import AVFoundation
class ModalViewController: UIViewController {
var audioPlayer = AVAudioPlayer()
let varSend = VarSend.sharedInstance
var timer:NSTimer!
var toggleState = 2
@IBOutlet weak var slider: UISlider!
@IBOutlet weak var sermonImage: UIImageView!
@IBOutlet weak var sermont: UILabel!
@IBOutlet weak var sermond: UILabel!
@IBOutlet weak var sermonE: UILabel!
@IBOutlet weak var sermonL: UILabel!
@IBOutlet weak var play: UIButton!
@IBOutlet var layer: UIView!
override func viewDidLoad() {
super.viewDidLoad()
let url = varSend.url
print("Setting up.")
do {
let data1 = NSData(contentsOfURL: NSURL(string:url)!)
audioPlayer = try AVAudioPlayer(data: data1!)
audioPlayer.prepareToPlay()
audioPlayer.volume = 1.0
audioPlayer.play()
} catch {
print("Error getting the audio file")
}
slider.maximumValue = Float(audioPlayer.duration)
timer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("updateSlider"), userInfo: nil, repeats: true)
slider.setThumbImage(UIImage(named: "circle"), forState: .Normal)
slider.setThumbImage(UIImage(named: "circle"), forState: .Highlighted)
let title = varSend.sermonName
self.sermont.text = title
let date = varSend.sermonDate
self.sermond.text = date
let image = varSend.sermonPic
ImageLoader.sharedLoader.imageForUrl(image, completionHandler:{(image: UIImage?, url: String) in
self.sermonImage.image = image!
})
}
@IBAction func ChangeAudioTime(sender: AnyObject) {
audioPlayer.stop()
audioPlayer.currentTime = NSTimeInterval(slider.value)
audioPlayer.prepareToPlay()
audioPlayer.volume = 1.0
audioPlayer.play()
}
func updateSlider() {
slider.value = Float(audioPlayer.currentTime)
let currentTime = Int(audioPlayer.currentTime)
let minutes = currentTime / 60
let seconds = currentTime - minutes * 60
let con = Int(audioPlayer.duration)
let currentItem = con - currentTime
let minutesU = Int(currentItem / 60)
let secondsU = Int(currentItem % 60)
sermonE.text = NSString(format: "%02d:%02d", minutes,seconds) as String
let timeLeft = NSString(format: "%02d:%02d", minutesU,secondsU) as String
sermonL.text = "-\(timeLeft)"
if currentItem == 1 {
//audioPlayer.pause()
toggleState = 1
print(toggleState)
play.setImage(UIImage(named:"play.png"),forState:UIControlState.Normal)
}
}
@IBAction func playPauseButton(sender: AnyObject) {
let playBtn = sender as! UIButton
if toggleState == 1 {
audioPlayer.play()
toggleState = 2
playBtn.setImage(UIImage(named:"pause.png"),forState:UIControlState.Normal)
} else {
audioPlayer.pause()
toggleState = 1
playBtn.setImage(UIImage(named:"play.png"),forState:UIControlState.Normal)
}
}
@IBAction func play(sender: AnyObject) {
audioPlayer.play()
}
@IBAction func pause(sender: AnyObject) {
audioPlayer.pause()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func close(sender: AnyObject) {
self.dismissViewControllerAnimated(true, completion: nil)
audioPlayer.stop()
}
}
And here is what I have tried already:
import UIKit
import Alamofire
import SwiftyJSON
import AVKit
import AVFoundation
import CoreMedia
class test: UIViewController {
var audioPlayer:AVPlayer!
var playerItem:AVPlayerItem!
var timer:NSTimer!
@IBOutlet weak var sermonE: UILabel!
@IBOutlet weak var sermonL: UILabel!
@IBOutlet weak var slider: UISlider!
override func viewDidLoad() {
super.viewDidLoad()
//let playerItem:AVPlayerItem!;
let playerURL = "example.com"
let steamingURL:NSURL = NSURL(string:playerURL)!
audioPlayer = AVPlayer(URL: steamingURL)
timer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("updateSlider"), userInfo: nil, repeats: true)
}
func updateSlider() {
let item = audioPlayer?.currentItem
let durationInSeconds = CMTimeGetSeconds(item!.duration)
print(durationInSeconds)
}
@IBAction func ChangeAudioTime(sender: AnyObject) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func Play(sender: AnyObject) {
audioPlayer.play()
}
}
I have been searching for days and Apple's docs are very hard to make out.
I even tried
self.player.currentItem.asset.duration
from: How to get Duration from AVPlayer (Not AVAudioPlayer)?
Any help would be much appreciated.
Swift 2.0
From the documentation for loadedTimeRanges: the array contains NSValue objects containing a CMTimeRange value, indicating the time ranges for which the player item has media data readily available. The time ranges returned may be discontinuous.
So I call myplayer.getCurrentTrackDuration every second, and I noticed that when streaming I get the correct final duration after 3-4 seconds.
extension AVPlayer {
// Run this every second while streaming (or use KVO).
// With an HTTP stream, the duration keeps increasing and usually finalizes once roughly 7% of the total duration of the song has loaded.
func getCurrentTrackDuration () -> Float64 {
guard let currentItem = self.currentItem else { return 0.0 }
guard currentItem.loadedTimeRanges.count > 0 else { return 0.0 }
let timeInSecond = CMTimeGetSeconds((currentItem.loadedTimeRanges[0].CMTimeRangeValue).duration);
return timeInSecond >= 0.0 ? timeInSecond : 0.0
}
}
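Usage, as a sketch in the question's Swift 2 style (assuming the AVPlayer property named audioPlayer and the NSTimer from the question's test class):
func updateSlider() {
    let duration = audioPlayer.getCurrentTrackDuration()
    slider.maximumValue = Float(duration)
    slider.value = Float(CMTimeGetSeconds(audioPlayer.currentTime()))
}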