Play a sound the number of times entered in a text field - iOS

My code below has a text field and a sound function. When the button is pressed, I would like the sound file to play the number of times given as an integer in the text field.
import UIKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet var sam: UITextField!
var bombSoundEffect: AVAudioPlayer?
func judo(){
let path = Bundle.main.path(forResource: "example.mp3", ofType:nil)!
let url = URL(fileURLWithPath: path)
do {
bombSoundEffect = try AVAudioPlayer(contentsOf: url)
bombSoundEffect?.play()
} catch {
// couldn't load file :(
}
}
}

Please use numberOfLoops:
"numberOfLoops" is the number of times that the sound will return to the beginning upon reaching the end.
A value of zero means to play the sound just once.
A value of one will result in playing the sound twice, and so on.
Any negative number will loop indefinitely until stopped.
bombSoundEffect?.numberOfLoops = 1
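For example, applied to the judo() code from the question, a minimal sketch of a button action could look like the following. The playTapped name is hypothetical; sam, bombSoundEffect, and example.mp3 come from the question, and numberOfLoops is set to count - 1 because it counts extra repeats, not total plays.
@IBAction func playTapped(_ sender: UIButton) {
    guard
        let text = sam.text,
        let count = Int(text), count > 0,
        let path = Bundle.main.path(forResource: "example.mp3", ofType: nil)
    else { return }
    let url = URL(fileURLWithPath: path)
    do {
        bombSoundEffect = try AVAudioPlayer(contentsOf: url)
        bombSoundEffect?.numberOfLoops = count - 1 // play `count` times in total
        bombSoundEffect?.play()
    } catch {
        print("Couldn't load the sound file: \(error)")
    }
}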
You can also do this in another way, with a timer and some delay:
import UIKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet weak var textField: UITextField!
var player = AVAudioPlayer()
var timer = Timer()
var count: Int = 0
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func playAction(_ sender: Any) {
let alertSound = URL(fileURLWithPath: Bundle.main.path(forResource: "stereo", ofType: "wav")!)
do {
player = try AVAudioPlayer(contentsOf: alertSound)
} catch {
print("No sound found by URL")
}
if let textValue = self.textField.text, let inputNumber = Int(textValue), inputNumber > 0 {
playWith(repeatCount: inputNumber)
} else {
let alert = UIAlertController(title: "Alert", message: "Please enter number.", preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
self.present(alert, animated: true, completion: nil)
}
}
func playWith(repeatCount: Int) {
player.play()
self.timer = Timer.scheduledTimer(withTimeInterval: 0.36, repeats: true, block: { (timer) in
self.count += 1
print(self.count)
if self.count != repeatCount {
self.player.play()
} else {
self.count = 0
self.player.stop()
self.timer.invalidate()
}
})
}
}
This is exactly what you want: in this demo, you enter a number in the text field and the sound then plays that many times.
If you want the time interval to be dynamic as well, so that it also comes from a second text field (textField2), use this method:
func playWith(repeatCount: Int) {
var timeInterval = 0.36
if let textValue = self.textField2.text, let inputNumber = Double(textValue), inputNumber > 0 {
timeInterval = inputNumber
}
player.play()
self.timer = Timer.scheduledTimer(withTimeInterval: timeInterval, repeats: true, block: { (timer) in
self.count += 1
print(self.count)
if self.count != repeatCount {
self.player.play()
} else {
self.count = 0
self.player.stop()
self.timer.invalidate()
}
})
}
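Note that the timer-based approach assumes the clip is shorter than the chosen interval. If you want to replay the file exactly N times regardless of its length, a delegate-based sketch like the one below also works (RepeatedPlayer and its member names are hypothetical, not from the answer above):
import AVFoundation
class RepeatedPlayer: NSObject, AVAudioPlayerDelegate {
    private var player: AVAudioPlayer?
    private var remainingPlays = 0
    func play(url: URL, times: Int) {
        guard times > 0 else { return }
        remainingPlays = times
        player = try? AVAudioPlayer(contentsOf: url)
        player?.delegate = self
        player?.play()
    }
    // Called by AVAudioPlayer each time the clip reaches its end.
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        remainingPlays -= 1
        if remainingPlays > 0 {
            player.currentTime = 0
            player.play()
        }
    }
}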

Related

Autopan in iOS with AVAudioPlayer smoothly

I am trying to build autopan (moving the audio between the left, middle, and right channels) with AVAudioPlayer to play my music files. I can do this through the AVAudioPlayer.pan property, and I implemented the auto pan using a timer in my Swift code. The issue now is that the audio does not play smoothly and breaks up in between.
Here is my current code:
class AudioPlayer: UIViewController {
var player = AVAudioPlayer()
override func viewDidLoad() {
super.viewDidLoad()
prepareAudioSession()
}
func prepareAudioSession() {
let audioFileName = "LPNumb"
let audioFileExtension = "mp3"
guard let filePath = Bundle.main.path(forResource: audioFileName, ofType: audioFileExtension) else {
print("Audio file not found at specified path")
return
}
do {
let alertSound = URL(fileURLWithPath: filePath)
player = try AVAudioPlayer(contentsOf: alertSound)
} catch {
print(error)
}
}
@IBAction func play(_ sender: AnyObject) {
player.play()
if player.isPlaying{
_ = Timer.scheduledTimer(timeInterval: 0.50, target: self, selector: #selector(self.update1), userInfo: nil, repeats: true)
_ = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.update2), userInfo: nil, repeats: true)
_ = Timer.scheduledTimer(timeInterval: 1.50, target: self, selector: #selector(self.update3), userInfo: nil, repeats: true)
}
}
@objc func update1() {
player.pan = 0
}
@objc func update2() {
player.pan = 1
}
@objc func update3() {
player.pan = -1
}
}
I want the output audio to be mono, and I need the audio to be auto-panned smoothly.
I think you need just one Timer for this task. Take a look at the code:
import UIKit
import AVFoundation
enum PanState {
case up
case down
}
class ViewController: UIViewController {
var audioPlayer: AVAudioPlayer?
var timerForPan: Timer?
var pan: Double = 0.0
var panState: PanState = .up
override func viewDidLoad() {
super.viewDidLoad()
if let path = Bundle.main.path(forResource: "LPNumb", ofType: "mp3") {
let record = URL(fileURLWithPath: path)
setupRecordToPlayer(from: record)
}
setTimers()
}
func setTimers() {
timerForPan = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(self.updatePan), userInfo: nil, repeats: true)
}
func setupRecordToPlayer(from url: URL) {
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
} catch let error {
debugPrint(error.localizedDescription)
}
}
@IBAction func playButtonPressed(_ sender: UIButton) {
audioPlayerToPlay()
}
@objc func updatePan() {
if audioPlayer?.isPlaying ?? false {
switch panState {
case .up:
self.pan += 0.1
if pan >= 1 {
self.panState = .down
}
case .down:
self.pan -= 0.1
if pan <= -1 {
self.panState = .up
}
}
audioPlayer?.pan = Float(self.pan)
}
}
/// Prepare Audio player to play
func audioPlayerToPlay() {
audioPlayer?.prepareToPlay()
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
} catch {
print(error.localizedDescription)
}
audioPlayer?.play()
}
}
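If the 0.1 steps once per second still sound choppy, a finer-grained sketch is to drive AVAudioPlayer.pan along a sine wave from a fast timer. This piggybacks on the audioPlayer property above, while panTimer, panPhase, and panPeriod are assumed names that are not in the original code.
var panTimer: Timer?
var panPhase: Double = 0
let panPeriod: Double = 4.0 // seconds for one full left-right-left sweep
func startSmoothPan() {
    panTimer?.invalidate()
    panTimer = Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true) { [weak self] _ in
        guard let self = self, self.audioPlayer?.isPlaying == true else { return }
        self.panPhase += (1.0 / 60.0) / self.panPeriod
        // sin() sweeps smoothly and continuously between -1 (left) and +1 (right)
        self.audioPlayer?.pan = Float(sin(self.panPhase * 2 * .pi))
    }
}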

How do I record audio and save it to Sandbox?

I am working on an application that records audio from the user and displays all earlier recordings made with this application in a table view on another screen. On tapping a particular row, that recording must be played. How do I achieve this, and are there any resources to help me with it?
My code currently saves the recording and plays it on the same screen. However, new recordings overwrite earlier ones and only one recording is saved into the file manager.
I have added the "Privacy - Microphone Usage Description" key to the Info.plist.
The audio is successfully recorded and played.
import UIKit
import AVFoundation
import MobileCoreServices
class ViewController: UIViewController, AVAudioRecorderDelegate, AVAudioPlayerDelegate{
@IBOutlet var recordingTimeLabel: UILabel!
@IBOutlet var record_btn_ref: UIButton!
@IBOutlet var play_btn_ref: UIButton!
//Variables:
var audioRecorder: AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
var meterTimer:Timer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
//Check recording permission:
check_record_permission()
//Add right bar button:
navigationItem.rightBarButtonItem = UIBarButtonItem(title: "Documents", style: .plain, target: self, action: #selector(OpenDoc))
}
//Button action to start recording:
@IBAction func start_recording(_ sender: UIButton)
{
//If already recording:
if(isRecording)
{
//Stop recording:
finishAudioRecording(success: true)
//Set the title back to "Record":
record_btn_ref.setTitle("Record", for: .normal)
//Enable the play button:
play_btn_ref.isEnabled = true
//Set the value of the variable "isRecording" to false
isRecording = false
}
//If audio was not being recorded:
else
{
//Setup the recorder:
setup_recorder()
//Start recording:
audioRecorder.record()
//Update label every 1 sec:
meterTimer = Timer.scheduledTimer(timeInterval: 0.1, target:self, selector:#selector(self.updateAudioMeter(timer:)), userInfo:nil, repeats:true)
//Set the title of the label to "Stop":
record_btn_ref.setTitle("Stop", for: .normal)
//Enable the play button:
play_btn_ref.isEnabled = false
//Set "isRecording" to true:
isRecording = true
}
}
//Button action for play/pause:
@IBAction func play_recording(_ sender: Any)
{
//If audio is already being played (i.e. it should pause on being clicked again):
if(isPlaying)
{
//Stop audio player
audioPlayer.stop()
//Enable record button:
record_btn_ref.isEnabled = true
//Set the title to "Play"
play_btn_ref.setTitle("Play", for: .normal)
//Set value of "isPlaying" to false:
isPlaying = false
}
//It is not playing (i.e. it should play when button is clicked)
else
{
//If file path exists:
if FileManager.default.fileExists(atPath: getFileUrl().path)
{
//Disable the record button:
record_btn_ref.isEnabled = false
//Set the title of the button to "Pause":
play_btn_ref.setTitle("Pause", for: .normal)
//Prepare to play:
prepare_play()
//Implement play method of audioPlayer:
audioPlayer.play()
//Set variable "isPlaying" to true:
isPlaying = true
}
//If file path doesn't exist:
else
{
display_alert(msg_title: "Error", msg_desc: "Audio file is missing.", action_title: "OK")
}
}
}
//Function that checks permission to record:
func check_record_permission()
{
//Switch record permission instances:
switch AVAudioSession.sharedInstance().recordPermission {
//Case granted:
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
//Case denied:
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
//Case not determined, in which case ask for permission:
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
//Default case:
default:
break
}
}
//Function that gets the directory path:
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//Function that gets the URL file path:
func getFileUrl() -> URL
{
let filename = "myRecording.m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
//Function that sets up the recorder:
func setup_recorder()
{
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
display_alert(msg_title: "Error", msg_desc: error.localizedDescription, action_title: "OK")
}
}
else
{
display_alert(msg_title: "Error", msg_desc: "Don't have access to use your microphone.", action_title: "OK")
}
}
//Objective C function to update text of the timer label:
@objc func updateAudioMeter(timer: Timer)
{
if audioRecorder.isRecording
{
let hr = Int((audioRecorder.currentTime / 60) / 60)
let min = Int(audioRecorder.currentTime / 60)
let sec = Int(audioRecorder.currentTime.truncatingRemainder(dividingBy: 60))
let totalTimeString = String(format: "%02d:%02d:%02d", hr, min, sec)
recordingTimeLabel.text = totalTimeString
audioRecorder.updateMeters()
}
}
//Function for finish audio recording:
func finishAudioRecording(success: Bool)
{
//If recording was successful:
if success
{
audioRecorder.stop()
audioRecorder = nil
meterTimer.invalidate()
print("recorded successfully.")
}
//If recording was not successful:
else
{
display_alert(msg_title: "Error", msg_desc: "Recording failed.", action_title: "OK")
}
}
//Prepare to play:
func prepare_play()
{
do
{
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
}
catch{
print("Error")
}
}
//Function for audio record did finish recording:
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
finishAudioRecording(success: false)
}
play_btn_ref.isEnabled = true
}
//If recorded audio was played:
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
record_btn_ref.isEnabled = true
play_btn_ref.setTitle("Play", for: .normal)
}
//Function to display alerts:
func display_alert(msg_title : String , msg_desc : String ,action_title : String)
{
let ac = UIAlertController(title: msg_title, message: msg_desc, preferredStyle: .alert)
ac.addAction(UIAlertAction(title: action_title, style: .default)
{
(result : UIAlertAction) -> Void in
_ = self.navigationController?.popViewController(animated: true)
})
present(ac, animated: true)
}
@objc func OpenDoc()
{
let documentPicker = UIDocumentPickerViewController(documentTypes: [kUTTypeMPEG4Audio as String], in: .import)
documentPicker.delegate = self as? UIDocumentPickerDelegate
documentPicker.allowsMultipleSelection = false
present(documentPicker, animated: true, completion: nil)
}
}
My code currently saves the recording and plays it on the same screen. However, new recordings overwrite earlier ones and only one recording is saved into the file manager.
As far as I understood, you're giving every recording the same name in the getFileUrl() function.
The easiest way to give each recording a different name is to add a timestamp to it, for example using Date().timeIntervalSince1970.
func getFileUrl() -> URL
{
let currentTime = Date().timeIntervalSince1970
let filename = "myRecording-\(currentTime).m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
And next, your play_recording function should receive the URL of the recording as a parameter from the table view on the other screen.
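A minimal sketch of what that list screen could look like, assuming the recordings have already been collected into an array of URLs (RecordingsViewController, recordingURLs, and the "RecordingCell" identifier are hypothetical names, not from the posted code):
import UIKit
import AVFoundation
class RecordingsViewController: UITableViewController, AVAudioPlayerDelegate {
    var recordingURLs: [URL] = [] // filled with the saved files, e.g. from the Documents directory
    var player: AVAudioPlayer?
    override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return recordingURLs.count
    }
    override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "RecordingCell", for: indexPath)
        cell.textLabel?.text = recordingURLs[indexPath.row].lastPathComponent
        return cell
    }
    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        play(recordingAt: recordingURLs[indexPath.row])
    }
    // The playback function takes the recording's URL as a parameter, as suggested above.
    func play(recordingAt url: URL) {
        do {
            player = try AVAudioPlayer(contentsOf: url)
            player?.delegate = self
            player?.prepareToPlay()
            player?.play()
        } catch {
            print("Could not play \(url.lastPathComponent): \(error)")
        }
    }
}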
After going through a few articles and the answer here: https://stackoverflow.com/a/57629598/11830020, I have figured out the answer and it is as below:
The storyboard contains two buttons (one for record/stop and the other for play/pause) and one label to display the length of the recorded sound.
import UIKit
import AVFoundation
import MobileCoreServices
class ViewController: UIViewController,AVAudioRecorderDelegate,AVAudioPlayerDelegate {
//Variables:
var audioRecorder: AVAudioRecorder!
var player: AVAudioPlayer!
var meterTimer:Timer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
var totalTimeString = ""
//IBOutlets:
@IBOutlet var recordingTimeLabel: UILabel!
@IBOutlet var record_btn_ref: UIButton!
@IBOutlet weak var playBtn: UIButton!
//View Did Load:
override func viewDidLoad() {
super.viewDidLoad()
//Check for recording permission:
check_record_permission()
}
//Button action to start recording:
@IBAction func start_recording(_ sender: UIButton)
{
//If already recording:
if(isRecording)
{
//Stop recording:
finishAudioRecording(success: true)
//Set the title back to "Record":
record_btn_ref.setTitle("Record", for: .normal)
//Enable the play button:
playBtn.isEnabled = true
//Set the value of the variable "isRecording" to false
isRecording = false
}
//If audio was not being recorded:
else
{
//Setup the recorder:
setup_recorder()
//Start recording:
audioRecorder.record()
//Update label every 1 sec:
meterTimer = Timer.scheduledTimer(timeInterval: 0.1, target:self, selector:#selector(self.updateAudioMeter(timer:)), userInfo:nil, repeats:true)
//Set the title of the label to "Stop":
record_btn_ref.setTitle("Stop", for: .normal)
//Disable play:
playBtn.isEnabled = false
//Set "isRecording" to true:
isRecording = true
}
}
//Play/pause button action
@IBAction func playBtnAction(_ sender: Any) {
//playSound()
//If audio is already being played (i.e. it should pause on being clicked again):
if(isPlaying)
{
//Stop audio player
player.stop()
//Enable record button:
record_btn_ref.isEnabled = true
//Set the title to "Play"
playBtn.setTitle("Play", for: .normal)
//Set value of "isPlaying" to false:
isPlaying = false
}
//It is not playing (i.e. it should play when button is clicked)
else
{
let filename = "myRecording\(totalTimeString).m4a"
let url = getDocumentsDirectory().appendingPathComponent(filename)
//If file path exists:
if FileManager.default.fileExists(atPath: url.path)
{
//Disable the record button:
record_btn_ref.isEnabled = false
//Set the title of the button to "Pause":
playBtn.setTitle("Pause", for: .normal)
//Prepare to play:
prepare_play()
//Implement play method of audioPlayer:
player.play()
//Set variable "isPlaying" to true:
isPlaying = true
}
//If file path doesn't exist:
else
{
display_alert(msg_title: "Error", msg_desc: "Audio file is missing.", action_title: "OK")
}
}
}
//Recording permissions:
//Function that checks for permission to record:
func check_record_permission()
{
//Switch record permission instances:
switch AVAudioSession.sharedInstance().recordPermission {
//Case granted:
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
//Case denied:
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
//Case not determined, in which case ask for permission:
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
//Default case:
default:
break
}
}
//Function that gets the directory path:
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//Function that gets the URL file path:
func getFileUrl() -> URL
{
let date = Date()
let calendar = Calendar.current
let hr = calendar.component(.hour, from: date)
let min = calendar.component(.minute, from: date)
let sec = calendar.component(.second, from: date)
totalTimeString = String(format: "%02d.%02d.%02d", hr, min, sec)
let filename = "myRecording\(totalTimeString).m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
//Audio recorder functions:
//Function that sets up the recorder:
func setup_recorder()
{
//If access to record:
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
display_alert(msg_title: "Error", msg_desc: error.localizedDescription, action_title: "OK")
}
}
//If permission not granted:
else
{
display_alert(msg_title: "Error", msg_desc: "Don't have access to use your microphone.", action_title: "OK")
}
}
//Function that defines what to do when audio recording is finished successfully/unsuccessfully:
func finishAudioRecording(success: Bool)
{
//If recording was successful:
if success
{
//Stop recording
audioRecorder.stop()
//Reset recorder
audioRecorder = nil
//Invalidate meter timer:
meterTimer.invalidate()
}
//If recording was not successful:
else
{
//Call function to display alert:
display_alert(msg_title: "Error", msg_desc: "Recording failed.", action_title: "OK")
}
}
//Function for audio record did finish recording:
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
//Audio recording was not successful:
finishAudioRecording(success: false)
}
//Enable play button
playBtn.isEnabled = true
}
//Play/pause recorded audio:
//Prepare to play:
func prepare_play()
{
do
{
let filename = "myRecording\(totalTimeString).m4a"
let url = getDocumentsDirectory().appendingPathComponent(filename)
player = try AVAudioPlayer(contentsOf: url)
player.delegate = self
player.prepareToPlay()
}
catch{
print("Error")
}
}
//If recorded audio was played:
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
//Enable record button:
record_btn_ref.isEnabled = true
//Set title of play button to Play:
playBtn.setTitle("Play", for: .normal)
}
//Alerts:
//Function to display alerts:
func display_alert(msg_title : String , msg_desc : String ,action_title : String)
{
let ac = UIAlertController(title: msg_title, message: msg_desc, preferredStyle: .alert)
ac.addAction(UIAlertAction(title: action_title, style: .default)
{
(result : UIAlertAction) -> Void in
_ = self.navigationController?.popViewController(animated: true)
})
present(ac, animated: true)
}
//Timer label:
//Objective C function to update text of the timer label:
@objc func updateAudioMeter(timer: Timer)
{
if audioRecorder.isRecording
{
let hr = Int((audioRecorder.currentTime / 60) / 60)
let min = Int(audioRecorder.currentTime / 60)
let sec = Int(audioRecorder.currentTime.truncatingRemainder(dividingBy: 60))
let totalTimeString = String(format: "%02d:%02d:%02d", hr, min, sec)
recordingTimeLabel.text = totalTimeString
audioRecorder.updateMeters()
}
}
}
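Neither version above shows how the list screen can discover the saved recordings. One sketch, assuming the files live in the Documents directory as in getDocumentsDirectory(), is to enumerate the .m4a files and sort them by creation date; the allRecordings() helper is hypothetical, not part of the posted code.
// Enumerate every .m4a recording saved in the Documents directory, oldest first.
func allRecordings() -> [URL] {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let files = (try? FileManager.default.contentsOfDirectory(
        at: documents,
        includingPropertiesForKeys: [.creationDateKey],
        options: .skipsHiddenFiles)) ?? []
    return files
        .filter { $0.pathExtension == "m4a" }
        .sorted { lhs, rhs in
            let lhsDate = (try? lhs.resourceValues(forKeys: [.creationDateKey]).creationDate) ?? Date.distantPast
            let rhsDate = (try? rhs.resourceValues(forKeys: [.creationDateKey]).creationDate) ?? Date.distantPast
            return lhsDate < rhsDate
        }
}
The resulting array can back the table view's data source, and the URL of the selected row can be handed to the playback code.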

Metronome iOS Swift beat visuals lag

I'm trying to create a metronome app by implementing the sample code provided by Apple. Everything works fine, but I'm seeing a delay in the beat visuals; they are not properly synchronized with the player time. Here is the sample code provided by Apple:
let secondsPerBeat = 60.0 / tempoBPM
let samplesPerBeat = Float(secondsPerBeat * Float(bufferSampleRate))
let beatSampleTime: AVAudioFramePosition = AVAudioFramePosition(nextBeatSampleTime)
let playerBeatTime: AVAudioTime = AVAudioTime(sampleTime: AVAudioFramePosition(beatSampleTime), atRate: bufferSampleRate)
// This time is relative to the player's start time.
player.scheduleBuffer(soundBuffer[bufferNumber]!, at: playerBeatTime, options: AVAudioPlayerNodeBufferOptions(rawValue: 0), completionHandler: {
self.syncQueue!.sync() {
self.beatsScheduled -= 1
self.bufferNumber ^= 1
self.scheduleBeats()
}
})
beatsScheduled += 1
if (!playerStarted) {
// We defer the starting of the player so that the first beat will play precisely
// at player time 0. Having scheduled the first beat, we need the player to be running
// in order for nodeTimeForPlayerTime to return a non-nil value.
player.play()
playerStarted = true
}
let callbackBeat = beatNumber
beatNumber += 1
// Calculate the beat time for animating the UI, based on the playerBeatTime.
let nodeBeatTime: AVAudioTime = player.nodeTime(forPlayerTime: playerBeatTime)!
let output: AVAudioIONode = engine.outputNode
let latencyHostTicks: UInt64 = AVAudioTime.hostTime(forSeconds: output.presentationLatency)
// Calculate the final dispatch time at which the UI will be updated for this beat
let dispatchTime = DispatchTime(uptimeNanoseconds: nodeBeatTime.hostTime + latencyHostTicks)
// Visuals.
DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: dispatchTime) {
if (self.isPlaying) {
// send current call back beat.
self.delegate!.metronomeTicking!(self, bar: (callbackBeat / 4) + 1, beat: (callbackBeat % 4) + 1)
}
}
}
// My view controller class, where I show the beat number
class ViewController: UIViewController, UIGestureRecognizerDelegate, Metronomedelegate {
@IBOutlet var rhythmlabel: UILabel!
//view did load method
override func viewDidLoad() {
}
//delegate method for getting the beat value from metronome engine and showing in the UI label.
func metronomeTicking(_ metronome: Metronome, bar: Int, beat: Int) {
DispatchQueue.main.async {
print("Playing Beat \(beat)")
//show beat in label
self.rhythmlabel.text = "\(beat)"
}
}
}
I think you are making this a bit more complex than it needs to be. All you really need is to set a DispatchTime when you start the metronome, fire a function call whenever the DispatchTime is up, update the dispatch time based on the desired frequency, and loop as long as the metronome is enabled.
I prepared a project for you which implements this method so you can play with and use as you see fit: https://github.com/ekscrypto/Swift-Tutorial-Metronome
Good luck!
Metronome.swift
import Foundation
import AVFoundation
class Metronome {
var bpm: Float = 60.0 { didSet {
bpm = min(300.0,max(30.0,bpm))
}}
var enabled: Bool = false { didSet {
if enabled {
start()
} else {
stop()
}
}}
var onTick: ((_ nextTick: DispatchTime) -> Void)?
var nextTick: DispatchTime = DispatchTime.distantFuture
let player: AVAudioPlayer = {
do {
let soundURL = Bundle.main.url(forResource: "metronome", withExtension: "wav")!
let soundFile = try AVAudioFile(forReading: soundURL)
let player = try AVAudioPlayer(contentsOf: soundURL)
return player
} catch {
print("Oops, unable to initialize metronome audio buffer: \(error)")
return AVAudioPlayer()
}
}()
private func start() {
print("Starting metronome, BPM: \(bpm)")
player.prepareToPlay()
nextTick = DispatchTime.now()
tick()
}
private func stop() {
player.stop()
print("Stoping metronome")
}
private func tick() {
guard
enabled,
nextTick <= DispatchTime.now()
else { return }
let interval: TimeInterval = 60.0 / TimeInterval(bpm)
nextTick = nextTick + interval
DispatchQueue.main.asyncAfter(deadline: nextTick) { [weak self] in
self?.tick()
}
player.play(atTime: interval)
onTick?(nextTick)
}
}
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var bpmLabel: UILabel!
@IBOutlet weak var tickLabel: UILabel!
let myMetronome = Metronome()
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
myMetronome.onTick = { (nextTick) in
self.animateTick()
}
updateBpm()
}
private func animateTick() {
tickLabel.alpha = 1.0
UIView.animate(withDuration: 0.35) {
self.tickLabel.alpha = 0.0
}
}
@IBAction func startMetronome(_: Any?) {
myMetronome.enabled = true
}
@IBAction func stopMetronome(_: Any?) {
myMetronome.enabled = false
}
@IBAction func increaseBpm(_: Any?) {
myMetronome.bpm += 1.0
updateBpm()
}
@IBAction func decreaseBpm(_: Any?) {
myMetronome.bpm -= 1.0
updateBpm()
}
private func updateBpm() {
let metronomeBpm = Int(myMetronome.bpm)
bpmLabel.text = "\(metronomeBpm)"
}
}
Note: there seems to be a pre-loading issue: prepareToPlay() doesn't fully load the audio file before playing, which causes a timing issue with the first playback of the tick sound. That issue is left to the reader to figure out; since the original question is about synchronization, that is what the code above is meant to demonstrate.
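One possible mitigation, offered as an assumption rather than something from the linked project: force the player to decode the tick once before the first audible beat, for example by playing it silently when the metronome is created.
// A sketch of warming up the player so the first audible tick lands on time.
private func warmUpPlayer() {
    player.volume = 0 // silence the warm-up tick
    player.play() // forces the file to be decoded and buffered
    player.stop()
    player.currentTime = 0 // rewind so the first real tick starts at the beginning
    player.volume = 1
    player.prepareToPlay()
}
Calling warmUpPlayer() from init or just before start() should bring the first tick closer to its scheduled time.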

Swift timer won't work after invalidating and restarting

I'm using Timer() to indicate the current audio position in an audio player:
func startTimer() {
print("PlayerController: startTimer()")
if itemPlayTimer != nil {
return
}
itemPlayTimer = Timer.scheduledTimer(timeInterval: 0.001,
target: self,
selector: #selector(updateItemPlayerTimer),
userInfo: nil,
repeats: true)
}
@objc func updateItemPlayerTimer() {
guard let currentTime = player?.currentTime else {
return
}
updateTimeDescription?(currentTime)
}
When the user pauses the player, the app invalidates the timer:
func stopTimer() {
itemPlayTimer?.invalidate()
itemPlayTimer = nil
}
But after calling startTimer() again, the selector is never called.
The reason is that the timer is being scheduled from another thread, not the main thread, so it gets attached to a run loop that never runs and the selector is never fired.
Change your functions this way:
func startTimer() {
print("PlayerController: startTimer()")
if itemPlayTimer == nil {
itemPlayTimer = Timer.scheduledTimer(timeInterval: 0.001,
target: self,
selector: #selector(updateItemPlayerTimer),
userInfo: nil,
repeats: true)
}
}
@objc func updateItemPlayerTimer() {
guard let currentTime = player?.currentTime else {
return
}
updateTimeDescription?(currentTime)
}
func stopTimer() {
if itemPlayTimer != nil {
itemPlayTimer?.invalidate()
itemPlayTimer = nil
}
}
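If startTimer() can be reached from a background queue (for example from a player callback), a defensive sketch is to hop to the main queue before scheduling, so the timer is attached to the main run loop, which is always running:
func startTimer() {
    DispatchQueue.main.async { [weak self] in
        guard let self = self, self.itemPlayTimer == nil else { return }
        // Scheduling here attaches the timer to the main thread's run loop.
        self.itemPlayTimer = Timer.scheduledTimer(timeInterval: 0.001,
                                                  target: self,
                                                  selector: #selector(self.updateItemPlayerTimer),
                                                  userInfo: nil,
                                                  repeats: true)
    }
}
Also note that a 0.001-second interval is far more frequent than the screen can display; an interval around 1/60 of a second is usually enough for updating a time label.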
Use the following code. I have used an AVPlayer with a sample video to demonstrate pausing/playing with a Timer; the logic is the same for an audio player.
Just replace the AVPlayer with your audio player.
The key logic here is to properly manage the playing/not-playing state and to check the timer for nil, etc.
As indicated in this answer:
startTimer() starts the timer only if it's nil, and stopTimer() stops it only if it's not nil.
You only have to take care of stopping the timer before creating/starting a new one.
I have implemented this in a sample project and it works 100%.
Look carefully at the function pauseTapped(_ sender: UIButton).
See the sample GIF at the end of the code.
import UIKit
import AVKit
class TimerVC: UIViewController {
///A container view for displaying the AVPlayer.
@IBOutlet weak var playerView: UIView!
/// A button to play and pause the video
@IBOutlet weak var btnPause: UIButton!
/// To maintain the status of whether the AVPlayer is playing or not
var flagPlaying = true
/// An AVPlayer for displaying and playing the video
var player: AVPlayer?
/// To show the current time to the user
@IBOutlet weak var lblCurrentTime: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
//add an AVPlayer with sample URL link
addVideoPlayer( playerView: playerView)
}
///Timer
var itemPlayTimer: Timer?
@objc func startTimer() {
if itemPlayTimer != nil {
return
}
itemPlayTimer = Timer.scheduledTimer(timeInterval: 0.001,
target: self,
selector: #selector(updateItemPlayerTimer),
userInfo: nil,
repeats: true)
}
///update time label
@objc func updateItemPlayerTimer() {
guard let currentTime = player?.currentTime else {
return
}
updateTimeDescription(currentTime())
}
func updateTimeDescription(_ currentTime: CMTime) {
self.lblCurrentTime.text = "\(currentTime.seconds)"
}
///To Pause and play the video
@IBAction func pauseTapped(_ sender: UIButton) {
if flagPlaying {
//pause
if itemPlayTimer != nil {
player?.pause()
itemPlayTimer?.invalidate()
itemPlayTimer = nil
flagPlaying = false
DispatchQueue.main.async {
self.btnPause.setTitle("Play", for: .normal)
}
}
}else {
//not playing
if itemPlayTimer == nil {
player?.play()
startTimer()
flagPlaying = true
DispatchQueue.main.async {
self.btnPause.setTitle("Pause", for: .normal)
}
}
}
}
private func addVideoPlayer(playerView: UIView) {
//let playerItem = AVPlayerItem(asset: asset)
player = AVPlayer.init(url: URL.init(string: "http://techslides.com/demos/sample-videos/small.mp4")!)
let layer: AVPlayerLayer = AVPlayerLayer(player: player)
layer.backgroundColor = UIColor.white.cgColor
layer.frame = CGRect(x: 0, y: 0, width: playerView.frame.width, height: playerView.frame.height)
layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
playerView.layer.sublayers?.forEach({$0.removeFromSuperlayer()})
playerView.layer.addSublayer(layer)
flagPlaying = true
player?.play()
startTimer()
}
}
Working Example
Let me know if you need any help

CXCallObserver is not working properly, and the app crashes when run more than once (when contact image data is included)

I am facing two major problems. The first one is:
1. I am trying to detect incoming, outgoing, and dialing calls. For this I am using this code:
import UIKit
import CoreTelephony
import CallKit
class ViewController: UIViewController,CXCallObserverDelegate {
let callObserver = CXCallObserver()
var seconds = 0
var timer = Timer()
override func viewDidLoad() {
super.viewDidLoad()
callObserver.setDelegate(self, queue: nil)
}
override func viewWillAppear(_ animated: Bool) {
print("viewWillAppear \(seconds)")
}
fileprivate func runTimer(){
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(self.updateTimer), userInfo: nil, repeats: true)
}
@objc func updateTimer() {
seconds += 1
print("Seconds \(seconds)")
}
@IBAction func callButton(_ sender: UIButton) {
if let url = URL(string: "tel://\(12345879)"){
UIApplication.shared.open(url, options: [:], completionHandler: nil)
}
}
func callObserver(_ callObserver: CXCallObserver, callChanged call: CXCall) {
if call.hasEnded == true {
print("Disconnected")
seconds = 0
self.timer.invalidate()
}
if call.isOutgoing == true && call.hasConnected == false {
print("Dialing call")
self.runTimer()
}
if call.isOutgoing == false && call.hasConnected == false && call.hasEnded == false {
print("Incoming")
}
if call.hasConnected == true && call.hasEnded == false {
print("Connected")
}
}
}
It works fine when I dial a number: it shows "Dialing", but when I end the call it shows "Disconnected" and then the "Dialing" state again.
The other problem is with fetching all contact information from the device. It works fine when I am not fetching imageData, but when I fetch the contact images it only works the very first time. If I run it again the app becomes slow, and the time after that it crashes with "found nil while unwrapping a value".
I wrote my contact-fetching function in the AppDelegate; it is called when the app starts. This is the code:
func fetchContactList(){
let loginInformation = LoginInformation()
var contactModelData: [ContactsModel] = []
var profileImage : UIImage?
let store = CNContactStore()
store.requestAccess(for: .contacts, completionHandler: {
granted, error in
guard granted else {
let alert = UIAlertController(title: "Can't access contact", message: "Please go to Settings -> MyApp to enable contact permission", preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
self.window?.rootViewController?.present(alert, animated: true, completion: nil)
return
}
let keysToFetch = [CNContactFormatter.descriptorForRequiredKeys(for: .fullName),CNContactPhoneNumbersKey, CNContactEmailAddressesKey, CNContactPostalAddressesKey, CNContactImageDataKey, CNContactImageDataAvailableKey,CNContactThumbnailImageDataKey,CNContactThumbnailImageDataKey] as [Any]
let request = CNContactFetchRequest(keysToFetch: keysToFetch as! [CNKeyDescriptor])
var cnContacts = [CNContact]()
do {
try store.enumerateContacts(with: request){
(contact, cursor) -> Void in
cnContacts.append(contact)
}
} catch let error {
NSLog("Fetch contact error: \(error)")
}
for contact in cnContacts {
let fullName = CNContactFormatter.string(from: contact, style: .fullName) ?? "No Name"
var phoneNumberUnclean : String?
var labelofContact : String?
var phoneNumberClean: String?
for phoneNumber in contact.phoneNumbers {
if let number = phoneNumber.value as? CNPhoneNumber,
let label = phoneNumber.label {
let localizedLabel = CNLabeledValue<CNPhoneNumber>.localizedString(forLabel: label)
print("fullname \(fullName), localized \(localizedLabel), number \(number.stringValue)")
phoneNumberUnclean = number.stringValue
labelofContact = localizedLabel
}
}
if let imageData = contact.imageData {
profileImage = UIImage(data: imageData)
print("image \(String(describing: UIImage(data: imageData)))")
} else {
profileImage = UIImage(named: "user")
}
self.contactModelData.append(ContactsModel(contactName: fullName, contactNumber:phoneNumberUnclean!, contactLabel: labelofContact!, contactImage: profileImage!, contactNumberClean: phoneNumberUnclean!))
}
self.loginInformation.saveContactData(allContactData: self.contactModelData)
})
}
I have solved these two problems as follows:
For the first one: when I disconnect a call and it unfortunately reports the "Dialing" state again, I check the value of the "seconds" variable in the dialing branch; if it is greater than 0, I invalidate the timer instead of starting it again.
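In code, that guard might look roughly like this inside callObserver(_:callChanged:). This is a sketch of the fix described above, not the exact code used, and it may need adjusting depending on the order in which the callbacks arrive.
if call.isOutgoing == true && call.hasConnected == false {
    if seconds > 0 {
        // The timer already ran, so this "Dialing" is the stale callback after hang-up.
        seconds = 0
        self.timer.invalidate()
    } else {
        print("Dialing call")
        self.runTimer()
    }
}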
For the second problem:
I used DispatchQueue async to do the work on a background thread and took the thumbnail image instead of the full image data.
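A sketch of that idea for the contacts side: run the enumeration work on a background queue, build the images from the contact's thumbnailImageData (much smaller than imageData, and already included in the question's keysToFetch), and hop back to the main queue for anything that touches shared state or UI. Guarding the force-unwraps also matters, because a contact with no phone number leaves phoneNumberUnclean nil, which by itself can cause the "found nil while unwrapping" crash. The names below mirror the question's code; the dispatch structure is my assumption.
DispatchQueue.global(qos: .userInitiated).async {
    var models: [ContactsModel] = []
    for contact in cnContacts {
        let fullName = CNContactFormatter.string(from: contact, style: .fullName) ?? "No Name"
        // Skip contacts without a phone number instead of force-unwrapping later.
        guard let firstNumber = contact.phoneNumbers.first else { continue }
        let number = firstNumber.value.stringValue
        let label = CNLabeledValue<CNPhoneNumber>.localizedString(forLabel: firstNumber.label ?? "")
        // The thumbnail data is far smaller than the full imageData.
        let image = contact.thumbnailImageData.flatMap(UIImage.init(data:)) ?? UIImage(named: "user") ?? UIImage()
        models.append(ContactsModel(contactName: fullName, contactNumber: number,
                                    contactLabel: label, contactImage: image,
                                    contactNumberClean: number))
    }
    DispatchQueue.main.async {
        // Back on the main queue for anything that touches UI or shared state.
        self.contactModelData = models
        self.loginInformation.saveContactData(allContactData: models)
    }
}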
