Swift: Trying to control time in AVAudioPlayerNode using UISlider

I'm using an AVAudioPlayerNode attached to an AVAudioEngine to play a sound.
To get the current time of the player I'm doing this:
extension AVAudioPlayerNode {
    var currentTime: TimeInterval {
        if let nodeTime: AVAudioTime = self.lastRenderTime,
           let playerTime: AVAudioTime = self.playerTime(forNodeTime: nodeTime) {
            return Double(playerTime.sampleTime) / playerTime.sampleRate
        }
        return 0
    }
}
I have a slider that indicates the current time of the audio. When the user changes the slider value, on the .ended event I have to set the player's current time to the one indicated by the slider.
To do so:
extension AVAudioPlayerNode {
    func seekTo(value: Float, audioFile: AVAudioFile, duration: Float) {
        if let nodetime = self.lastRenderTime {
            let playerTime: AVAudioTime = self.playerTime(forNodeTime: nodetime)!
            let sampleRate = self.outputFormat(forBus: 0).sampleRate
            let newsampletime = AVAudioFramePosition(Int(sampleRate * Double(value)))
            let length = duration - value
            let framestoplay = AVAudioFrameCount(Float(playerTime.sampleRate) * length)
            self.stop()
            if framestoplay > 1000 {
                self.scheduleSegment(audioFile, startingFrame: newsampletime, frameCount: framestoplay, at: nil, completionHandler: nil)
            }
        }
        self.play()
    }
}
However, my seekTo function is not working correctly (I'm printing currentTime before and after the call, and it always shows a negative value of roughly -0.02). What am I doing wrong, and is there a simpler way to change the currentTime of the player?

I ran into the same issue. In my case framestoplay was always 0, because playerTime.sampleRate was always 0.
So,
let framestoplay = AVAudioFrameCount(Float(playerTime.sampleRate) * length)
must be replaced with
let framestoplay = AVAudioFrameCount(Float(sampleRate) * length)
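Putting the fix together, a corrected seekTo might look like this. This is only a sketch, not the asker's final code; it assumes value is the slider position in seconds and duration is the file length in seconds:
extension AVAudioPlayerNode {
    func seekTo(value: Float, audioFile: AVAudioFile, duration: Float) {
        // Use the node's output sample rate for the frame math;
        // playerTime.sampleRate can be 0 after a stop(), which is
        // what zeroed framestoplay in the question's version.
        let sampleRate = outputFormat(forBus: 0).sampleRate
        let startFrame = AVAudioFramePosition(sampleRate * Double(value))
        let framesToPlay = AVAudioFrameCount(Float(sampleRate) * (duration - value))
        stop()
        if framesToPlay > 1000 {
            scheduleSegment(audioFile, startingFrame: startFrame, frameCount: framesToPlay, at: nil, completionHandler: nil)
        }
        play()
    }
}
Note that after stop() plus a fresh scheduleSegment, the sample-time-based currentTime above restarts from zero, so a seek offset usually has to be stored separately and added back when displaying the position.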

Related

Swift - How to get the current position of AVAudioPlayerNode while it's looping?

I have an AVAudioPlayerNode looping a segment of a song:
audioPlayer.scheduleBuffer(segment, at: nil, options: .loops)
I want to get the current position in the song while it's playing. Usually this is done by calculating currentFrame / audioSampleRate, where
var currentFrame: AVAudioFramePosition {
    guard let lastRenderTime = audioPlayer.lastRenderTime,
          let playerTime = audioPlayer.playerTime(forNodeTime: lastRenderTime) else {
        return 0
    }
    return playerTime.sampleTime
}
However, when the loop ends and restarts, currentFrame does not reset; it keeps increasing, which makes currentFrame / audioSampleRate incorrect as the current position.
So what is the correct way to calculate the current position?
Good old modulo will do the job:
public var currentTime: TimeInterval {
    guard let nodeTime = player.lastRenderTime,
          let playerTime = player.playerTime(forNodeTime: nodeTime) else {
        return 0
    }
    let time = (Double(playerTime.sampleTime) / playerTime.sampleRate)
        .truncatingRemainder(dividingBy: Double(file.length) / Double(playerTime.sampleRate))
    return time
}
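Here player is the AVAudioPlayerNode and file is the looping AVAudioFile; the modulo wraps the ever-growing sample time back into a single pass of the file. For instance, to drive a progress slider from it (a sketch; slider and the properties above are assumed to exist in scope):
// Normalize the looped position to 0...1 for a UISlider, polled e.g. from a Timer.
let loopDuration = Double(file.length) / file.processingFormat.sampleRate
slider.value = Float(currentTime / loopDuration)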

Equivalent of getBufferedPosition() in exoplayer for iOS AVPlayer

I am currently developing an AVPlayer app that calculates HLS streaming metrics. I want to get the buffer level for the current item.
private var availableDuration: Double {
    guard let timeRange = player.currentItem?.loadedTimeRanges.first?.timeRangeValue else {
        return 0.0
    }
    let startSeconds = timeRange.start.seconds
    let durationSeconds = timeRange.duration.seconds
    return startSeconds + durationSeconds
}
I am a little confused by the terminology used in Apple's documentation.
Here I am getting the availableDuration of the current item, but I am not sure whether this represents the buffer level of the current item.
Your code seems OK. I used the same approach:
var bufferInSeconds: Double {
    guard let range = self.loadedTimeRanges.first?.timeRangeValue else {
        return 0.0
    }
    let sec = range.start.seconds + range.duration.seconds
    return sec >= 0 ? sec : 0
}
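One thing to note: both snippets return the absolute end position of the first loaded range, which matches ExoPlayer's getBufferedPosition() (also an absolute position). If you instead want the amount buffered ahead of the playhead, subtract the current time. A sketch, written as an AVPlayerItem extension property like the answer's:
var bufferAheadOfPlayhead: Double {
    guard let range = loadedTimeRanges.first?.timeRangeValue else { return 0 }
    let bufferedEnd = range.start.seconds + range.duration.seconds
    // currentTime() is the playhead of this item.
    return max(bufferedEnd - currentTime().seconds, 0)
}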

Set left and right headphone volume using two different sliders

I am generating a wave sound at different frequencies; the user should hear this sound through headphones only, and he/she sets the left and right headphone volumes using two different sliders. To produce the wave sound I wrote the code below, which works perfectly.
But the problem is: for the last 5 days I have been trying to set the volume for the left and right headphones separately, with no luck.
class Synth {
    // MARK: Properties
    public static let shared = Synth()

    public var volume: Float {
        set {
            audioEngine.mainMixerNode.outputVolume = newValue
        }
        get {
            audioEngine.mainMixerNode.outputVolume
        }
    }

    public var frequencyRampValue: Float = 0

    public var frequency: Float = 440 {
        didSet {
            if oldValue != 0 {
                frequencyRampValue = frequency - oldValue
            } else {
                frequencyRampValue = 0
            }
        }
    }

    private var audioEngine: AVAudioEngine

    private lazy var sourceNode = AVAudioSourceNode { _, _, frameCount, audioBufferList in
        let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
        let localRampValue = self.frequencyRampValue
        let localFrequency = self.frequency - localRampValue
        let period = 1 / localFrequency
        for frame in 0..<Int(frameCount) {
            let percentComplete = self.time / period
            let sampleVal = self.signal(localFrequency + localRampValue * percentComplete, self.time)
            self.time += self.deltaTime
            self.time = fmod(self.time, period)
            for buffer in ablPointer {
                let buf: UnsafeMutableBufferPointer<Float> = UnsafeMutableBufferPointer(buffer)
                buf[frame] = sampleVal
            }
        }
        self.frequencyRampValue = 0
        return noErr
    }

    private var time: Float = 0
    private let sampleRate: Double
    private let deltaTime: Float
    private var signal: Signal

    // MARK: Init
    init(signal: @escaping Signal = Oscillator.square) {
        audioEngine = AVAudioEngine()
        let mainMixer = audioEngine.mainMixerNode
        let outputNode = audioEngine.outputNode
        let format = outputNode.inputFormat(forBus: 0)
        sampleRate = format.sampleRate
        deltaTime = 1 / Float(sampleRate)
        self.signal = signal
        let inputFormat = AVAudioFormat(commonFormat: format.commonFormat,
                                        sampleRate: format.sampleRate,
                                        channels: 1,
                                        interleaved: format.isInterleaved)
        audioEngine.attach(sourceNode)
        audioEngine.connect(sourceNode, to: mainMixer, format: inputFormat)
        audioEngine.connect(mainMixer, to: outputNode, format: nil)
        mainMixer.outputVolume = 0
        audioEngine.mainMixerNode.pan = 100 // this does not work
        //audioEngine.mainMixerNode.pan = 1.0 // this also does not work
        do {
            try audioEngine.start()
        } catch {
            print("Could not start engine: \(error.localizedDescription)")
        }
    }

    // This function will be called in the view controller to generate sound
    public func setWaveformTo(_ signal: @escaping Signal) {
        self.signal = signal
    }
}
With the above code I can hear the wave sound as normal in the left and right headphones.
I tried to use audioEngine.mainMixerNode.pan with the values 100 and -100, and also -1.0 and 1.0, but this did not make any change.
The allowable range for the pan value is [-1.0, 1.0]. The values that you say you used are outside that range, so it's not surprising that they had no effect. Try 0.75 or -0.75 instead.
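If the goal is truly independent left and right volumes rather than a single pan position, one approach is to fan the mono source out to two mixer nodes panned hard left and hard right, and drive each mixer's outputVolume from its own slider. A sketch of the graph setup, assuming it replaces the single sourceNode connect call in the init above (leftSlider and rightSlider are hypothetical controls):
// Two mixers, one per ear, both fed by the same source node.
let leftMixer = AVAudioMixerNode()
let rightMixer = AVAudioMixerNode()
audioEngine.attach(leftMixer)
audioEngine.attach(rightMixer)

// Fan the source out to both mixers (one connection point per mixer).
let destinations = [
    AVAudioConnectionPoint(node: leftMixer, bus: 0),
    AVAudioConnectionPoint(node: rightMixer, bus: 0)
]
audioEngine.connect(sourceNode, to: destinations, fromBus: 0, format: inputFormat)

// Pan each branch hard to one side. Pan only behaves as expected on a
// mono input, which is why the source format above uses channels: 1.
leftMixer.pan = -1.0
rightMixer.pan = 1.0
audioEngine.connect(leftMixer, to: audioEngine.mainMixerNode, format: nil)
audioEngine.connect(rightMixer, to: audioEngine.mainMixerNode, format: nil)

// Later, from the two sliders:
// leftMixer.outputVolume = leftSlider.value
// rightMixer.outputVolume = rightSlider.value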

iOS Adjust Pitch Whilst Playing via AVAudioUnitTimePitch

I’m trying to adjust the pitch of some audio whilst it’s playing. I’m very new to Swift and iOS; my initial attempt was to just change timePitchNode.pitch during playback, but it wouldn’t update. My current attempt is to reset audioEngine and have it resume from where it was playing (below). How do I determine where the audio currently is, and how do I get it to resume from there?
var audioFile: AVAudioFile?
var audioEngine: AVAudioEngine?
var audioPlayerNode: AVAudioPlayerNode?
var pitch: Int = 1 {
    didSet {
        playResumeAudio()
    }
}
…
func playResumeAudio() {
    var currentTime: AVAudioTime? = nil
    if let playerNode = audioPlayerNode, let nodeTime = playerNode.lastRenderTime {
        currentTime = playerNode.playerTime(forNodeTime: nodeTime)
    }
    audioEngine?.stop()
    audioEngine?.reset()
    audioEngine = AVAudioEngine()
    audioPlayerNode = AVAudioPlayerNode()
    audioEngine!.attach(audioPlayerNode!)
    let timePitchNode = AVAudioUnitTimePitch()
    timePitchNode.pitch = Float(pitch * 100)
    timePitchNode.rate = rate
    audioEngine!.attach(timePitchNode)
    audioEngine!.connect(audioPlayerNode!, to: timePitchNode, format: nil)
    audioEngine!.connect(timePitchNode, to: audioEngine!.outputNode, format: nil)
    audioPlayerNode!.scheduleFile(audioFile!, at: nil, completionHandler: nil)
    let _ = try? audioEngine?.start()
    audioPlayerNode!.play(at: currentTime)
}
I was being dumb apparently. You can modify the pitch during playback, and it does update. No need to reset any audio, just mutate the node as it’s playing, and it’ll work.
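For example, if timePitchNode is kept as a property instead of a local inside playResumeAudio(), a control handler can adjust it live. A sketch; pitchSliderChanged and its semitone parameter are made up for illustration:
let timePitchNode = AVAudioUnitTimePitch()

// ... attach and connect once at setup; then, e.g. from a slider:
func pitchSliderChanged(toSemitones semitones: Float) {
    // pitch is measured in cents; 100 cents = 1 semitone.
    // The change takes effect immediately, even mid-playback.
    timePitchNode.pitch = semitones * 100
}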

How do I get current playing time and total play time in AVPlayer?

Is it possible to get the current playing time and total play time in AVPlayer? If yes, how can I do this?
You can access the currently playing item by using the currentItem property:
AVPlayerItem *currentItem = yourAVPlayer.currentItem;
Then you can easily get the requested time values
CMTime duration = currentItem.duration; //total time
CMTime currentTime = currentItem.currentTime; //playing time
Swift 5:
if let currentItem = player.currentItem {
    let duration = CMTimeGetSeconds(currentItem.duration)
    let currentTime = CMTimeGetSeconds(currentItem.currentTime())
    print("Duration: \(duration) s")
    print("Current time: \(currentTime) s")
}
_audioPlayer = [self playerWithAudio:_audio];
_observer = [_audioPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, 2)
                                                       queue:dispatch_get_main_queue()
                                                  usingBlock:^(CMTime time) {
    _progress = CMTimeGetSeconds(time);
}];
Swift 3
let currentTime: Double = player.currentItem?.currentTime().seconds ?? 0
You can get the seconds of the current time by accessing the seconds property of currentTime(). This returns a Double representing the time in seconds. You can then use this value to construct a readable time string to present to the user.
First, include a method that returns the H:mm:ss components you will display to the user:
func getHoursMinutesSecondsFrom(seconds: Double) -> (hours: Int, minutes: Int, seconds: Int) {
    let secs = Int(seconds)
    let hours = secs / 3600
    let minutes = (secs % 3600) / 60
    let seconds = (secs % 3600) % 60
    return (hours, minutes, seconds)
}
Next, a method that will convert the values you retrieved above into a readable string:
func formatTimeFor(seconds: Double) -> String {
    let result = getHoursMinutesSecondsFrom(seconds: seconds)
    let hoursString = "\(result.hours)"
    var minutesString = "\(result.minutes)"
    if minutesString.count == 1 {
        minutesString = "0\(result.minutes)"
    }
    var secondsString = "\(result.seconds)"
    if secondsString.count == 1 {
        secondsString = "0\(result.seconds)"
    }
    var time = "\(hoursString):"
    if result.hours >= 1 {
        time.append("\(minutesString):\(secondsString)")
    } else {
        time = "\(minutesString):\(secondsString)"
    }
    return time
}
Now, update the UI with the previous calculations:
func updateTime() {
    // Access the current item
    if let currentItem = player.currentItem {
        // Get the current time in seconds
        let playhead = currentItem.currentTime().seconds
        let duration = currentItem.duration.seconds
        // Format seconds as a human-readable string
        playheadLabel.text = formatTimeFor(seconds: playhead)
        durationLabel.text = formatTimeFor(seconds: duration)
    }
}
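To keep those labels current, updateTime() needs to be called periodically; a common way is a periodic time observer on the player. A sketch, assuming the player, labels, and updateTime() above live in the same view controller and timeObserverToken is a property added to retain the observer:
// Fire twice a second on the main queue and refresh the labels.
let interval = CMTime(seconds: 0.5, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
timeObserverToken = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] _ in
    self?.updateTime()
}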
With Swift 4.2, use this:
let currentPlayer = AVPlayer()
if let currentItem = currentPlayer.currentItem {
    let duration = currentItem.asset.duration
}
let currentTime = currentPlayer.currentTime()
Swift 4
self.playerItem = AVPlayerItem(url: videoUrl!)
self.player = AVPlayer(playerItem: self.playerItem)
self.player?.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, 1), queue: DispatchQueue.main, using: { (time) in
    if self.player!.currentItem?.status == .readyToPlay {
        let currentTime = CMTimeGetSeconds(self.player!.currentTime())
        let secs = Int(currentTime)
        self.timeLabel.text = String(format: "%02d:%02d", secs / 60, secs % 60)
    }
})
AVPlayerItem *currentItem = player.currentItem;
NSTimeInterval currentTime = CMTimeGetSeconds(currentItem.currentTime);
NSLog(@"Capturing Time: %f", currentTime);
Swift:
if let currentItem = yourAVPlayer.currentItem {
    let duration = currentItem.asset.duration
    let currentTime = currentItem.currentTime()
}
Swift 5:
Timer.scheduledTimer seems better than addPeriodicTimeObserver if you want to have a smooth progress bar
public var currentTime = 0.0
public var currentTimeString = "00:00"

Timer.scheduledTimer(withTimeInterval: 1 / 60, repeats: true) { timer in
    if self.player!.currentItem?.status == .readyToPlay {
        let timeElapsed = CMTimeGetSeconds(self.player!.currentTime())
        let secs = Int(timeElapsed)
        self.currentTime = timeElapsed
        self.currentTimeString = String(format: "%02d:%02d", secs / 60, secs % 60)
        print("AudioPlayer TIME UPDATE: \(self.currentTime) \(self.currentTimeString)")
    }
}
Swift 4.2:
if let currentItem = yourAVPlayer.currentItem {
    let duration = currentItem.asset.duration
    let currentTime = currentItem.currentTime()
}
In Swift 5+:
You can query the player directly to find the current time of the actively playing AVPlayerItem.
The time is stored in a CMTime struct for ease of conversion to various timescales, such as 10ths or 100ths of a second.
In most cases you need the time in seconds, so the following will give you what you want:
let currentTimeInSecs = CMTimeGetSeconds(player.currentTime())
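If you do need one of those other timescales, CMTimeConvertScale re-expresses the same time. A small sketch:
// Re-express the current time in 10ths of a second (timescale 10).
let tenths = CMTimeConvertScale(player.currentTime(),
                                timescale: 10,
                                method: .default)
print("\(tenths.value) tenths of a second") // value counts units of 1/10 s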
