Metronome iOS Swift beat visuals lag

I'm trying to create a metronome app by implementing the sample code provided by Apple. Everything works fine, but I'm seeing a delay in the beat visuals; they're not properly synchronised with the player time. Here is the sample code provided by Apple:
let secondsPerBeat = 60.0 / tempoBPM
let samplesPerBeat = Float(secondsPerBeat * Float(bufferSampleRate))
let beatSampleTime: AVAudioFramePosition = AVAudioFramePosition(nextBeatSampleTime)
let playerBeatTime: AVAudioTime = AVAudioTime(sampleTime: AVAudioFramePosition(beatSampleTime), atRate: bufferSampleRate)
// This time is relative to the player's start time.
player.scheduleBuffer(soundBuffer[bufferNumber]!, at: playerBeatTime, options: AVAudioPlayerNodeBufferOptions(rawValue: 0), completionHandler: {
self.syncQueue!.sync() {
self.beatsScheduled -= 1
self.bufferNumber ^= 1
self.scheduleBeats()
}
})
beatsScheduled += 1
if (!playerStarted) {
// We defer the starting of the player so that the first beat will play precisely
// at player time 0. Having scheduled the first beat, we need the player to be running
// in order for nodeTimeForPlayerTime to return a non-nil value.
player.play()
playerStarted = true
}
let callbackBeat = beatNumber
beatNumber += 1
// Calculate the beat time for animating the UI based on the player beat time.
let nodeBeatTime: AVAudioTime = player.nodeTime(forPlayerTime: playerBeatTime)!
let output: AVAudioIONode = engine.outputNode
let latencyHostTicks: UInt64 = AVAudioTime.hostTime(forSeconds: output.presentationLatency)
// Calculate the final dispatch time which will update the UI at particular intervals
let dispatchTime = DispatchTime(uptimeNanoseconds: nodeBeatTime.hostTime + latencyHostTicks)
// Visuals.
DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: dispatchTime) {
if (self.isPlaying) {
// send current call back beat.
self.delegate!.metronomeTicking!(self, bar: (callbackBeat / 4) + 1, beat: (callbackBeat % 4) + 1)
}
}
}
// my view controller class where i'm showing the beat number
class ViewController: UIViewController, UIGestureRecognizerDelegate, Metronomedelegate {
@IBOutlet var rhythmlabel: UILabel!
//view did load method
override func viewDidLoad() {
}
//delegate method for getting the beat value from metronome engine and showing in the UI label.
func metronomeTicking(_ metronome: Metronome, bar: Int, beat: Int) {
DispatchQueue.main.async {
print("Playing Beat \(beat)")
//show beat in label
self.rhythmlabel.text = "\(beat)"
}
}
}

I think you are making this more complicated than it needs to be. All you really need is to set a DispatchTime when you start the metronome, fire a function call whenever the DispatchTime is up, update the DispatchTime based on the desired frequency, and loop as long as the metronome is enabled.
I prepared a project for you which implements this method so you can play with and use as you see fit: https://github.com/ekscrypto/Swift-Tutorial-Metronome
Good luck!
Metronome.swift
import Foundation
import AVFoundation
class Metronome {
var bpm: Float = 60.0 { didSet {
bpm = min(300.0,max(30.0,bpm))
}}
var enabled: Bool = false { didSet {
if enabled {
start()
} else {
stop()
}
}}
var onTick: ((_ nextTick: DispatchTime) -> Void)?
var nextTick: DispatchTime = DispatchTime.distantFuture
let player: AVAudioPlayer = {
do {
let soundURL = Bundle.main.url(forResource: "metronome", withExtension: "wav")!
let soundFile = try AVAudioFile(forReading: soundURL)
let player = try AVAudioPlayer(contentsOf: soundURL)
return player
} catch {
print("Oops, unable to initialize metronome audio buffer: \(error)")
return AVAudioPlayer()
}
}()
private func start() {
print("Starting metronome, BPM: \(bpm)")
player.prepareToPlay()
nextTick = DispatchTime.now()
tick()
}
private func stop() {
player.stop()
print("Stoping metronome")
}
private func tick() {
guard
enabled,
nextTick <= DispatchTime.now()
else { return }
let interval: TimeInterval = 60.0 / TimeInterval(bpm)
nextTick = nextTick + interval
DispatchQueue.main.asyncAfter(deadline: nextTick) { [weak self] in
self?.tick()
}
player.play(atTime: interval)
onTick?(nextTick)
}
}
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var bpmLabel: UILabel!
@IBOutlet weak var tickLabel: UILabel!
let myMetronome = Metronome()
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
myMetronome.onTick = { (nextTick) in
self.animateTick()
}
updateBpm()
}
private func animateTick() {
tickLabel.alpha = 1.0
UIView.animate(withDuration: 0.35) {
self.tickLabel.alpha = 0.0
}
}
@IBAction func startMetronome(_: Any?) {
myMetronome.enabled = true
}
@IBAction func stopMetronome(_: Any?) {
myMetronome.enabled = false
}
@IBAction func increaseBpm(_: Any?) {
myMetronome.bpm += 1.0
updateBpm()
}
@IBAction func decreaseBpm(_: Any?) {
myMetronome.bpm -= 1.0
updateBpm()
}
private func updateBpm() {
let metronomeBpm = Int(myMetronome.bpm)
bpmLabel.text = "\(metronomeBpm)"
}
}
Note: there seems to be a pre-loading issue: prepareToPlay() doesn't fully load the audio file before playing, which causes a timing issue with the first playback of the tick sound. That issue is left to the reader to figure out. The original question being about synchronization, that part should be demonstrated in the code above.
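One possible workaround for that pre-loading hiccup (just a sketch, not part of the linked project: the function name and the mute-and-play trick are my own) is to warm the player up once, muted, before the first real tick, so the file is already decoded when the metronome starts:
import AVFoundation

// Sketch: force an AVAudioPlayer to decode its file and open the audio route
// before the first audible tick, so the first beat is not late.
func preloadClick(_ player: AVAudioPlayer) {
    player.volume = 0
    player.play()          // decodes the file and spins up the audio hardware
    player.stop()
    player.currentTime = 0
    player.volume = 1
    player.prepareToPlay()
}
Calling something like preloadClick(player) from the Metronome initializer, or from start() before the first tick(), should make the first beat land closer to on time.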

Related

Picture in Picture from AVSampleBufferDisplayLayer not loading

I'm trying to support Picture in Picture for my iOS app and I need to display the content of a view, not a video. So I tried to use a library to record a view and show the video in an AVSampleBufferDisplayLayer. It works: the content of the view is displayed in the buffer display layer, but when I try to use PiP, only a loading indicator is shown. Here is my code:
import UIKit
import AVKit
class View: UIView {
override class var layerClass: AnyClass {
AVSampleBufferDisplayLayer.self
}
}
class ViewController: UIViewController, AVPictureInPictureSampleBufferPlaybackDelegate {
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, setPlaying playing: Bool) {
}
func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
.init(start: .zero, duration: self.buffers.first?.duration ?? .indefinite)
}
func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
false
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime, completion completionHandler: @escaping () -> Void) {
}
@IBOutlet weak var playerView: View!
@IBOutlet weak var textView: UITextView!
var pipController: AVPictureInPictureController?
var glimpse: Glimpse!
var isRecording = false
var buffers = [CMSampleBuffer]()
@IBAction func pip() {
pipController?.startPictureInPicture()
}
func startRecording() {
glimpse = Glimpse()
glimpse.startRecording(textView, withCallback: { url in
if let url = url {
do {
DispatchQueue.main.async {
(self.playerView.layer as! AVSampleBufferDisplayLayer).flush()
if self.pipController == nil {
self.pipController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: self.playerView.layer as! AVSampleBufferDisplayLayer, playbackDelegate: self))
self.pipController?.requiresLinearPlayback = true
}
}
let reader = try AVAssetReader(asset: AVAsset(url: url))
let output = AVAssetReaderTrackOutput(track: reader.asset.tracks.first!, outputSettings: nil)
reader.add(output)
reader.startReading()
while let buffer = output.copyNextSampleBuffer() {
self.buffers.append(buffer)
}
try FileManager.default.removeItem(at: url)
} catch {
print(error)
}
}
})
isRecording = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
var i = 0
_ = Timer.scheduledTimer(withTimeInterval: 1, repeats: true, block: { _ in
self.textView.text += "Hello World! (\(i))\n"
if self.isRecording {
self.glimpse.stop()
self.startRecording()
}
i += 1
})
let layer = playerView.layer as! AVSampleBufferDisplayLayer
layer.requestMediaDataWhenReady(on: .global()) {
if let buffer = self.buffers.first {
layer.enqueue(buffer)
self.buffers.remove(at: 0)
}
}
startRecording()
}
}
In this example, I modify the content of a UITextView every second and I record a video of it. Then I extract the CMSampleBuffers to display them in the AVSampleBufferDisplayLayer.
I attached two screenshots, the first shows how the content of the text view is successfully shown in the AVSampleBufferDisplayLayer and the second shows how nothing is displayed when PIP is enabled.
What am I doing wrong?
I have experienced the same behavior when returning an incorrect time range for playback. Make sure you return .positiveInfinity for the duration, otherwise your layer will be covered with the loading indicator.
func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
return CMTimeRange(start: .negativeInfinity, duration: .positiveInfinity)
}
Documented here:
https://developer.apple.com/documentation/avkit/avpictureinpicturesamplebufferplaybackdelegate/3750337-pictureinpicturecontrollertimera?changes=la
I have something like this working in Secure ShellFish and it made a difference how the CMSampleBuffers were created.
I had to create the CMSampleBuffer from a CVPixelBuffer that was IOSurface compatible, and I had to mark the CMSampleBuffer with kCMSampleAttachmentKey_DisplayImmediately.
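For reference, here is a minimal sketch of that idea (assumptions: a fixed 32BGRA pixel format, host-clock timestamps, and that you render the view's contents into the pixel buffer yourself; the function name is made up for illustration):
import AVFoundation
import CoreMedia
import CoreVideo

// Sketch: build an IOSurface-backed pixel buffer, wrap it in a CMSampleBuffer,
// and mark it with kCMSampleAttachmentKey_DisplayImmediately.
func makeDisplayImmediatelySampleBuffer(width: Int, height: Int) -> CMSampleBuffer? {
    let attrs: [String: Any] = [
        kCVPixelBufferIOSurfacePropertiesKey as String: [:],
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    var pixelBuffer: CVPixelBuffer?
    guard CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA,
                              attrs as CFDictionary, &pixelBuffer) == kCVReturnSuccess,
          let imageBuffer = pixelBuffer else { return nil }

    // ... render the view's contents into imageBuffer here ...

    var formatDescription: CMVideoFormatDescription?
    CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                 imageBuffer: imageBuffer,
                                                 formatDescriptionOut: &formatDescription)
    guard let format = formatDescription else { return nil }

    var timing = CMSampleTimingInfo(duration: .invalid,
                                    presentationTimeStamp: CMClockGetTime(CMClockGetHostTimeClock()),
                                    decodeTimeStamp: .invalid)
    var sampleBuffer: CMSampleBuffer?
    CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault,
                                             imageBuffer: imageBuffer,
                                             formatDescription: format,
                                             sampleTiming: &timing,
                                             sampleBufferOut: &sampleBuffer)
    guard let sample = sampleBuffer else { return nil }

    // Tell AVSampleBufferDisplayLayer to show this frame immediately instead of
    // waiting for its timestamp to come up on the layer's timebase.
    if let attachments = CMSampleBufferGetSampleAttachmentsArray(sample, createIfNecessary: true),
       CFArrayGetCount(attachments) > 0 {
        let dict = unsafeBitCast(CFArrayGetValueAtIndex(attachments, 0), to: CFMutableDictionary.self)
        CFDictionarySetValue(dict,
                             Unmanaged.passUnretained(kCMSampleAttachmentKey_DisplayImmediately).toOpaque(),
                             Unmanaged.passUnretained(kCFBooleanTrue).toOpaque())
    }
    return sample
}
The key points from the answer are the IOSurface compatibility of the backing pixel buffer and the display-immediately attachment; how the frame data itself gets into the buffer depends on the recording library.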

How to live stream an application screen that has a camera view with some other UIViews over the camera view

Actually, I want to broadcast a live match with some overlays over it, like sponsor images in the top corners of the screen and a scorecard at the bottom of the screen. Can someone help me or guide me towards a way of implementing this? I use this pod (HaishinKit), but it is not serving the purpose. I use the rtmpStream.attachScreen function for broadcasting my UIView, but this function is not picking up my camera view (AVCaptureVideoPreviewLayer); other than that, the scorecard and sponsor images are broadcasting. I want to broadcast my camera screen along with the scorecard and other images, along with the audio.
import UIKit
import HaishinKit
import AVFoundation
import VideoToolbox
import Loaf
import WebKit
class BroadcastViewController: UIViewController, RTMPStreamDelegate {
// Camera Preview View
@IBOutlet private weak var previewView: UIView!
@IBOutlet weak var videoView: UIView!
// Camera Selector
@IBOutlet weak var cameraSelector: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
// Go Live Button
@IBOutlet weak var startStopButton: UIButton!
// FPS and Bitrate Labels
@IBOutlet weak var fpsLabel: UILabel!
@IBOutlet weak var bitrateLabel: UILabel!
// RTMP Connection & RTMP Stream
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
// Default Camera
private var defaultCamera: AVCaptureDevice.Position = .back
// Flag indicates if we should be attempting to go live
private var liveDesired = false
// Reconnect attempt tracker
private var reconnectAttempt = 0
// The RTMP Stream key to broadcast to.
public var streamKey: String!
// The Preset to use
public var preset: Preset!
// A tracker of the last time we changed the bitrate in ABR
private var lastBwChange = 0
// The RTMP endpoint
let rtmpEndpoint = "rtmps://live-api-s.facebook.com:443/rtmp/"
//Camera capture required properties
var videoDataOutput: AVCaptureVideoDataOutput!
var videoDataOutputQueue: DispatchQueue!
var previewLayer:AVCaptureVideoPreviewLayer!
var captureDevice : AVCaptureDevice!
let session = AVCaptureSession()
var isPublic = false
// Some basic presets for live streaming
enum Preset {
case hd_1080p_30fps_5mbps
case hd_720p_30fps_3mbps
case sd_540p_30fps_2mbps
case sd_360p_30fps_1mbps
}
// An encoding profile - width, height, framerate, video bitrate
private class Profile {
public var width : Int = 0
public var height : Int = 0
public var frameRate : Int = 0
public var bitrate : Int = 0
init(width: Int, height: Int, frameRate: Int, bitrate: Int) {
self.width = width
self.height = height
self.frameRate = frameRate
self.bitrate = bitrate
}
}
// Converts a Preset to a Profile
private func presetToProfile(preset: Preset) -> Profile {
switch preset {
case .hd_1080p_30fps_5mbps:
return Profile(width: 1920, height: 1080, frameRate: 30, bitrate: 5000000)
case .hd_720p_30fps_3mbps:
return Profile(width: 1280, height: 720, frameRate: 30, bitrate: 3000000)
case .sd_540p_30fps_2mbps:
return Profile(width: 960, height: 540, frameRate: 30, bitrate: 2000000)
case .sd_360p_30fps_1mbps:
return Profile(width: 640, height: 360, frameRate: 30, bitrate: 1000000)
}
}
// Configures the live stream
private func configureStream(preset: Preset) {
let profile = presetToProfile(preset: preset)
// Configure the capture settings from the camera
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1920x1080,
.continuousAutofocus: true,
.continuousExposure: true,
.fps: profile.frameRate
]
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
// let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = .landscapeRight
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height,
.bitrate: profile.bitrate,
.profileLevel: kVTProfileLevel_H264_Main_AutoLevel,
.maxKeyFrameIntervalDuration: 2, // 2 seconds
]
}
} else {
// Fallback on earlier versions
}
// Configure the RTMP audio stream
// rtmpStream.audioSettings = [
// .bitrate: 128000 // Always use 128kbps
// ]
}
// Publishes the live stream
private func publishStream() {
print("Calling publish()")
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: previewView))
rtmpStream.publish("minestreamkey")
DispatchQueue.main.async {
self.startStopButton.setTitle("Stop Streaming!", for: .normal)
}
}
// Triggers and attempt to connect to an RTMP hostname
private func connectRTMP() {
print("Calling connect()")
rtmpConnection.connect(rtmpEndpoint)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// videoView.startSession()
}
override func viewDidLoad() {
super.viewDidLoad()
self.setupAVCapture()
previewView.bringSubviewToFront(webview)
webview.load(NSURLRequest(url: NSURL(string: "https://graphics.crickslab.com/scorecard/0865e840-f147-11eb-95cb-65228ef0512c/Blitzz-vs-Crickslab-Officials-Fri30Jul2021-1201AM-")! as URL) as URLRequest)
print("Broadcast View Controller Init")
print("Stream Key: " + "FB-3940543509404805-0-AbxeU6r48NpFcasH")
// Work out the orientation of the device, and set this on the RTMP Stream
rtmpStream = RTMPStream(connection: rtmpConnection)
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
}
} else {
// Fallback on earlier versions
}
// And a listener for orientation changes
// Note: Changing the orientation once the stream has been started will not change the orientation of the live stream, only the preview.
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
// Configure the encoder profile
configureStream(preset: self.preset)
// Attach to the default audio device
// rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
// print(error.description)
// }
//
// // Attach to the default camera
// rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera)) { error in
// print(error.description)
// }
// Register a tap gesture recogniser so we can use tap to focus
let tap = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
previewView.addGestureRecognizer(tap)
previewView.isUserInteractionEnabled = true
// Attach the preview view
// previewView?.attachStream(rtmpStream)
// Add event listeners for RTMP status changes and IO Errors
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpStream.delegate = self
startStopButton.setTitle("Go Live!", for: .normal)
}
// 👉📱 Tap to focus / exposure
@objc func handleTap(_ sender: UITapGestureRecognizer) {
if sender.state == UIGestureRecognizer.State.ended {
let point = sender.location(in: previewView)
let pointOfInterest = CGPoint(x: point.x / previewView.bounds.size.width, y: point.y / previewView.bounds.size.height)
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
// Triggered when the user tries to change camera
@IBAction func changeCameraToggle(_ sender: UISegmentedControl) {
switch cameraSelector.selectedSegmentIndex
{
case 0:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.back))
case 1:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.front))
default:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera))
}
}
// Triggered when the user taps the go live button
@IBAction func goLiveButton(_ sender: UIButton) {
print("Go Live Button tapped!")
if !liveDesired {
if rtmpConnection.connected {
// If we're already connected to the RTMP server, we can just call publish() to start the stream
publishStream()
} else {
// Otherwise, we need to setup the RTMP connection and wait for a callback before we can safely
// call publish() to start the stream
connectRTMP()
}
// Modify application state to streaming
liveDesired = true
startStopButton.setTitle("Connecting...", for: .normal)
} else {
// Unpublish the live stream
rtmpStream.close()
// Modify application state to idle
liveDesired = false
startStopButton.setTitle("Go Live!", for: .normal)
}
}
// Called when the RTMPStream or RTMPConnection changes status
@objc
private func rtmpStatusHandler(_ notification: Notification) {
print("RTMP Status Handler called.")
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
// Send a nicely styled notification about the RTMP Status
var loafStyle = Loaf.State.info
switch code {
case RTMPConnection.Code.connectSuccess.rawValue, RTMPStream.Code.publishStart.rawValue, RTMPStream.Code.unpublishSuccess.rawValue:
loafStyle = Loaf.State.success
case RTMPConnection.Code.connectFailed.rawValue:
loafStyle = Loaf.State.error
case RTMPConnection.Code.connectClosed.rawValue:
loafStyle = Loaf.State.warning
default:
break
}
DispatchQueue.main.async {
Loaf("RTMP Status: " + code, state: loafStyle, location: .top, sender: self).show(.short)
}
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
reconnectAttempt = 0
if liveDesired {
// Publish our stream to our stream key
publishStream()
}
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
print("RTMP Connection was not successful.")
// Retry the connection if "live" is still the desired state
if liveDesired {
reconnectAttempt += 1
DispatchQueue.main.async {
self.startStopButton.setTitle("Reconnect attempt " + String(self.reconnectAttempt) + " (Cancel)" , for: .normal)
}
// Retries the RTMP connection every 5 seconds
DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
self.connectRTMP()
}
}
default:
break
}
}
// Called when there's an RTMP Error
@objc
private func rtmpErrorHandler(_ notification: Notification) {
print("RTMP Error Handler called.")
}
// Called when the device changes rotation
@objc
private func on(_ notification: Notification) {
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
// Do not change the output rotation if the stream has already started.
if liveDesired == false {
let profile = presetToProfile(preset: self.preset)
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height
]
}
}
} else {
// Fallback on earlier versions
}
}
// Button tapped to return to the configuration screen
@IBAction func closeButton(_ sender: Any) {
self.dismiss(animated: true, completion: nil)
}
// RTMPStreamDelegate callbacks
func rtmpStreamDidClear(_ stream: RTMPStream) {
}
// Statistics callback
func rtmpStream(_ stream: RTMPStream, didStatics connection: RTMPConnection) {
DispatchQueue.main.async {
self.fpsLabel.text = String(stream.currentFPS) + " fps"
self.bitrateLabel.text = String((connection.currentBytesOutPerSecond / 125)) + " kbps"
}
}
// Insufficient bandwidth callback
func rtmpStream(_ stream: RTMPStream, didPublishInsufficientBW connection: RTMPConnection) {
print("ABR: didPublishInsufficientBW")
// If we last changed bandwidth more than 5 seconds ago
if (Int(NSDate().timeIntervalSince1970) - lastBwChange) > 5 {
print("ABR: Will try to change bitrate")
// Reduce the bitrate by 30%
let b = Double(stream.videoSettings[.bitrate] as! UInt32) * Double(0.7)
print("ABR: Proposed bandwidth: " + String(b))
stream.videoSettings[.bitrate] = b
lastBwChange = Int(NSDate().timeIntervalSince1970)
DispatchQueue.main.async {
Loaf("Insuffient Bandwidth, changing video bandwidth to: " + String(b), state: Loaf.State.warning, location: .top, sender: self).show(.short)
}
} else {
print("ABR: Still giving grace time for last bandwidth change")
}
}
// Today this example doesn't attempt to increase bandwidth to find a sweet spot.
// An implementation might be to gently increase bandwidth by a few percent, but that's hard without getting into an aggressive cycle.
func rtmpStream(_ stream: RTMPStream, didPublishSufficientBW connection: RTMPConnection) {
}
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension BroadcastViewController: AVCaptureVideoDataOutputSampleBufferDelegate{
func setupAVCapture(){
session.sessionPreset = AVCaptureSession.Preset.vga640x480
guard let device = AVCaptureDevice
.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera,
for: .video,
position: AVCaptureDevice.Position.back) else {
return
}
captureDevice = device
beginSession()
}
func beginSession(){
var deviceInput: AVCaptureDeviceInput!
do {
deviceInput = try AVCaptureDeviceInput(device: captureDevice)
guard deviceInput != nil else {
print("error: cant get deviceInput")
return
}
if self.session.canAddInput(deviceInput){
self.session.addInput(deviceInput)
}
videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.alwaysDiscardsLateVideoFrames=true
videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
if session.canAddOutput(self.videoDataOutput){
session.addOutput(self.videoDataOutput)
}
videoDataOutput.connection(with: .video)?.isEnabled = true
previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
// let rootLayer :CALayer = self.previewView.layer
self.videoView.layer.masksToBounds=true
previewLayer.frame = videoView.bounds
videoView.layer.addSublayer(self.previewLayer)
session.startRunning()
} catch let error as NSError {
deviceInput = nil
print("error: \(error.localizedDescription)")
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// do stuff here
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
rtmpStream.videoSettings = [
.width: dimensions.width,
.height: dimensions.height ,
.profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
]
}
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
}
// clean up AVCapture
func stopCamera(){
session.stopRunning()
}
}
I have found a way to live stream the camera view with overlays on it by creating two RTMPStream objects: one for attaching the camera, and the second one for attachScreen. The following is the code.
import AVFoundation
import HaishinKit
import Photos
import UIKit
import VideoToolbox
import WebKit
final class ExampleRecorderDelegate: DefaultAVRecorderDelegate {
static let `default` = ExampleRecorderDelegate()
override func didFinishWriting(_ recorder: AVRecorder) {
guard let writer: AVAssetWriter = recorder.writer else {
return
}
PHPhotoLibrary.shared().performChanges({() -> Void in
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: writer.outputURL)
}, completionHandler: { _, error -> Void in
do {
try FileManager.default.removeItem(at: writer.outputURL)
} catch {
print(error)
}
})
}
}
final class LiveViewController: UIViewController {
private static let maxRetryCount: Int = 5
@IBOutlet private weak var lfView: MTHKView!
@IBOutlet private weak var currentFPSLabel: UILabel!
@IBOutlet private weak var publishButton: UIButton!
@IBOutlet private weak var pauseButton: UIButton!
@IBOutlet private weak var videoBitrateLabel: UILabel!
@IBOutlet private weak var videoBitrateSlider: UISlider!
@IBOutlet private weak var audioBitrateLabel: UILabel!
@IBOutlet private weak var zoomSlider: UISlider!
@IBOutlet private weak var audioBitrateSlider: UISlider!
@IBOutlet private weak var fpsControl: UISegmentedControl!
@IBOutlet private weak var effectSegmentControl: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
private var rtmpStreamLayer: RTMPStream!
private var sharedObject: RTMPSharedObject!
private var currentEffect: VideoEffect?
private var currentPosition: AVCaptureDevice.Position = .back
private var retryCount: Int = 0
override func viewDidLoad() {
super.viewDidLoad()
rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStreamLayer = RTMPStream(connection: rtmpConnection)
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
rtmpStream.orientation = orientation
}
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStreamLayer.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.shared
rtmpStreamLayer.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.shared
videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didEnterBackground(_:)), name: UIApplication.didEnterBackgroundNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didBecomeActive(_:)), name: UIApplication.didBecomeActiveNotification, object: nil)
}
override func viewWillAppear(_ animated: Bool) {
logger.info("viewWillAppear")
super.viewWillAppear(animated)
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
logger.warn(error.description)
}
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStream.attachCamera(DeviceUtil.device(withPosition: currentPosition)) { error in
logger.warn(error.description)
}
rtmpStreamLayer.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStreamLayer.receiveAudio = false
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
lfView?.attachStream(rtmpStream)
lfView?.attachStream(rtmpStreamLayer)
}
override func viewWillDisappear(_ animated: Bool) {
logger.info("viewWillDisappear")
super.viewWillDisappear(animated)
rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
rtmpStream.close()
rtmpStream.dispose()
}
@IBAction func rotateCamera(_ sender: UIButton) {
logger.info("rotateCamera")
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
rtmpStream.captureSettings[.isVideoMirrored] = position == .front
rtmpStream.attachCamera(DeviceUtil.device(withPosition: position)) { error in
logger.warn(error.description)
}
currentPosition = position
}
@IBAction func toggleTorch(_ sender: UIButton) {
rtmpStream.torch.toggle()
}
@IBAction func on(slider: UISlider) {
if slider == audioBitrateSlider {
audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
rtmpStream.audioSettings[.bitrate] = slider.value * 1000
}
if slider == videoBitrateSlider {
videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
rtmpStream.videoSettings[.bitrate] = slider.value * 1000
}
if slider == zoomSlider {
rtmpStream.setZoomFactor(CGFloat(slider.value), ramping: true, withRate: 5.0)
}
}
@IBAction func on(pause: UIButton) {
rtmpStream.paused.toggle()
}
@IBAction func on(close: UIButton) {
self.dismiss(animated: true, completion: nil)
}
@IBAction func on(publish: UIButton) {
if publish.isSelected {
UIApplication.shared.isIdleTimerDisabled = false
rtmpConnection.close()
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
publish.setTitle("●", for: [])
} else {
UIApplication.shared.isIdleTimerDisabled = true
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpConnection.connect(Preference.defaultInstance.uri!)
publish.setTitle("■", for: [])
}
publish.isSelected.toggle()
}
@objc
private func rtmpStatusHandler(_ notification: Notification) {
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
logger.info(code)
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
retryCount = 0
rtmpStream!.publish("yourstreamkey")
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2)
{
self.rtmpStreamLayer!.publish("yourstreamkey")
}
// sharedObject!.connect(rtmpConnection)
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
guard retryCount <= LiveViewController.maxRetryCount else {
return
}
Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
rtmpConnection.connect(Preference.defaultInstance.uri!)
retryCount += 1
default:
break
}
}
@objc
private func rtmpErrorHandler(_ notification: Notification) {
logger.error(notification)
rtmpConnection.connect(Preference.defaultInstance.uri!)
}
func tapScreen(_ gesture: UIGestureRecognizer) {
if let gestureView = gesture.view, gesture.state == .ended {
let touchPoint: CGPoint = gesture.location(in: gestureView)
let pointOfInterest = CGPoint(x: touchPoint.x / gestureView.bounds.size.width, y: touchPoint.y / gestureView.bounds.size.height)
print("pointOfInterest: \(pointOfInterest)")
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
@IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
switch segment.selectedSegmentIndex {
case 0:
rtmpStream.captureSettings[.fps] = 15.0
case 1:
rtmpStream.captureSettings[.fps] = 30.0
case 2:
rtmpStream.captureSettings[.fps] = 60.0
default:
break
}
}
@IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
if let currentEffect: VideoEffect = currentEffect {
_ = rtmpStream.unregisterVideoEffect(currentEffect)
}
switch segment.selectedSegmentIndex {
case 1:
currentEffect = MonochromeEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
case 2:
currentEffect = PronamaEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
default:
break
}
}
@objc
private func on(_ notification: Notification) {
guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
return
}
rtmpStream.orientation = orientation
}
@objc
private func didEnterBackground(_ notification: Notification) {
// rtmpStream.receiveVideo = false
}
@objc
private func didBecomeActive(_ notification: Notification) {
// rtmpStream.receiveVideo = true
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if Thread.isMainThread {
currentFPSLabel?.text = "\(rtmpStream.currentFPS)"
}
}
}
extension LiveViewController : UIWebViewDelegate
{
func webViewDidFinishLoad(_ webView: UIWebView) {
webview.scrollView.zoomScale = 10
}
}

Elapsed Timer not starting

I am building an elapsed timer and, while the code gives no errors, the timer does not start.
I am using two view controllers: one called Stopwatch, which has the start/stop functions in it under the class Stopwatch(), and a regular ViewController with the rest in it.
View Controller Code:
import UIKit
class ViewController: UIViewController {
let watch = Stopwatch()
@IBOutlet weak var elapsedTimeLabel: UILabel!
@IBAction func startButton(_ sender: Any) {
Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(self.updateElapsedTimeLabel), userInfo: nil, repeats: true)
watch.start()
}
@IBAction func stopButton(_ sender: Any) {
watch.stop()
}
@objc func updateElapsedTimeLabel(timer: Timer) {
if watch.isRunning {
let minutes = Int (watch.elapsedTime/60)
let seconds = watch.elapsedTime.truncatingRemainder(dividingBy: 60)
let tenOfSeconds = (watch.elapsedTime * 10).truncatingRemainder(dividingBy: 10)
elapsedTimeLabel.text = String (format: "%02d:%02d:%02d", minutes, seconds, tenOfSeconds)
} else {
timer.invalidate()
}
}
override func viewDidLoad() {
super.viewDidLoad()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
override var prefersStatusBarHidden: Bool {
return true
}
}
The Stopwatch View Controller code:
import Foundation
class Stopwatch {
private var startTime : Date?
var elapsedTime: TimeInterval {
if let startTime = self.startTime {
return -startTime.timeIntervalSinceNow
} else {
return 0
}
}
var isRunning: Bool {
return startTime != nil
}
func start() {
startTime = Date()
}
func stop() {
startTime = nil
}
}
There is nothing at all coming up in the debug window, so I'm not sure what the issue is; I reconnected the buttons over and over, so it's not that. I also get no other errors in the code, as mentioned above.
Can anyone shed some light on this? Maybe I am using the wrong #selector, or I am doing the updateElapsedTimeLabel minutes, seconds, tenOfSeconds calculations wrong. Not sure. Thanks for having a look.
If you Option-click on seconds and tenOfSeconds you will find that one is of type TimeInterval (i.e. Double) and the other is of type Double. So your format specifier of %02d was wrong. In C, a mismatch between the format specifier and the argument is undefined behavior. Swift doesn't say how it handles that but I guess it will ignore the argument.
To fix it, change your format specifier for the last 2 components to %02.f:
let minutes = Int(watch.elapsedTime/60)
let seconds = watch.elapsedTime.truncatingRemainder(dividingBy: 60)
let tenOfSeconds = (watch.elapsedTime * 100).truncatingRemainder(dividingBy: 100) // fixed the math here
elapsedTimeLabel.text = String(format: "%02d:%02.f:%02.f", minutes, seconds, tenOfSeconds)
But why not use a DateFormatter to make your life simpler:
class ViewController: UIViewController {
private let formatter: DateFormatter = {
let formatter = DateFormatter()
formatter.dateFormat = "mm:ss:SS"
return formatter
}()
@objc func updateElapsedTimeLabel(timer: Timer) {
if watch.isRunning {
elapsedTimeLabel.text = formatter.string(from: Date(timeIntervalSince1970: watch.elapsedTime))
} else {
timer.invalidate()
}
}
}

Carrying Elapsed Time over to another ViewController

I have a small elapsed timer in my game and it works very well. However, I am trying to figure out how to save the elapsed time when you die so I can carry it over to the Game Over screen where the Score and High Score are displayed.
I tried a few things but none of them seem to work. I guess it's because the time is not being saved anywhere when it's game over, but rather just reset to 00:00:00 when the game restarts.
I use two view controllers for this timer. One is called Stopwatch; the other code is in the GameScene. Here is the code.
I want to bring it into a label, for example:
let timeLabel = SKLabelNode(fontNamed: "Planer")
timeLabel.text = "Time: \(savedTimer)"
timeLabel.fontSize = 100
timeLabel.fontColor = SKColor.white
timeLabel.zPosition = 2
timeLabel.position = CGPoint (x: self.size.width/2, y: self.size.height * 0.5)
self.addChild(timeLabel)
Stopwatch.swift code
import Foundation
class Stopwatch {
private var startTime : Date?
var elapsedTime: TimeInterval {
if let startTime = self.startTime {
return -startTime.timeIntervalSinceNow
} else {
return 0
}
}
var isRunning: Bool {
return startTime != nil
}
func start() {
startTime = Date()
}
func stop() {
startTime = nil
}
}
And the code I got speed out through my Game Scene:
import UIKit
class ViewController: UIViewController {
private let formatter: DateFormatter = {
let formatter = DateFormatter()
formatter.dateFormat = "mm:ss:SS"
return formatter
}()
let watch = Stopwatch()
@IBOutlet weak var elapsedTimeLabel: UILabel!
@IBAction func startButton(_ sender: Any) {
Timer.scheduledTimer(timeInterval: 0.01, target: self, selector: #selector(self.updateElapsedTimeLabel), userInfo: nil, repeats: true)
watch.start()
}
@IBAction func stopButton(_ sender: Any) {
watch.stop()
}
@objc func updateElapsedTimeLabel(timer: Timer) {
if watch.isRunning {
elapsedTimeLabel.text = formatter.string(from: Date(timeIntervalSince1970: watch.elapsedTime))
} else {
timer.invalidate()
}
}
override func viewDidLoad() {
super.viewDidLoad()
}
override var prefersStatusBarHidden: Bool {
return true
}
}
What I understand is that you're trying to save the elapsedTime of your watch after the user taps the stop button. If that's the case, in your stopButton function you are calling watch.stop(), which in turn resets startTime to nil. So you might want to edit it like so:
// Create a new class variable to store the time
var savedTime: TimeInterval? = nil
@IBAction func stopButton(_ sender: Any) {
savedTime = watch.elapsedTime
// Use the savedTime here to pass to the game over function
watch.stop()
}
If you don't need to save the time in your ViewController class, you can move the savedTime variable to a local one in the stopButton function.
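If the goal is to show that saved value on the Game Over screen, one way to carry it over (a sketch only: GameOverScene and finalTime are hypothetical names, not from your project) is to hand the interval to the next scene as a plain property and format it there, matching the label from the question:
import Foundation
import SpriteKit

// Hypothetical game-over scene that receives the saved elapsed time.
final class GameOverScene: SKScene {
    var finalTime: TimeInterval = 0

    override func didMove(to view: SKView) {
        // Reuse the same "mm:ss:SS" formatting the timer label uses.
        let formatter = DateFormatter()
        formatter.dateFormat = "mm:ss:SS"
        let timeLabel = SKLabelNode(fontNamed: "Planer")
        timeLabel.text = "Time: \(formatter.string(from: Date(timeIntervalSince1970: finalTime)))"
        timeLabel.fontSize = 100
        timeLabel.fontColor = SKColor.white
        timeLabel.zPosition = 2
        timeLabel.position = CGPoint(x: size.width / 2, y: size.height / 2)
        addChild(timeLabel)
    }
}

// At game over, from the current scene (savedTime is the value captured in stopButton):
// let gameOver = GameOverScene(size: self.size)
// gameOver.finalTime = savedTime ?? 0
// view?.presentScene(gameOver, transition: SKTransition.fade(withDuration: 0.5))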

Jumpy UISlider when scrubbing - Using UISlider with AVPlayer

I am using AVPlayer and am trying to set up a slider to allow scrubbing of audio files. I'm having a problem with the slider jumping all over the place when it's selected. It then goes back to the original position for a second before going back to the location it was dragged to.
You can't see my cursor in the GIF, but the smooth elongated drags are me moving the knob, and the quick whips are the slider misbehaving.
I've spent hours googling and combing through Stack Overflow and can't figure out what I'm doing wrong here; a lot of similar questions are quite old and in Objective-C.
This is the section of code I think is responsible for the problem; it handles the event of the slider being moved. I've tried it without the if statement as well and didn't see a different result.
@IBAction func horizontalSliderActioned(_ sender: Any) {
horizontalSlider.isContinuous = true
if self.horizontalSlider.isTouchInside {
audioPlayer?.pause()
let seconds : Int64 = Int64(horizontalSlider.value)
let preferredTimeScale : Int32 = 1
let seekTime : CMTime = CMTimeMake(seconds, preferredTimeScale)
audioPlayerItem?.seek(to: seekTime)
audioPlayer?.play()
} else {
let duration : CMTime = (self.audioPlayer?.currentItem!.asset.duration)!
let seconds : Float64 = CMTimeGetSeconds(duration)
self.horizontalSlider.value = Float(seconds)
}
}
I will include my entire class below for reference.
import UIKit
import Parse
import AVFoundation
import AVKit
class PlayerViewController: UIViewController, AVAudioPlayerDelegate {
@IBOutlet var horizontalSlider: UISlider!
var selectedAudio: String!
var audioPlayer: AVPlayer?
var audioPlayerItem: AVPlayerItem?
var timer: Timer?
func getAudio() {
let query = PFQuery(className: "Part")
query.whereKey("objectId", equalTo: selectedAudio)
query.getFirstObjectInBackground { (object, error) in
if error != nil || object == nil {
print("The getFirstObject request failed.")
} else {
print("There is an object now get the Audio. ")
let audioFileURL = (object?.object(forKey: "partAudio") as! PFFile).url
self.audioPlayerItem = AVPlayerItem(url: NSURL(string: audioFileURL!) as! URL)
self.audioPlayer = AVPlayer(playerItem: self.audioPlayerItem)
let playerLayer = AVPlayerLayer(player: self.audioPlayer)
playerLayer.frame = CGRect(x: 0, y: 0, width: 10, height: 10)
self.view.layer.addSublayer(playerLayer)
let duration : CMTime = (self.audioPlayer?.currentItem!.asset.duration)!
let seconds : Float64 = CMTimeGetSeconds(duration)
let maxTime : Float = Float(seconds)
self.horizontalSlider.maximumValue = maxTime
self.audioPlayer?.play()
self.timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(PlayerViewController.audioSliderUpdate), userInfo: nil, repeats: true)
}
}
}
@IBOutlet var playerButton: UIButton!
func playerButtonTapped() {
if audioPlayer?.rate == 0 {
audioPlayer?.play()
self.playerButton.setImage(UIImage(named: "play"), for: UIControlState.normal)
} else {
audioPlayer?.pause()
self.playerButton.setImage(UIImage(named: "pause"), for: UIControlState.normal)
}
}
override func viewDidLoad() {
super.viewDidLoad()
horizontalSlider.minimumValue = 0
horizontalSlider.value = 0
self.playerButton.addTarget(self, action: #selector(PlayerViewController.playerButtonTapped), for: UIControlEvents.touchUpInside)
getAudio()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
NotificationCenter.default.addObserver(self, selector: #selector(PlayerViewController.finishedPlaying), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.audioPlayerItem)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// remove the timer
self.timer?.invalidate()
// remove the observer when leaving page
NotificationCenter.default.removeObserver(audioPlayer?.currentItem! as Any)
}
func finishedPlaying() {
// need option to play next track
self.playerButton.setImage(UIImage(named: "play"), for: UIControlState.normal)
let seconds : Int64 = 0
let preferredTimeScale : Int32 = 1
let seekTime : CMTime = CMTimeMake(seconds, preferredTimeScale)
audioPlayerItem!.seek(to: seekTime)
}
@IBAction func horizontalSliderActioned(_ sender: Any) {
horizontalSlider.isContinuous = true
if self.horizontalSlider.isTouchInside {
audioPlayer?.pause()
let seconds : Int64 = Int64(horizontalSlider.value)
let preferredTimeScale : Int32 = 1
let seekTime : CMTime = CMTimeMake(seconds, preferredTimeScale)
audioPlayerItem?.seek(to: seekTime)
audioPlayer?.play()
} else {
let duration : CMTime = (self.audioPlayer?.currentItem!.asset.duration)!
let seconds : Float64 = CMTimeGetSeconds(duration)
self.horizontalSlider.value = Float(seconds)
}
}
func audioSliderUpdate() {
let currentTime : CMTime = (self.audioPlayerItem?.currentTime())!
let seconds : Float64 = CMTimeGetSeconds(currentTime)
let time : Float = Float(seconds)
self.horizontalSlider.value = time
}
}
Swift 5, Xcode 11
I faced the same issue. It was apparently the periodic time observer returning an incorrect time, which caused the lag/jump in the slider. I solved it by removing the periodic time observer while the slider was changing and adding it back when the seek completion handler was called.
@objc func sliderValueChanged(_ playbackSlider: UISlider, event: UIEvent) {
let seconds : Float = Float(playbackSlider.value)
let targetTime:CMTime = CMTimeMake(value: Int64(seconds), timescale: 1)
if let touchEvent = event.allTouches?.first {
switch touchEvent.phase {
case .began:
// handle drag began
//Remove observer when dragging is in progress
self.removePeriodicTimeObserver()
break
case .moved:
// handle drag moved
break
case .ended:
// handle drag ended
//Add Observer back when seeking got completed
player.seek(to: targetTime, toleranceBefore: .zero, toleranceAfter: .zero) { [weak self] (value) in
self?.addTimeObserver()
}
break
default:
break
}
}
}
You need to remove the observers and invalidate the timers as soon as the user grabs the thumb on the slider, and add them back again when dragging is done.
To do so, add targets like this where you load your player:
mySlider.addTarget(self,
action: #selector(PlayerViewController.mySliderBeganTracking(_:)),
forControlEvents:.TouchDown)
mySlider.addTarget(self,
action: #selector(PlayerViewController.mySliderEndedTracking(_:)),
forControlEvents: .TouchUpInside)
mySlider.addTarget(self,
action: #selector(PlayerViewController.mySliderEndedTracking(_:)),
forControlEvents: .TouchUpOutside )
Then remove the observers and invalidate the timers in mySliderBeganTracking, and add the observers back in mySliderEndedTracking.
For better control over what happens in your player, write two functions, addObservers and removeObservers, and call them when needed; a rough sketch follows.
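As a rough sketch of those two helpers (the type name, the 0.5-second interval, and the injected player/slider are my own choices, not from the question), the pair could look like this:
import UIKit
import AVFoundation

// Sketch of the addObservers/removeObservers pair recommended above.
final class SliderSyncController {
    private let player: AVPlayer
    private weak var slider: UISlider?
    private var timeObserverToken: Any?

    init(player: AVPlayer, slider: UISlider) {
        self.player = player
        self.slider = slider
    }

    // Start driving the slider from playback time (call once seeking has completed).
    func addObservers() {
        let interval = CMTime(seconds: 0.5, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        timeObserverToken = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
            self?.slider?.value = Float(CMTimeGetSeconds(time))
        }
    }

    // Stop driving the slider while the user is scrubbing.
    func removeObservers() {
        if let token = timeObserverToken {
            player.removeTimeObserver(token)
            timeObserverToken = nil
        }
    }
}
Call removeObservers() from mySliderBeganTracking, and call addObservers() again from the seek completion handler in mySliderEndedTracking.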
Make sure to do the following:
isContinuous for the slider is NOT set to false.
Pause the player before seeking.
Seek to the position and use the completion handler to resume playing.
Example code:
@objc func sliderValueChanged(sender: UISlider, event: UIEvent) {
let roundedValue = sender.value.rounded()
guard let touchEvent = event.allTouches?.first else { return }
switch touchEvent.phase {
case .began:
PlayerManager.shared.pause()
case .moved:
print("Slider moved")
case .ended:
PlayerManager.shared.seek(to: roundedValue, playAfterSeeking: true)
default: ()
}
}
And here is the function for seeking:
func seek(to: Float, playAfterSeeking: Bool) {
player?.seek(to: CMTime(value: CMTimeValue(to), timescale: 1), completionHandler: { [weak self] (status) in
if playAfterSeeking {
self?.play()
}
})
}
Try using the time slider value like below:
@IBAction func timeSliderDidChange(_ sender: UISlider) {
AVPlayerManager.sharedInstance.currentTime = Double(sender.value)
}
var currentTime: Double {
get {
return CMTimeGetSeconds(player.currentTime())
}
set {
if self.player.status == .readyToPlay {
let newTime = CMTimeMakeWithSeconds(newValue, 1)
player.seek(to: newTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero) { ( _ ) in
self.updatePlayerInfo()
}
}
}
}
Pass the value of the slider when the user releases it, and don't update the slider's value from the currently playing item while user interaction is happening on the slider.
This is a temporary solution for me. I observed that the rebound happens only once, so I keep an int value isSeekInProgress:
When sliderDidFinish, isSeekInProgress = 0.
In the AVPlayer time-change callback:
if (self.isSeekInProgress > 1) {
float sliderValue = 1.f / (self.slider.maximumValue - self.slider.minimumValue) * progress;
// if (sliderValue > self.slider.value ) {
self.slider.value = sliderValue;
}else {
self.isSeekInProgress += 1;
}
